@directus/api 33.3.1 → 34.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/ai/chat/lib/create-ui-stream.js +2 -1
- package/dist/ai/chat/lib/transform-file-parts.d.ts +12 -0
- package/dist/ai/chat/lib/transform-file-parts.js +36 -0
- package/dist/ai/files/adapters/anthropic.d.ts +3 -0
- package/dist/ai/files/adapters/anthropic.js +25 -0
- package/dist/ai/files/adapters/google.d.ts +3 -0
- package/dist/ai/files/adapters/google.js +58 -0
- package/dist/ai/files/adapters/index.d.ts +3 -0
- package/dist/ai/files/adapters/index.js +3 -0
- package/dist/ai/files/adapters/openai.d.ts +3 -0
- package/dist/ai/files/adapters/openai.js +22 -0
- package/dist/ai/files/controllers/upload.d.ts +2 -0
- package/dist/ai/files/controllers/upload.js +101 -0
- package/dist/ai/files/lib/fetch-provider.d.ts +1 -0
- package/dist/ai/files/lib/fetch-provider.js +23 -0
- package/dist/ai/files/lib/upload-to-provider.d.ts +4 -0
- package/dist/ai/files/lib/upload-to-provider.js +26 -0
- package/dist/ai/files/router.d.ts +1 -0
- package/dist/ai/files/router.js +5 -0
- package/dist/ai/files/types.d.ts +5 -0
- package/dist/ai/files/types.js +1 -0
- package/dist/ai/providers/anthropic-file-support.d.ts +12 -0
- package/dist/ai/providers/anthropic-file-support.js +94 -0
- package/dist/ai/providers/registry.js +3 -6
- package/dist/ai/tools/flows/index.d.ts +16 -16
- package/dist/ai/tools/schema.d.ts +8 -8
- package/dist/ai/tools/schema.js +2 -2
- package/dist/app.js +10 -1
- package/dist/controllers/deployment-webhooks.d.ts +2 -0
- package/dist/controllers/deployment-webhooks.js +95 -0
- package/dist/controllers/deployment.js +61 -165
- package/dist/controllers/files.js +2 -1
- package/dist/database/get-ast-from-query/lib/parse-fields.js +52 -26
- package/dist/database/helpers/date/dialects/oracle.js +2 -0
- package/dist/database/helpers/date/dialects/sqlite.js +2 -0
- package/dist/database/helpers/date/types.d.ts +1 -1
- package/dist/database/helpers/date/types.js +3 -1
- package/dist/database/helpers/fn/dialects/mssql.d.ts +1 -0
- package/dist/database/helpers/fn/dialects/mssql.js +21 -0
- package/dist/database/helpers/fn/dialects/mysql.d.ts +2 -0
- package/dist/database/helpers/fn/dialects/mysql.js +30 -0
- package/dist/database/helpers/fn/dialects/oracle.d.ts +1 -0
- package/dist/database/helpers/fn/dialects/oracle.js +21 -0
- package/dist/database/helpers/fn/dialects/postgres.d.ts +14 -0
- package/dist/database/helpers/fn/dialects/postgres.js +40 -0
- package/dist/database/helpers/fn/dialects/sqlite.d.ts +1 -0
- package/dist/database/helpers/fn/dialects/sqlite.js +12 -0
- package/dist/database/helpers/fn/json/parse-function.d.ts +19 -0
- package/dist/database/helpers/fn/json/parse-function.js +66 -0
- package/dist/database/helpers/fn/types.d.ts +8 -0
- package/dist/database/helpers/fn/types.js +19 -0
- package/dist/database/helpers/schema/dialects/mysql.d.ts +1 -0
- package/dist/database/helpers/schema/dialects/mysql.js +11 -0
- package/dist/database/helpers/schema/types.d.ts +1 -0
- package/dist/database/helpers/schema/types.js +3 -0
- package/dist/database/index.js +2 -1
- package/dist/database/migrations/20260211A-add-deployment-webhooks.d.ts +3 -0
- package/dist/database/migrations/20260211A-add-deployment-webhooks.js +37 -0
- package/dist/database/run-ast/lib/apply-query/filter/get-filter-type.d.ts +2 -2
- package/dist/database/run-ast/lib/apply-query/filter/operator.js +17 -7
- package/dist/database/run-ast/lib/parse-current-level.js +8 -1
- package/dist/database/run-ast/run-ast.js +11 -1
- package/dist/database/run-ast/utils/apply-function-to-column-name.js +7 -1
- package/dist/database/run-ast/utils/get-column.js +13 -2
- package/dist/deployment/deployment.d.ts +25 -2
- package/dist/deployment/drivers/netlify.d.ts +6 -2
- package/dist/deployment/drivers/netlify.js +114 -12
- package/dist/deployment/drivers/vercel.d.ts +5 -2
- package/dist/deployment/drivers/vercel.js +84 -5
- package/dist/deployment.d.ts +5 -0
- package/dist/deployment.js +34 -0
- package/dist/permissions/utils/get-unaliased-field-key.js +9 -1
- package/dist/request/is-denied-ip.js +24 -23
- package/dist/services/authentication.js +27 -22
- package/dist/services/collections.js +1 -0
- package/dist/services/deployment-projects.d.ts +31 -2
- package/dist/services/deployment-projects.js +109 -5
- package/dist/services/deployment-runs.d.ts +19 -1
- package/dist/services/deployment-runs.js +86 -0
- package/dist/services/deployment.d.ts +44 -3
- package/dist/services/deployment.js +263 -15
- package/dist/services/files/utils/get-metadata.js +6 -6
- package/dist/services/files.d.ts +3 -1
- package/dist/services/files.js +26 -3
- package/dist/services/graphql/resolvers/query.js +23 -6
- package/dist/services/payload.d.ts +6 -0
- package/dist/services/payload.js +27 -2
- package/dist/services/server.js +1 -1
- package/dist/services/users.js +6 -1
- package/dist/utils/get-field-relational-depth.d.ts +13 -0
- package/dist/utils/get-field-relational-depth.js +22 -0
- package/dist/utils/parse-value.d.ts +4 -0
- package/dist/utils/parse-value.js +11 -0
- package/dist/utils/sanitize-query.js +3 -2
- package/dist/utils/split-fields.d.ts +4 -0
- package/dist/utils/split-fields.js +32 -0
- package/dist/utils/validate-query.js +2 -1
- package/package.json +29 -29
|
@@ -1,13 +1,18 @@
|
|
|
1
1
|
import { useEnv } from '@directus/env';
|
|
2
2
|
import { InvalidPayloadError, InvalidProviderConfigError } from '@directus/errors';
|
|
3
|
-
import { mergeFilters
|
|
3
|
+
import { mergeFilters } from '@directus/utils';
|
|
4
4
|
import { has, isEmpty } from 'lodash-es';
|
|
5
5
|
import { getCache, getCacheValueWithTTL, setCacheValueWithExpiry } from '../cache.js';
|
|
6
6
|
import { getDeploymentDriver } from '../deployment.js';
|
|
7
|
+
import { useLogger } from '../logger/index.js';
|
|
7
8
|
import { getMilliseconds } from '../utils/get-milliseconds.js';
|
|
9
|
+
import { parseValue } from '../utils/parse-value.js';
|
|
10
|
+
import { DeploymentProjectsService } from './deployment-projects.js';
|
|
11
|
+
import { DeploymentRunsService } from './deployment-runs.js';
|
|
8
12
|
import { ItemsService } from './items.js';
|
|
9
13
|
const env = useEnv();
|
|
10
14
|
const DEPLOYMENT_CACHE_TTL = getMilliseconds(env['CACHE_DEPLOYMENT_TTL']) || 5000; // Default 5s
|
|
15
|
+
const SYNC_THRESHOLD_MS = 60 * 60 * 1000; // 1 hour
|
|
11
16
|
export class DeploymentService extends ItemsService {
|
|
12
17
|
constructor(options) {
|
|
13
18
|
super('directus_deployments', options);
|
|
@@ -22,14 +27,14 @@ export class DeploymentService extends ItemsService {
|
|
|
22
27
|
}
|
|
23
28
|
let credentials;
|
|
24
29
|
try {
|
|
25
|
-
credentials =
|
|
30
|
+
credentials = parseValue(data.credentials, {});
|
|
26
31
|
}
|
|
27
32
|
catch {
|
|
28
33
|
throw new InvalidPayloadError({ reason: 'Credentials must be valid JSON' });
|
|
29
34
|
}
|
|
30
35
|
let options;
|
|
31
36
|
try {
|
|
32
|
-
options =
|
|
37
|
+
options = parseValue(data.options, undefined);
|
|
33
38
|
}
|
|
34
39
|
catch {
|
|
35
40
|
throw new InvalidPayloadError({ reason: 'Options must be valid JSON' });
|
|
@@ -61,10 +66,10 @@ export class DeploymentService extends ItemsService {
|
|
|
61
66
|
const existing = await this.readOne(key);
|
|
62
67
|
const provider = existing.provider;
|
|
63
68
|
const internal = await this.readConfig(provider);
|
|
64
|
-
let credentials =
|
|
69
|
+
let credentials = parseValue(internal.credentials, {});
|
|
65
70
|
if (hasCredentials) {
|
|
66
71
|
try {
|
|
67
|
-
const parsed =
|
|
72
|
+
const parsed = parseValue(data.credentials, {});
|
|
68
73
|
credentials = { ...credentials, ...parsed };
|
|
69
74
|
}
|
|
70
75
|
catch {
|
|
@@ -74,7 +79,7 @@ export class DeploymentService extends ItemsService {
|
|
|
74
79
|
let options = existing.options ?? undefined;
|
|
75
80
|
if (hasOptions) {
|
|
76
81
|
try {
|
|
77
|
-
options =
|
|
82
|
+
options = parseValue(data.options, undefined);
|
|
78
83
|
}
|
|
79
84
|
catch {
|
|
80
85
|
throw new InvalidPayloadError({ reason: 'Options must be valid JSON' });
|
|
@@ -122,6 +127,17 @@ export class DeploymentService extends ItemsService {
|
|
|
122
127
|
*/
|
|
123
128
|
async deleteByProvider(provider) {
|
|
124
129
|
const deployment = await this.readByProvider(provider);
|
|
130
|
+
// Webhook cleanup
|
|
131
|
+
if (deployment.webhook_ids && deployment.webhook_ids.length > 0) {
|
|
132
|
+
try {
|
|
133
|
+
const driver = await this.getDriver(provider);
|
|
134
|
+
await driver.unregisterWebhook(deployment.webhook_ids);
|
|
135
|
+
}
|
|
136
|
+
catch (err) {
|
|
137
|
+
const logger = useLogger();
|
|
138
|
+
logger.error(`Failed to unregister webhook for ${provider}: ${err}`);
|
|
139
|
+
}
|
|
140
|
+
}
|
|
125
141
|
return this.deleteOne(deployment.id);
|
|
126
142
|
}
|
|
127
143
|
/**
|
|
@@ -143,24 +159,77 @@ export class DeploymentService extends ItemsService {
|
|
|
143
159
|
return results[0];
|
|
144
160
|
}
|
|
145
161
|
/**
|
|
146
|
-
*
|
|
162
|
+
* Get webhook config for a provider
|
|
147
163
|
*/
|
|
148
|
-
|
|
149
|
-
|
|
150
|
-
|
|
151
|
-
|
|
152
|
-
|
|
153
|
-
|
|
164
|
+
async getWebhookConfig(provider) {
|
|
165
|
+
const config = await this.readConfig(provider);
|
|
166
|
+
return {
|
|
167
|
+
webhook_secret: config.webhook_secret ?? null,
|
|
168
|
+
credentials: parseValue(config.credentials, {}),
|
|
169
|
+
options: parseValue(config.options, {}),
|
|
170
|
+
};
|
|
154
171
|
}
|
|
155
172
|
/**
|
|
156
173
|
* Get a deployment driver instance with decrypted credentials
|
|
157
174
|
*/
|
|
158
175
|
async getDriver(provider) {
|
|
159
176
|
const deployment = await this.readConfig(provider);
|
|
160
|
-
const credentials =
|
|
161
|
-
const options =
|
|
177
|
+
const credentials = parseValue(deployment.credentials, {});
|
|
178
|
+
const options = parseValue(deployment.options, {});
|
|
162
179
|
return getDeploymentDriver(deployment.provider, credentials, options);
|
|
163
180
|
}
|
|
181
|
+
/**
|
|
182
|
+
* Sync webhook registration with current tracked projects.
|
|
183
|
+
*/
|
|
184
|
+
async syncWebhook(provider) {
|
|
185
|
+
const logger = useLogger();
|
|
186
|
+
logger.debug(`[webhook:${provider}] Starting webhook sync`);
|
|
187
|
+
const config = await this.readConfig(provider);
|
|
188
|
+
const projectsService = new ItemsService('directus_deployment_projects', {
|
|
189
|
+
knex: this.knex,
|
|
190
|
+
schema: this.schema,
|
|
191
|
+
accountability: null,
|
|
192
|
+
});
|
|
193
|
+
const projects = await projectsService.readByQuery({
|
|
194
|
+
filter: { deployment: { _eq: config.id } },
|
|
195
|
+
limit: -1,
|
|
196
|
+
});
|
|
197
|
+
const projectExternalIds = projects.map((p) => p.external_id);
|
|
198
|
+
const driver = await this.getDriver(provider);
|
|
199
|
+
// No projects → unregister webhooks if any exist
|
|
200
|
+
if (projectExternalIds.length === 0) {
|
|
201
|
+
if (config.webhook_ids && config.webhook_ids.length > 0) {
|
|
202
|
+
logger.debug(`[webhook:${provider}] No projects, unregistering ${config.webhook_ids.length} webhook(s)`);
|
|
203
|
+
try {
|
|
204
|
+
await driver.unregisterWebhook(config.webhook_ids);
|
|
205
|
+
}
|
|
206
|
+
catch (err) {
|
|
207
|
+
logger.warn(`[webhook:${provider}] Failed to unregister: ${err}`);
|
|
208
|
+
}
|
|
209
|
+
await super.updateOne(config.id, { webhook_ids: null, webhook_secret: null });
|
|
210
|
+
}
|
|
211
|
+
return;
|
|
212
|
+
}
|
|
213
|
+
// Unregister existing webhooks before re-registering
|
|
214
|
+
if (config.webhook_ids && config.webhook_ids.length > 0) {
|
|
215
|
+
logger.debug(`[webhook:${provider}] Unregistering ${config.webhook_ids.length} existing webhook(s)`);
|
|
216
|
+
try {
|
|
217
|
+
await driver.unregisterWebhook(config.webhook_ids);
|
|
218
|
+
}
|
|
219
|
+
catch (err) {
|
|
220
|
+
logger.warn(`[webhook:${provider}] Failed to unregister: ${err}`);
|
|
221
|
+
}
|
|
222
|
+
}
|
|
223
|
+
const publicUrl = env['PUBLIC_URL'];
|
|
224
|
+
const webhookUrl = `${publicUrl}/deployments/webhooks/${provider}`;
|
|
225
|
+
logger.debug(`[webhook:${provider}] Registering webhook → ${webhookUrl} for ${projectExternalIds.length} project(s)`);
|
|
226
|
+
const result = await driver.registerWebhook(webhookUrl, projectExternalIds);
|
|
227
|
+
await super.updateOne(config.id, {
|
|
228
|
+
webhook_ids: result.webhook_ids,
|
|
229
|
+
webhook_secret: result.webhook_secret,
|
|
230
|
+
});
|
|
231
|
+
logger.info(`[webhook:${provider}] Registered ${result.webhook_ids.length} webhook(s): [${result.webhook_ids.join(', ')}]`);
|
|
232
|
+
}
|
|
164
233
|
/**
|
|
165
234
|
* List projects from provider with caching
|
|
166
235
|
*/
|
|
@@ -199,4 +268,183 @@ export class DeploymentService extends ItemsService {
|
|
|
199
268
|
// Return with full TTL (just cached)
|
|
200
269
|
return { data: project, remainingTTL: DEPLOYMENT_CACHE_TTL };
|
|
201
270
|
}
|
|
271
|
+
/**
|
|
272
|
+
* Dashboard: projects + latest run status + stats
|
|
273
|
+
*/
|
|
274
|
+
async getDashboard(provider, sinceDate) {
|
|
275
|
+
const projectsService = new DeploymentProjectsService({
|
|
276
|
+
accountability: this.accountability,
|
|
277
|
+
schema: this.schema,
|
|
278
|
+
});
|
|
279
|
+
const runsService = new DeploymentRunsService({
|
|
280
|
+
accountability: this.accountability,
|
|
281
|
+
schema: this.schema,
|
|
282
|
+
});
|
|
283
|
+
const deployment = await this.readByProvider(provider);
|
|
284
|
+
const selectedProjects = await projectsService.readByQuery({
|
|
285
|
+
filter: { deployment: { _eq: deployment.id } },
|
|
286
|
+
limit: -1,
|
|
287
|
+
});
|
|
288
|
+
if (selectedProjects.length === 0) {
|
|
289
|
+
return {
|
|
290
|
+
projects: [],
|
|
291
|
+
stats: { active_deployments: 0, successful_builds: 0, failed_builds: 0 },
|
|
292
|
+
};
|
|
293
|
+
}
|
|
294
|
+
const projectIds = selectedProjects.map((p) => p.id);
|
|
295
|
+
// Latest run per project + aggregated stats
|
|
296
|
+
const [latestRuns, activeResult, statusCounts] = await Promise.all([
|
|
297
|
+
Promise.all(projectIds.map(async (projectId) => {
|
|
298
|
+
const runs = await runsService.readByQuery({
|
|
299
|
+
filter: { project: { _eq: projectId } },
|
|
300
|
+
sort: ['-date_created'],
|
|
301
|
+
limit: 1,
|
|
302
|
+
});
|
|
303
|
+
return { projectId, run: runs?.[0] ?? null };
|
|
304
|
+
})),
|
|
305
|
+
runsService.readByQuery({
|
|
306
|
+
filter: { project: { _in: projectIds }, status: { _eq: 'building' } },
|
|
307
|
+
aggregate: { count: ['*'] },
|
|
308
|
+
}),
|
|
309
|
+
runsService.readByQuery({
|
|
310
|
+
filter: {
|
|
311
|
+
_and: [
|
|
312
|
+
{ project: { _in: projectIds } },
|
|
313
|
+
{ status: { _in: ['ready', 'error'] } },
|
|
314
|
+
{ date_created: { _gte: sinceDate.toISOString() } },
|
|
315
|
+
],
|
|
316
|
+
},
|
|
317
|
+
aggregate: { count: ['*'] },
|
|
318
|
+
group: ['status'],
|
|
319
|
+
}),
|
|
320
|
+
]);
|
|
321
|
+
const latestRunMap = new Map(latestRuns.map((r) => [r.projectId, r.run]));
|
|
322
|
+
const countByStatus = (status) => Number(statusCounts.find((r) => r['status'] === status)?.['count'] ?? 0);
|
|
323
|
+
// Background sync of project metadata if stale
|
|
324
|
+
this.syncProjectMetadataIfStale(provider, deployment).catch((err) => {
|
|
325
|
+
const logger = useLogger();
|
|
326
|
+
logger.error(`Failed to sync project metadata for ${provider}: ${err}`);
|
|
327
|
+
});
|
|
328
|
+
return {
|
|
329
|
+
projects: selectedProjects.map((p) => {
|
|
330
|
+
const latestRun = latestRunMap.get(p.id);
|
|
331
|
+
return {
|
|
332
|
+
id: p.id,
|
|
333
|
+
external_id: p.external_id,
|
|
334
|
+
name: p.name,
|
|
335
|
+
url: p.url,
|
|
336
|
+
framework: p.framework,
|
|
337
|
+
deployable: p.deployable,
|
|
338
|
+
...(latestRun && {
|
|
339
|
+
latest_deployment: {
|
|
340
|
+
status: latestRun.status,
|
|
341
|
+
created_at: latestRun.started_at ?? latestRun.date_created,
|
|
342
|
+
finished_at: latestRun.completed_at ?? null,
|
|
343
|
+
},
|
|
344
|
+
}),
|
|
345
|
+
};
|
|
346
|
+
}),
|
|
347
|
+
stats: {
|
|
348
|
+
active_deployments: Number(activeResult[0]?.['count'] ?? 0),
|
|
349
|
+
successful_builds: countByStatus('ready'),
|
|
350
|
+
failed_builds: countByStatus('error'),
|
|
351
|
+
},
|
|
352
|
+
};
|
|
353
|
+
}
|
|
354
|
+
/**
|
|
355
|
+
* Refresh project metadata (name, url, framework, deployable) if stale.
|
|
356
|
+
*/
|
|
357
|
+
async syncProjectMetadataIfStale(provider, deployment) {
|
|
358
|
+
if (deployment.last_synced_at) {
|
|
359
|
+
const lastSync = new Date(deployment.last_synced_at).getTime();
|
|
360
|
+
if (Date.now() - lastSync < SYNC_THRESHOLD_MS)
|
|
361
|
+
return;
|
|
362
|
+
}
|
|
363
|
+
const logger = useLogger();
|
|
364
|
+
logger.debug(`[metadata:${provider}] Syncing project metadata`);
|
|
365
|
+
const projectsService = new DeploymentProjectsService({
|
|
366
|
+
accountability: null,
|
|
367
|
+
schema: this.schema,
|
|
368
|
+
});
|
|
369
|
+
const driver = await this.getDriver(provider);
|
|
370
|
+
const selectedProjects = await projectsService.readByQuery({
|
|
371
|
+
filter: { deployment: { _eq: deployment.id } },
|
|
372
|
+
limit: -1,
|
|
373
|
+
});
|
|
374
|
+
// Fetch details per project
|
|
375
|
+
const updates = await Promise.all(selectedProjects.map(async (p) => {
|
|
376
|
+
const details = await driver.getProject(p.external_id);
|
|
377
|
+
return {
|
|
378
|
+
id: p.id,
|
|
379
|
+
name: details.name,
|
|
380
|
+
url: details.url ?? null,
|
|
381
|
+
framework: details.framework ?? null,
|
|
382
|
+
deployable: details.deployable,
|
|
383
|
+
};
|
|
384
|
+
}));
|
|
385
|
+
if (updates.length > 0) {
|
|
386
|
+
await projectsService.updateBatch(updates);
|
|
387
|
+
}
|
|
388
|
+
// Mark sync timestamp
|
|
389
|
+
const internalService = new DeploymentService({
|
|
390
|
+
accountability: null,
|
|
391
|
+
schema: this.schema,
|
|
392
|
+
});
|
|
393
|
+
await internalService.updateOne(deployment.id, { last_synced_at: new Date().toISOString() });
|
|
394
|
+
}
|
|
395
|
+
/**
|
|
396
|
+
* Trigger a deployment for a project
|
|
397
|
+
*/
|
|
398
|
+
async triggerDeployment(provider, projectId, options) {
|
|
399
|
+
const projectsService = new DeploymentProjectsService({
|
|
400
|
+
accountability: this.accountability,
|
|
401
|
+
schema: this.schema,
|
|
402
|
+
});
|
|
403
|
+
const runsService = new DeploymentRunsService({
|
|
404
|
+
accountability: this.accountability,
|
|
405
|
+
schema: this.schema,
|
|
406
|
+
});
|
|
407
|
+
const project = await projectsService.readOne(projectId);
|
|
408
|
+
const driver = await this.getDriver(provider);
|
|
409
|
+
const result = await driver.triggerDeployment(project.external_id, {
|
|
410
|
+
preview: options.preview,
|
|
411
|
+
clearCache: options.clearCache,
|
|
412
|
+
});
|
|
413
|
+
const runId = await runsService.createOne({
|
|
414
|
+
project: projectId,
|
|
415
|
+
external_id: result.deployment_id,
|
|
416
|
+
target: options.preview ? 'preview' : 'production',
|
|
417
|
+
status: result.status,
|
|
418
|
+
started_at: result.created_at.toISOString(),
|
|
419
|
+
...(result.url ? { url: result.url } : {}),
|
|
420
|
+
});
|
|
421
|
+
return runsService.readOne(runId);
|
|
422
|
+
}
|
|
423
|
+
/**
|
|
424
|
+
* Cancel a deployment run
|
|
425
|
+
*/
|
|
426
|
+
async cancelDeployment(provider, runId) {
|
|
427
|
+
const runsService = new DeploymentRunsService({
|
|
428
|
+
accountability: this.accountability,
|
|
429
|
+
schema: this.schema,
|
|
430
|
+
});
|
|
431
|
+
const run = await runsService.readOne(runId);
|
|
432
|
+
const driver = await this.getDriver(provider);
|
|
433
|
+
const status = await driver.cancelDeployment(run.external_id);
|
|
434
|
+
await runsService.updateOne(runId, { status });
|
|
435
|
+
return runsService.readOne(runId);
|
|
436
|
+
}
|
|
437
|
+
/**
|
|
438
|
+
* Get a run with its logs from the provider
|
|
439
|
+
*/
|
|
440
|
+
async getRunWithLogs(provider, runId, since) {
|
|
441
|
+
const runsService = new DeploymentRunsService({
|
|
442
|
+
accountability: this.accountability,
|
|
443
|
+
schema: this.schema,
|
|
444
|
+
});
|
|
445
|
+
const run = await runsService.readOne(runId);
|
|
446
|
+
const driver = await this.getDriver(provider);
|
|
447
|
+
const logs = await driver.getDeploymentLogs(run.external_id, since ? { since } : undefined);
|
|
448
|
+
return { ...run, logs };
|
|
449
|
+
}
|
|
202
450
|
}
|
|
@@ -78,14 +78,14 @@ export async function getMetadata(stream, allowList = env['FILE_METADATA_ALLOW_L
|
|
|
78
78
|
logger.warn(err);
|
|
79
79
|
}
|
|
80
80
|
}
|
|
81
|
-
if (fullMetadata?.iptc?.['
|
|
82
|
-
metadata.description = fullMetadata.iptc
|
|
81
|
+
if (fullMetadata?.iptc?.['caption'] && typeof fullMetadata.iptc['caption'] === 'string') {
|
|
82
|
+
metadata.description = fullMetadata.iptc['caption'];
|
|
83
83
|
}
|
|
84
|
-
if (fullMetadata?.iptc?.['
|
|
85
|
-
metadata.title = fullMetadata.iptc['
|
|
84
|
+
if (fullMetadata?.iptc?.['headline'] && typeof fullMetadata.iptc['headline'] === 'string') {
|
|
85
|
+
metadata.title = fullMetadata.iptc['headline'];
|
|
86
86
|
}
|
|
87
|
-
if (fullMetadata?.iptc?.['
|
|
88
|
-
metadata.tags = fullMetadata.iptc['
|
|
87
|
+
if (fullMetadata?.iptc?.['keywords']) {
|
|
88
|
+
metadata.tags = fullMetadata.iptc['keywords'];
|
|
89
89
|
}
|
|
90
90
|
if (allowList === '*' || allowList?.[0] === '*') {
|
|
91
91
|
metadata.metadata = fullMetadata;
|
package/dist/services/files.d.ts
CHANGED
|
@@ -15,7 +15,9 @@ export declare class FilesService extends ItemsService<File> {
|
|
|
15
15
|
/**
|
|
16
16
|
* Import a single file from an external URL
|
|
17
17
|
*/
|
|
18
|
-
importOne(importURL: string, body: Partial<File
|
|
18
|
+
importOne(importURL: string, body: Partial<File>, options?: {
|
|
19
|
+
filterMimeType?: string[];
|
|
20
|
+
}): Promise<PrimaryKey>;
|
|
19
21
|
/**
|
|
20
22
|
* Create a file (only applicable when it is not a multipart/data POST request)
|
|
21
23
|
* Useful for associating metadata with existing file in storage
|
package/dist/services/files.js
CHANGED
|
@@ -3,12 +3,13 @@ import zlib from 'node:zlib';
|
|
|
3
3
|
import path from 'path';
|
|
4
4
|
import url from 'url';
|
|
5
5
|
import { useEnv } from '@directus/env';
|
|
6
|
-
import { ContentTooLargeError, InvalidPayloadError, ServiceUnavailableError } from '@directus/errors';
|
|
6
|
+
import { ContentTooLargeError, InternalServerError, InvalidPayloadError, ServiceUnavailableError, } from '@directus/errors';
|
|
7
7
|
import formatTitle from '@directus/format-title';
|
|
8
8
|
import { toArray } from '@directus/utils';
|
|
9
9
|
import encodeURL from 'encodeurl';
|
|
10
10
|
import { clone, cloneDeep } from 'lodash-es';
|
|
11
11
|
import { extension } from 'mime-types';
|
|
12
|
+
import { minimatch } from 'minimatch';
|
|
12
13
|
import { RESUMABLE_UPLOADS } from '../constants.js';
|
|
13
14
|
import emitter from '../emitter.js';
|
|
14
15
|
import { useLogger } from '../logger/index.js';
|
|
@@ -114,6 +115,9 @@ export class FilesService extends ItemsService {
|
|
|
114
115
|
if (err instanceof ContentTooLargeError) {
|
|
115
116
|
throw err;
|
|
116
117
|
}
|
|
118
|
+
else if (err?.code && ['EROFS', 'EACCES', 'EPERM'].includes(err.code)) {
|
|
119
|
+
throw new InternalServerError();
|
|
120
|
+
}
|
|
117
121
|
else {
|
|
118
122
|
throw new ServiceUnavailableError({ service: 'files', reason: `Couldn't save file ${payload.filename_disk}` });
|
|
119
123
|
}
|
|
@@ -164,7 +168,7 @@ export class FilesService extends ItemsService {
|
|
|
164
168
|
/**
|
|
165
169
|
* Import a single file from an external URL
|
|
166
170
|
*/
|
|
167
|
-
async importOne(importURL, body) {
|
|
171
|
+
async importOne(importURL, body, options = {}) {
|
|
168
172
|
if (this.accountability) {
|
|
169
173
|
await validateAccess({
|
|
170
174
|
accountability: this.accountability,
|
|
@@ -193,10 +197,29 @@ export class FilesService extends ItemsService {
|
|
|
193
197
|
}
|
|
194
198
|
const parsedURL = url.parse(fileResponse.request.res.responseUrl);
|
|
195
199
|
const filename = decodeURI(path.basename(parsedURL.pathname));
|
|
200
|
+
const mimeType = fileResponse.headers['content-type']?.split(';')[0]?.trim() || 'application/octet-stream';
|
|
201
|
+
// Check against global MIME type allow list from env
|
|
202
|
+
const globalAllowedPatterns = toArray(env['FILES_MIME_TYPE_ALLOW_LIST']);
|
|
203
|
+
const globalMimeTypeAllowed = globalAllowedPatterns.some((pattern) => minimatch(mimeType, pattern));
|
|
204
|
+
if (globalMimeTypeAllowed === false) {
|
|
205
|
+
throw new InvalidPayloadError({
|
|
206
|
+
reason: `File content type "${mimeType}" is not allowed for upload by your global file type restrictions`,
|
|
207
|
+
});
|
|
208
|
+
}
|
|
209
|
+
const { filterMimeType } = options;
|
|
210
|
+
// Check against interface-level MIME type restrictions if provided
|
|
211
|
+
if (filterMimeType && filterMimeType.length > 0) {
|
|
212
|
+
const interfaceMimeTypeAllowed = filterMimeType.some((pattern) => minimatch(mimeType, pattern));
|
|
213
|
+
if (interfaceMimeTypeAllowed === false) {
|
|
214
|
+
throw new InvalidPayloadError({
|
|
215
|
+
reason: `File content type "${mimeType}" is not allowed for upload by this field's file type restrictions`,
|
|
216
|
+
});
|
|
217
|
+
}
|
|
218
|
+
}
|
|
196
219
|
const payload = {
|
|
197
220
|
filename_download: filename,
|
|
198
221
|
storage: toArray(env['STORAGE_LOCATIONS'])[0],
|
|
199
|
-
type:
|
|
222
|
+
type: mimeType,
|
|
200
223
|
title: formatTitle(filename),
|
|
201
224
|
...(body || {}),
|
|
202
225
|
};
|
|
@@ -1,4 +1,5 @@
|
|
|
1
1
|
import { parseFilterFunctionPath } from '@directus/utils';
|
|
2
|
+
import { omit } from 'lodash-es';
|
|
2
3
|
import { parseArgs } from '../schema/parse-args.js';
|
|
3
4
|
import { getQuery } from '../schema/parse-query.js';
|
|
4
5
|
import { getAggregateQuery } from '../utils/aggregate-query.js';
|
|
@@ -40,13 +41,29 @@ export async function resolveQuery(gql, info) {
|
|
|
40
41
|
const result = await gql.read(collection, query, args['id']);
|
|
41
42
|
if (args['id'])
|
|
42
43
|
return result;
|
|
44
|
+
/**
|
|
45
|
+
* Since grouped fields are returned at the top level, we duplicate those fields
|
|
46
|
+
* into the expected `group` property on the payload, excluding any non-group
|
|
47
|
+
* fields (i.e. aggregate keys).
|
|
48
|
+
*
|
|
49
|
+
* The payload can only contain grouped fields and aggregate keys, as the
|
|
50
|
+
* aggregate selection is restricted to those fields. Therefore, the original
|
|
51
|
+
* grouped fields can safely remain at the top level.
|
|
52
|
+
*
|
|
53
|
+
* We cannot iterate over `query.group`, because grouped fields that use
|
|
54
|
+
* functions are normalized (e.g. function(field) → function_field), which would
|
|
55
|
+
* result in them being skipped.
|
|
56
|
+
*
|
|
57
|
+
* Before:
|
|
58
|
+
* { year_date: 2023, count: { id: 42 } }
|
|
59
|
+
*
|
|
60
|
+
* After:
|
|
61
|
+
* { year_date: 2023, count: { id: 42 }, group: { year_date: 2023 } }
|
|
62
|
+
*/
|
|
43
63
|
if (query.group) {
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
groupValues[key] = field[key];
|
|
48
|
-
}
|
|
49
|
-
field['group'] = groupValues;
|
|
64
|
+
const aggregateKeys = Object.keys(query.aggregate ?? {});
|
|
65
|
+
result['map']((payload) => {
|
|
66
|
+
payload['group'] = omit(payload, aggregateKeys);
|
|
50
67
|
});
|
|
51
68
|
}
|
|
52
69
|
return result;
|
|
@@ -41,6 +41,12 @@ export declare class PayloadService {
|
|
|
41
41
|
* to check if the value is a raw instance before stringifying it in the next step.
|
|
42
42
|
*/
|
|
43
43
|
processGeometries<T extends Partial<Record<string, any>>[]>(fieldEntries: [string, FieldOverview][], payloads: T, action: PayloadAction): T;
|
|
44
|
+
/**
|
|
45
|
+
* When accessing JSON paths that contain objects or arrays, certain databases return stringified
|
|
46
|
+
* JSON (MySQL, SQLite, MSSQL, Oracle). The fn helper's parseJsonResult handles this per-dialect —
|
|
47
|
+
* vendors whose drivers already deserialize the result (e.g. pg for PostgreSQL) use a no-op.
|
|
48
|
+
*/
|
|
49
|
+
processJsonFunctionResults<T extends Partial<Record<string, any>>[]>(payloads: T, aliasMap?: Record<string, string>): void;
|
|
44
50
|
/**
|
|
45
51
|
* Knex returns `datetime` and `date` columns as Date.. This is wrong for date / datetime, as those
|
|
46
52
|
* shouldn't return with time / timezone info respectively
|
package/dist/services/payload.js
CHANGED
|
@@ -7,8 +7,9 @@ import { unflatten } from 'flat';
|
|
|
7
7
|
import Joi from 'joi';
|
|
8
8
|
import { clone, cloneDeep, isNil, isObject, isPlainObject, pick } from 'lodash-es';
|
|
9
9
|
import { parse as wktToGeoJSON } from 'wellknown';
|
|
10
|
-
import { getHelpers } from '../database/helpers/index.js';
|
|
10
|
+
import { getFunctions, getHelpers } from '../database/helpers/index.js';
|
|
11
11
|
import getDatabase from '../database/index.js';
|
|
12
|
+
import { useLogger } from '../logger/index.js';
|
|
12
13
|
import { decrypt, encrypt } from '../utils/encrypt.js';
|
|
13
14
|
import { generateHash } from '../utils/generate-hash.js';
|
|
14
15
|
import { getSecret } from '../utils/get-secret.js';
|
|
@@ -133,7 +134,13 @@ export class PayloadService {
|
|
|
133
134
|
// In-system calls can still get the decrypted value
|
|
134
135
|
if (accountability === null) {
|
|
135
136
|
const key = getSecret();
|
|
136
|
-
|
|
137
|
+
try {
|
|
138
|
+
return await decrypt(value, key);
|
|
139
|
+
}
|
|
140
|
+
catch (err) {
|
|
141
|
+
useLogger().warn(`Failed to decrypt field value: ${err.message}`);
|
|
142
|
+
return null;
|
|
143
|
+
}
|
|
137
144
|
}
|
|
138
145
|
// Requests from the API entrypoints have accountability and shouldn't get the raw value
|
|
139
146
|
return '**********';
|
|
@@ -177,6 +184,9 @@ export class PayloadService {
|
|
|
177
184
|
}
|
|
178
185
|
this.processGeometries(fieldEntries, processedPayload, action);
|
|
179
186
|
this.processDates(fieldEntries, processedPayload, action, aliasMap, aggregate);
|
|
187
|
+
if (action === 'read') {
|
|
188
|
+
this.processJsonFunctionResults(processedPayload, aliasMap);
|
|
189
|
+
}
|
|
180
190
|
if (['create', 'update'].includes(action)) {
|
|
181
191
|
processedPayload.forEach((record) => {
|
|
182
192
|
for (const [key, value] of Object.entries(record)) {
|
|
@@ -260,6 +270,21 @@ export class PayloadService {
|
|
|
260
270
|
}
|
|
261
271
|
return payloads;
|
|
262
272
|
}
|
|
273
|
+
/**
|
|
274
|
+
* When accessing JSON paths that contain objects or arrays, certain databases return stringified
|
|
275
|
+
* JSON (MySQL, SQLite, MSSQL, Oracle). The fn helper's parseJsonResult handles this per-dialect —
|
|
276
|
+
* vendors whose drivers already deserialize the result (e.g. pg for PostgreSQL) use a no-op.
|
|
277
|
+
*/
|
|
278
|
+
processJsonFunctionResults(payloads, aliasMap = {}) {
|
|
279
|
+
const fn = getFunctions(this.knex, this.schema);
|
|
280
|
+
for (const [aliasField, originalField] of Object.entries(aliasMap)) {
|
|
281
|
+
if (!originalField.startsWith('json(') || !originalField.endsWith(')'))
|
|
282
|
+
continue;
|
|
283
|
+
for (const payload of payloads) {
|
|
284
|
+
payload[aliasField] = fn.parseJsonResult(payload[aliasField]);
|
|
285
|
+
}
|
|
286
|
+
}
|
|
287
|
+
}
|
|
263
288
|
/**
|
|
264
289
|
* Knex returns `datetime` and `date` columns as Date.. This is wrong for date / datetime, as those
|
|
265
290
|
* shouldn't return with time / timezone info respectively
|
package/dist/services/server.js
CHANGED
|
@@ -62,7 +62,7 @@ export class ServerService {
|
|
|
62
62
|
info['mcp_enabled'] = toBoolean(env['MCP_ENABLED'] ?? true);
|
|
63
63
|
info['ai_enabled'] = toBoolean(env['AI_ENABLED'] ?? true);
|
|
64
64
|
info['files'] = {
|
|
65
|
-
mimeTypeAllowList: env['FILES_MIME_TYPE_ALLOW_LIST'],
|
|
65
|
+
mimeTypeAllowList: toArray(env['FILES_MIME_TYPE_ALLOW_LIST']),
|
|
66
66
|
};
|
|
67
67
|
if (env['RATE_LIMITER_ENABLED']) {
|
|
68
68
|
info['rateLimit'] = {
|
package/dist/services/users.js
CHANGED
|
@@ -8,6 +8,7 @@ import Joi from 'joi';
|
|
|
8
8
|
import jwt from 'jsonwebtoken';
|
|
9
9
|
import { isEmpty } from 'lodash-es';
|
|
10
10
|
import { clearSystemCache } from '../cache.js';
|
|
11
|
+
import { DEFAULT_AUTH_PROVIDER } from '../constants.js';
|
|
11
12
|
import getDatabase from '../database/index.js';
|
|
12
13
|
import { useLogger } from '../logger/index.js';
|
|
13
14
|
import { validateRemainingAdminUsers } from '../permissions/modules/validate-remaining-admin/validate-remaining-admin-users.js';
|
|
@@ -109,7 +110,7 @@ export class UsersService extends ItemsService {
|
|
|
109
110
|
*/
|
|
110
111
|
async getUserByEmail(email) {
|
|
111
112
|
return this.knex
|
|
112
|
-
.select('id', 'role', 'status', 'password', 'email')
|
|
113
|
+
.select('id', 'role', 'status', 'password', 'email', 'provider')
|
|
113
114
|
.from('directus_users')
|
|
114
115
|
.whereRaw(`LOWER(??) = ?`, ['email', email.toLowerCase()])
|
|
115
116
|
.first();
|
|
@@ -459,6 +460,10 @@ export class UsersService extends ItemsService {
|
|
|
459
460
|
await stall(STALL_TIME, timeStart);
|
|
460
461
|
throw new ForbiddenError();
|
|
461
462
|
}
|
|
463
|
+
if (user.provider !== DEFAULT_AUTH_PROVIDER) {
|
|
464
|
+
await stall(STALL_TIME, timeStart);
|
|
465
|
+
throw new ForbiddenError();
|
|
466
|
+
}
|
|
462
467
|
const mailService = new MailService({
|
|
463
468
|
schema: this.schema,
|
|
464
469
|
knex: this.knex,
|
|
@@ -0,0 +1,13 @@
|
|
|
/**
 * Counts the number of relational segments in a field path. Handles function syntax
 * (e.g. json(), year()) by counting relational segments in the prefix and in the first argument
 * separately, while ignoring subsequent arguments (e.g. json paths).
 *
 * @param field - Raw field path, optionally containing a single function call.
 * @returns The number of relational segments in the path.
 *
 * @example
 * getFieldRelationalDepth('a.b.c') // 3
 * getFieldRelationalDepth('year(user.date_created)') // 2
 * getFieldRelationalDepth('category_id.json(metadata, a.b.c)') // 2
 * getFieldRelationalDepth('json(a.b.field, some.path)') // 3
 * getFieldRelationalDepth('json(metadata, path.to.value)') // 1
 */
export declare function getFieldRelationalDepth(field: string): number;
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Counts the number of relational segments in a field path. Handles function syntax
|
|
3
|
+
* (e.g. json(), year()) by counting relational segments in the prefix and in the first argument
|
|
4
|
+
* separately, while ignoring subsequent arguments (e.g. json paths).
|
|
5
|
+
*
|
|
6
|
+
* @example
|
|
7
|
+
* getFieldRelationalDepth('a.b.c') // 3
|
|
8
|
+
* getFieldRelationalDepth('year(user.date_created)') // 2
|
|
9
|
+
* getFieldRelationalDepth('category_id.json(metadata, a.b.c)') // 2
|
|
10
|
+
* getFieldRelationalDepth('json(a.b.field, some.path)') // 3
|
|
11
|
+
* getFieldRelationalDepth('json(metadata, path.to.value)') // 1
|
|
12
|
+
*/
|
|
13
|
+
export function getFieldRelationalDepth(field) {
|
|
14
|
+
const openParenIndex = field.indexOf('(');
|
|
15
|
+
if (openParenIndex === -1) {
|
|
16
|
+
return field.split('.').length;
|
|
17
|
+
}
|
|
18
|
+
const functionDepth = field.slice(0, openParenIndex).split('.').length - 1;
|
|
19
|
+
const commaIndex = field.indexOf(',', openParenIndex);
|
|
20
|
+
const fieldDepth = field.slice(openParenIndex, commaIndex).split('.').length;
|
|
21
|
+
return functionDepth + fieldDepth;
|
|
22
|
+
}
|
|
@@ -0,0 +1,11 @@
|
|
|
import { parseJSON } from '@directus/utils';
/**
 * Parse a value that might be a JSON string, returning a typed result or fallback.
 */
export function parseValue(value, fallback) {
    // Falsy values resolve to the fallback.
    // NOTE(review): this includes 0 and false, not just null/undefined/'' — confirm intended.
    if (!value) {
        return fallback;
    }
    return typeof value === 'string' ? parseJSON(value) : value;
}
|