google-drive-mock 1.0.13 → 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/workflows/release.yml +12 -4
- package/dist/batch.js +1 -1
- package/dist/index.js +1 -1
- package/dist/routes/v3.js +54 -4
- package/dist/store.js +1 -1
- package/package.json +1 -1
- package/src/batch.ts +2 -1
- package/src/index.ts +1 -1
- package/src/routes/v3.ts +57 -3
- package/src/store.ts +1 -1
- package/test/batch_insert_download.test.ts +150 -0
- package/test/concurrent_fetch.test.ts +17 -10
- package/test/dates_and_sorting.test.ts +0 -2
- package/test/iterate_changes.test.ts +408 -0
- package/test/parallel_update.test.ts +138 -0
- package/test/url_parameters.test.ts +76 -0
|
@@ -4,8 +4,8 @@ on:
|
|
|
4
4
|
workflow_dispatch:
|
|
5
5
|
inputs:
|
|
6
6
|
version:
|
|
7
|
-
description: 'New Version (e.g. 1.0.0)'
|
|
8
|
-
required:
|
|
7
|
+
description: 'New Version (e.g. 1.0.0, or leave empty for minor)'
|
|
8
|
+
required: false
|
|
9
9
|
type: string
|
|
10
10
|
|
|
11
11
|
jobs:
|
|
@@ -25,7 +25,15 @@ jobs:
|
|
|
25
25
|
cache: 'npm'
|
|
26
26
|
|
|
27
27
|
- run: npm install
|
|
28
|
-
-
|
|
28
|
+
- name: Bump Version
|
|
29
|
+
id: bump
|
|
30
|
+
run: |
|
|
31
|
+
if [ -z "${{ inputs.version }}" ]; then
|
|
32
|
+
VERSION=$(npm version minor --no-git-tag-version)
|
|
33
|
+
else
|
|
34
|
+
VERSION=$(npm version ${{ inputs.version }} --no-git-tag-version)
|
|
35
|
+
fi
|
|
36
|
+
echo "version=${VERSION}" >> $GITHUB_OUTPUT
|
|
29
37
|
- run: npm run build
|
|
30
38
|
- run: npm run lint
|
|
31
39
|
- run: npm test
|
|
@@ -38,5 +46,5 @@ jobs:
|
|
|
38
46
|
git config --global user.name 'github-actions[bot]'
|
|
39
47
|
git config --global user.email 'github-actions[bot]@users.noreply.github.com'
|
|
40
48
|
git add package.json package-lock.json
|
|
41
|
-
git commit -m "release: ${{
|
|
49
|
+
git commit -m "release: ${{ steps.bump.outputs.version }}"
|
|
42
50
|
git push
|
package/dist/batch.js
CHANGED
|
@@ -100,7 +100,7 @@ function processPart(part, req) {
|
|
|
100
100
|
body: Object.assign({ kind: "drive#about" }, about)
|
|
101
101
|
};
|
|
102
102
|
}
|
|
103
|
-
// POST Create File
|
|
103
|
+
// POST Create File (Standard)
|
|
104
104
|
if (part.method === 'POST' && filesListMatch) {
|
|
105
105
|
if (!part.body || !part.body.name) {
|
|
106
106
|
return { contentId: part.contentId, statusCode: 400, body: { error: { code: 400, message: 'Name required' } } };
|
package/dist/index.js
CHANGED
|
@@ -26,7 +26,7 @@ const createApp = (config = {}) => {
|
|
|
26
26
|
}
|
|
27
27
|
const app = (0, express_1.default)();
|
|
28
28
|
app.use((0, cors_1.default)({
|
|
29
|
-
exposedHeaders: ['ETag']
|
|
29
|
+
exposedHeaders: ['ETag', 'Date', 'Content-Length']
|
|
30
30
|
}));
|
|
31
31
|
app.set('etag', false); // Disable default ETag generation to match Real API behavior
|
|
32
32
|
app.use((req, res, next) => __awaiter(void 0, void 0, void 0, function* () {
|
package/dist/routes/v3.js
CHANGED
|
@@ -22,7 +22,7 @@ const createV3Router = () => {
|
|
|
22
22
|
// Enhanced query parser for Mock
|
|
23
23
|
// Recursive function to handle nested OR/AND logic with parens
|
|
24
24
|
const evaluateQuery = (queryStr, file) => {
|
|
25
|
-
var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k;
|
|
25
|
+
var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l;
|
|
26
26
|
const str = queryStr.trim();
|
|
27
27
|
if (!str)
|
|
28
28
|
return true;
|
|
@@ -136,6 +136,10 @@ const createV3Router = () => {
|
|
|
136
136
|
const timeStr = (_k = part.match(/modifiedTime < '(.*)'/)) === null || _k === void 0 ? void 0 : _k[1];
|
|
137
137
|
return !!(timeStr && new Date(file.modifiedTime) < new Date(timeStr));
|
|
138
138
|
}
|
|
139
|
+
if (part.startsWith("modifiedTime = '")) {
|
|
140
|
+
const timeStr = (_l = part.match(/modifiedTime = '(.*)'/)) === null || _l === void 0 ? void 0 : _l[1];
|
|
141
|
+
return !!(timeStr && new Date(file.modifiedTime).toISOString() === new Date(timeStr).toISOString());
|
|
142
|
+
}
|
|
139
143
|
// Fallback / Unknown
|
|
140
144
|
return true;
|
|
141
145
|
};
|
|
@@ -172,10 +176,33 @@ const createV3Router = () => {
|
|
|
172
176
|
return 0;
|
|
173
177
|
});
|
|
174
178
|
}
|
|
179
|
+
// Pagination
|
|
180
|
+
const pageSize = req.query.pageSize ? parseInt(req.query.pageSize, 10) : 100; // Default 100
|
|
181
|
+
let skip = 0;
|
|
182
|
+
if (req.query.pageToken) {
|
|
183
|
+
try {
|
|
184
|
+
const tokenJson = Buffer.from(req.query.pageToken, 'base64').toString('utf-8');
|
|
185
|
+
const tokenData = JSON.parse(tokenJson);
|
|
186
|
+
if (typeof tokenData.skip === 'number') {
|
|
187
|
+
skip = tokenData.skip;
|
|
188
|
+
}
|
|
189
|
+
}
|
|
190
|
+
catch (_a) {
|
|
191
|
+
// Ignore invalid token, start from 0
|
|
192
|
+
}
|
|
193
|
+
}
|
|
194
|
+
const totalFiles = files.length;
|
|
195
|
+
const resultFiles = files.slice(skip, skip + pageSize);
|
|
196
|
+
let nextPageToken;
|
|
197
|
+
if (skip + pageSize < totalFiles) {
|
|
198
|
+
const nextSkip = skip + pageSize;
|
|
199
|
+
nextPageToken = Buffer.from(JSON.stringify({ skip: nextSkip })).toString('base64');
|
|
200
|
+
}
|
|
175
201
|
res.json({
|
|
176
202
|
kind: "drive#fileList",
|
|
177
203
|
incompleteSearch: false,
|
|
178
|
-
files:
|
|
204
|
+
files: resultFiles,
|
|
205
|
+
nextPageToken
|
|
179
206
|
});
|
|
180
207
|
});
|
|
181
208
|
// Changes: Get Start Page Token
|
|
@@ -419,8 +446,10 @@ const createV3Router = () => {
|
|
|
419
446
|
res.status(400).send("Invalid file ID");
|
|
420
447
|
return;
|
|
421
448
|
}
|
|
422
|
-
const updates = req.body;
|
|
423
|
-
|
|
449
|
+
const updates = req.body || {};
|
|
450
|
+
const hasBody = Object.keys(updates).length > 0;
|
|
451
|
+
const hasQueryParams = req.query.addParents || req.query.removeParents;
|
|
452
|
+
if (!hasBody && !hasQueryParams) {
|
|
424
453
|
res.status(400).json({ error: { code: 400, message: "Bad Request: No updates provided" } });
|
|
425
454
|
return;
|
|
426
455
|
}
|
|
@@ -429,6 +458,27 @@ const createV3Router = () => {
|
|
|
429
458
|
res.status(404).json({ error: { code: 404, message: "File not found" } });
|
|
430
459
|
return;
|
|
431
460
|
}
|
|
461
|
+
const addParents = req.query.addParents;
|
|
462
|
+
if (addParents) {
|
|
463
|
+
const parentsToAdd = addParents.split(',');
|
|
464
|
+
const currentParents = updatedFile.parents || [];
|
|
465
|
+
const newParents = [...new Set([...currentParents, ...parentsToAdd])]; // Union
|
|
466
|
+
// Update the file with new parents
|
|
467
|
+
const result = store_1.driveStore.updateFile(fileId, { parents: newParents });
|
|
468
|
+
if (result) {
|
|
469
|
+
Object.assign(updatedFile, result);
|
|
470
|
+
}
|
|
471
|
+
}
|
|
472
|
+
const removeParents = req.query.removeParents;
|
|
473
|
+
if (removeParents) {
|
|
474
|
+
const parentsToRemove = removeParents.split(',');
|
|
475
|
+
const currentParents = updatedFile.parents || [];
|
|
476
|
+
const newParents = currentParents.filter(p => !parentsToRemove.includes(p));
|
|
477
|
+
const result = store_1.driveStore.updateFile(fileId, { parents: newParents });
|
|
478
|
+
if (result) {
|
|
479
|
+
Object.assign(updatedFile, result);
|
|
480
|
+
}
|
|
481
|
+
}
|
|
432
482
|
res.json(updatedFile);
|
|
433
483
|
});
|
|
434
484
|
// Files: Delete
|
package/dist/store.js
CHANGED
|
@@ -81,7 +81,7 @@ class DriveStore {
|
|
|
81
81
|
}
|
|
82
82
|
// Merge updates and increment version
|
|
83
83
|
const newVersion = file.version + 1;
|
|
84
|
-
const updatedFile = Object.assign(Object.assign(Object.assign(Object.assign({}, file), updates), statsUpdates), { version: newVersion, etag: String(newVersion), modifiedTime: new Date().toISOString() });
|
|
84
|
+
const updatedFile = Object.assign(Object.assign(Object.assign(Object.assign({}, file), updates), statsUpdates), { version: newVersion, etag: String(newVersion), modifiedTime: updates.modifiedTime || new Date().toISOString() });
|
|
85
85
|
this.files.set(id, updatedFile);
|
|
86
86
|
this.addChange(updatedFile);
|
|
87
87
|
return updatedFile;
|
package/package.json
CHANGED
package/src/batch.ts
CHANGED
|
@@ -133,7 +133,8 @@ function processPart(part: BatchPart, req: Request): BatchResponse {
|
|
|
133
133
|
};
|
|
134
134
|
}
|
|
135
135
|
|
|
136
|
-
|
|
136
|
+
|
|
137
|
+
// POST Create File (Standard)
|
|
137
138
|
if (part.method === 'POST' && filesListMatch) {
|
|
138
139
|
if (!part.body || !part.body.name) {
|
|
139
140
|
return { contentId: part.contentId, statusCode: 400, body: { error: { code: 400, message: 'Name required' } } };
|
package/src/index.ts
CHANGED
|
@@ -14,7 +14,7 @@ const createApp = (config: AppConfig = {}) => {
|
|
|
14
14
|
|
|
15
15
|
const app = express();
|
|
16
16
|
app.use(cors({
|
|
17
|
-
exposedHeaders: ['ETag']
|
|
17
|
+
exposedHeaders: ['ETag', 'Date', 'Content-Length']
|
|
18
18
|
}));
|
|
19
19
|
app.set('etag', false); // Disable default ETag generation to match Real API behavior
|
|
20
20
|
|
package/src/routes/v3.ts
CHANGED
|
@@ -136,6 +136,10 @@ export const createV3Router = () => {
|
|
|
136
136
|
const timeStr = part.match(/modifiedTime < '(.*)'/)?.[1];
|
|
137
137
|
return !!(timeStr && new Date(file.modifiedTime) < new Date(timeStr));
|
|
138
138
|
}
|
|
139
|
+
if (part.startsWith("modifiedTime = '")) {
|
|
140
|
+
const timeStr = part.match(/modifiedTime = '(.*)'/)?.[1];
|
|
141
|
+
return !!(timeStr && new Date(file.modifiedTime).toISOString() === new Date(timeStr).toISOString());
|
|
142
|
+
}
|
|
139
143
|
|
|
140
144
|
// Fallback / Unknown
|
|
141
145
|
return true;
|
|
@@ -177,10 +181,35 @@ export const createV3Router = () => {
|
|
|
177
181
|
});
|
|
178
182
|
}
|
|
179
183
|
|
|
184
|
+
// Pagination
|
|
185
|
+
const pageSize = req.query.pageSize ? parseInt(req.query.pageSize as string, 10) : 100; // Default 100
|
|
186
|
+
let skip = 0;
|
|
187
|
+
if (req.query.pageToken) {
|
|
188
|
+
try {
|
|
189
|
+
const tokenJson = Buffer.from(req.query.pageToken as string, 'base64').toString('utf-8');
|
|
190
|
+
const tokenData = JSON.parse(tokenJson);
|
|
191
|
+
if (typeof tokenData.skip === 'number') {
|
|
192
|
+
skip = tokenData.skip;
|
|
193
|
+
}
|
|
194
|
+
} catch {
|
|
195
|
+
// Ignore invalid token, start from 0
|
|
196
|
+
}
|
|
197
|
+
}
|
|
198
|
+
|
|
199
|
+
const totalFiles = files.length;
|
|
200
|
+
const resultFiles = files.slice(skip, skip + pageSize);
|
|
201
|
+
|
|
202
|
+
let nextPageToken: string | undefined;
|
|
203
|
+
if (skip + pageSize < totalFiles) {
|
|
204
|
+
const nextSkip = skip + pageSize;
|
|
205
|
+
nextPageToken = Buffer.from(JSON.stringify({ skip: nextSkip })).toString('base64');
|
|
206
|
+
}
|
|
207
|
+
|
|
180
208
|
res.json({
|
|
181
209
|
kind: "drive#fileList",
|
|
182
210
|
incompleteSearch: false,
|
|
183
|
-
files:
|
|
211
|
+
files: resultFiles,
|
|
212
|
+
nextPageToken
|
|
184
213
|
});
|
|
185
214
|
});
|
|
186
215
|
|
|
@@ -474,9 +503,11 @@ export const createV3Router = () => {
|
|
|
474
503
|
res.status(400).send("Invalid file ID");
|
|
475
504
|
return;
|
|
476
505
|
}
|
|
477
|
-
const updates = req.body;
|
|
506
|
+
const updates = req.body || {};
|
|
507
|
+
const hasBody = Object.keys(updates).length > 0;
|
|
508
|
+
const hasQueryParams = req.query.addParents || req.query.removeParents;
|
|
478
509
|
|
|
479
|
-
if (!
|
|
510
|
+
if (!hasBody && !hasQueryParams) {
|
|
480
511
|
res.status(400).json({ error: { code: 400, message: "Bad Request: No updates provided" } });
|
|
481
512
|
return;
|
|
482
513
|
}
|
|
@@ -488,6 +519,29 @@ export const createV3Router = () => {
|
|
|
488
519
|
return;
|
|
489
520
|
}
|
|
490
521
|
|
|
522
|
+
const addParents = req.query.addParents as string;
|
|
523
|
+
if (addParents) {
|
|
524
|
+
const parentsToAdd = addParents.split(',');
|
|
525
|
+
const currentParents = updatedFile.parents || [];
|
|
526
|
+
const newParents = [...new Set([...currentParents, ...parentsToAdd])]; // Union
|
|
527
|
+
// Update the file with new parents
|
|
528
|
+
const result = driveStore.updateFile(fileId, { parents: newParents });
|
|
529
|
+
if (result) {
|
|
530
|
+
Object.assign(updatedFile, result);
|
|
531
|
+
}
|
|
532
|
+
}
|
|
533
|
+
|
|
534
|
+
const removeParents = req.query.removeParents as string;
|
|
535
|
+
if (removeParents) {
|
|
536
|
+
const parentsToRemove = removeParents.split(',');
|
|
537
|
+
const currentParents = updatedFile.parents || [];
|
|
538
|
+
const newParents = currentParents.filter(p => !parentsToRemove.includes(p));
|
|
539
|
+
const result = driveStore.updateFile(fileId, { parents: newParents });
|
|
540
|
+
if (result) {
|
|
541
|
+
Object.assign(updatedFile, result);
|
|
542
|
+
}
|
|
543
|
+
}
|
|
544
|
+
|
|
491
545
|
res.json(updatedFile);
|
|
492
546
|
});
|
|
493
547
|
|
package/src/store.ts
CHANGED
|
@@ -114,7 +114,7 @@ export class DriveStore {
|
|
|
114
114
|
...statsUpdates,
|
|
115
115
|
version: newVersion,
|
|
116
116
|
etag: String(newVersion),
|
|
117
|
-
modifiedTime: new Date().toISOString()
|
|
117
|
+
modifiedTime: updates.modifiedTime || new Date().toISOString()
|
|
118
118
|
};
|
|
119
119
|
this.files.set(id, updatedFile);
|
|
120
120
|
this.addChange(updatedFile);
|
|
@@ -0,0 +1,150 @@
|
|
|
1
|
+
|
|
2
|
+
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
|
|
3
|
+
import { getTestConfig, TestConfig } from './config';
|
|
4
|
+
|
|
5
|
+
describe('Batch Insert and Download Test', () => {
|
|
6
|
+
let config: TestConfig;
|
|
7
|
+
|
|
8
|
+
beforeAll(async () => {
|
|
9
|
+
config = await getTestConfig();
|
|
10
|
+
});
|
|
11
|
+
|
|
12
|
+
afterAll(() => {
|
|
13
|
+
if (config) config.stop();
|
|
14
|
+
});
|
|
15
|
+
|
|
16
|
+
// Helper from user request (adapted)
|
|
17
|
+
async function insertDocumentFiles<RxDocType>(
|
|
18
|
+
googleDriveOptions: { apiEndpoint: string, authToken: string },
|
|
19
|
+
init: { docsFolderId: string },
|
|
20
|
+
primaryPath: string,
|
|
21
|
+
docs: RxDocType[]
|
|
22
|
+
) {
|
|
23
|
+
const boundary = "batch_" + Math.random().toString(16).slice(2);
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
const parts = docs.map((doc, i) => {
|
|
28
|
+
const id = (doc as Record<string, unknown>)[primaryPath] as string;
|
|
29
|
+
const body = JSON.stringify({
|
|
30
|
+
name: id + '.json',
|
|
31
|
+
mimeType: 'application/json',
|
|
32
|
+
parents: [init.docsFolderId],
|
|
33
|
+
});
|
|
34
|
+
|
|
35
|
+
return (
|
|
36
|
+
`--${boundary}\r\n` +
|
|
37
|
+
`Content-Type: application/http\r\n` +
|
|
38
|
+
`Content-ID: <item-${i}>\r\n\r\n` +
|
|
39
|
+
`POST /drive/v3/files HTTP/1.1\r\n` +
|
|
40
|
+
`Content-Type: application/json; charset=UTF-8\r\n\r\n` +
|
|
41
|
+
`${body}\r\n`
|
|
42
|
+
);
|
|
43
|
+
});
|
|
44
|
+
const batchBody = parts.join("") + `--${boundary}--`;
|
|
45
|
+
const res = await fetch(googleDriveOptions.apiEndpoint + "/batch/drive/v3", {
|
|
46
|
+
method: "POST",
|
|
47
|
+
headers: {
|
|
48
|
+
Authorization: `Bearer ${googleDriveOptions.authToken}`,
|
|
49
|
+
"Content-Type": `multipart/mixed; boundary=${boundary}`,
|
|
50
|
+
},
|
|
51
|
+
body: batchBody,
|
|
52
|
+
});
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
if (!res.ok) {
|
|
56
|
+
const text = await res.text().catch(() => "");
|
|
57
|
+
throw new Error(`GDR13: Batch insert failed. Status: ${res.status}. Error: ${text}`);
|
|
58
|
+
}
|
|
59
|
+
const text = await res.text();
|
|
60
|
+
console.log('Batch Response:', text.substring(0, 1000)); // Log snippet
|
|
61
|
+
return text;
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
it('should insert docs using batch POST and download them', async () => {
|
|
65
|
+
const docCount = 3;
|
|
66
|
+
const docs = [];
|
|
67
|
+
for (let i = 0; i < docCount; i++) {
|
|
68
|
+
docs.push({ id: `item_${Date.now()}_${i}`, data: `Data ${i}` });
|
|
69
|
+
}
|
|
70
|
+
|
|
71
|
+
console.log('Inserting docs via batch POST...');
|
|
72
|
+
const batchResponse = await insertDocumentFiles(
|
|
73
|
+
{ apiEndpoint: config.baseUrl, authToken: config.token },
|
|
74
|
+
{ docsFolderId: config.testFolderId },
|
|
75
|
+
'id',
|
|
76
|
+
docs
|
|
77
|
+
);
|
|
78
|
+
|
|
79
|
+
// Parse IDs from batch response?
|
|
80
|
+
// The user snippet returns raw text.
|
|
81
|
+
// We usually rely on create returning the created file metadata (including ID).
|
|
82
|
+
// Let's parse the response to get the IDs.
|
|
83
|
+
|
|
84
|
+
const boundaryMatch = (batchResponse.match(/boundary=(.+)/) || [])[1];
|
|
85
|
+
let boundary = boundaryMatch;
|
|
86
|
+
// Or inspect Content-Type header from response? Ideally yes but user helper returns body text.
|
|
87
|
+
// Let's try to detect boundary from body first line if not found?
|
|
88
|
+
// But headers are gone.
|
|
89
|
+
|
|
90
|
+
// Wait, the user helper receives the Response object and returns text().
|
|
91
|
+
// We lose headers :(.
|
|
92
|
+
// Let's assume boundary from body first line.
|
|
93
|
+
const firstLine = batchResponse.trim().split(/\r?\n/)[0];
|
|
94
|
+
if (firstLine.startsWith('--')) {
|
|
95
|
+
boundary = firstLine.substring(2).trim();
|
|
96
|
+
} else {
|
|
97
|
+
// Maybe no boundary in body if empty? unlikely for batch.
|
|
98
|
+
}
|
|
99
|
+
|
|
100
|
+
if (!boundary) throw new Error('Could not detect boundary in batch response');
|
|
101
|
+
|
|
102
|
+
const parts = batchResponse.split(`--${boundary}`);
|
|
103
|
+
const createdIds: string[] = [];
|
|
104
|
+
|
|
105
|
+
for (const part of parts) {
|
|
106
|
+
if (!part.trim() || part.trim() === '--') continue;
|
|
107
|
+
// Find JSON body
|
|
108
|
+
const jsonStart = part.indexOf('{');
|
|
109
|
+
const jsonEnd = part.lastIndexOf('}');
|
|
110
|
+
if (jsonStart !== -1 && jsonEnd !== -1) {
|
|
111
|
+
try {
|
|
112
|
+
const jsonStr = part.substring(jsonStart, jsonEnd + 1);
|
|
113
|
+
const file = JSON.parse(jsonStr);
|
|
114
|
+
if (file.id) {
|
|
115
|
+
createdIds.push(file.id);
|
|
116
|
+
}
|
|
117
|
+
|
|
118
|
+
} catch {
|
|
119
|
+
// ignore parse errors
|
|
120
|
+
}
|
|
121
|
+
}
|
|
122
|
+
}
|
|
123
|
+
|
|
124
|
+
expect(createdIds.length).toBe(docCount);
|
|
125
|
+
console.log(`Created ${createdIds.length} files. Downloading...`);
|
|
126
|
+
|
|
127
|
+
for (const fileId of createdIds) {
|
|
128
|
+
const downloadUrl = `${config.baseUrl}/drive/v3/files/${encodeURIComponent(fileId)}?alt=media&supportsAllDrives=true`;
|
|
129
|
+
// console.log(`Downloading ${fileId} from ${downloadUrl}`);
|
|
130
|
+
|
|
131
|
+
const res = await fetch(downloadUrl, {
|
|
132
|
+
headers: { Authorization: `Bearer ${config.token}` }
|
|
133
|
+
});
|
|
134
|
+
|
|
135
|
+
if (!res.ok) {
|
|
136
|
+
throw new Error(`Download failed for ${fileId}: ${res.status} ${await res.text()}`);
|
|
137
|
+
}
|
|
138
|
+
|
|
139
|
+
|
|
140
|
+
const text = await res.text();
|
|
141
|
+
// console.log(`Content for ${fileId}:`, text);
|
|
142
|
+
|
|
143
|
+
// Expect empty content for metadata-only insert?
|
|
144
|
+
// Or maybe Drive defaults to empty JSON object '{}'?
|
|
145
|
+
// Real API behavior for empty file created via metadata POST: 0 bytes.
|
|
146
|
+
expect(text).toBe('');
|
|
147
|
+
}
|
|
148
|
+
|
|
149
|
+
}, 30000);
|
|
150
|
+
});
|
|
@@ -41,13 +41,12 @@ export async function batchFetchDocumentContentsRaw(
|
|
|
41
41
|
}
|
|
42
42
|
|
|
43
43
|
// This will be a multipart/mixed body that you must parse yourself.
|
|
44
|
-
|
|
44
|
+
const text = await res.text();
|
|
45
|
+
console.log('############################# text:');
|
|
46
|
+
console.log(text);
|
|
47
|
+
return text;
|
|
45
48
|
}
|
|
46
49
|
|
|
47
|
-
/**
|
|
48
|
-
* Parses a multipart/mixed response body from Google Drive Batch API.
|
|
49
|
-
* Returns an array of objects containing { status, headers, body }.
|
|
50
|
-
*/
|
|
51
50
|
/**
|
|
52
51
|
* Parses a multipart/mixed response body from Google Drive Batch API.
|
|
53
52
|
* Returns an array of objects containing { status, headers, body }.
|
|
@@ -135,7 +134,15 @@ describe('Batch Fetch Test', () => {
|
|
|
135
134
|
if (config) config.stop();
|
|
136
135
|
});
|
|
137
136
|
|
|
138
|
-
|
|
137
|
+
|
|
138
|
+
interface TestContent {
|
|
139
|
+
index: number;
|
|
140
|
+
timestamp: number;
|
|
141
|
+
msg: string;
|
|
142
|
+
random: number;
|
|
143
|
+
}
|
|
144
|
+
|
|
145
|
+
async function uploadJsonFile(name: string, content: TestContent): Promise<string> {
|
|
139
146
|
const metadata = {
|
|
140
147
|
name: name,
|
|
141
148
|
parents: [config.testFolderId],
|
|
@@ -175,13 +182,13 @@ describe('Batch Fetch Test', () => {
|
|
|
175
182
|
it('should fetch content of many files in a single batch request', async () => {
|
|
176
183
|
const fileCount = 5;
|
|
177
184
|
const fileIds: string[] = [];
|
|
178
|
-
const expectedContents: Record<string,
|
|
185
|
+
const expectedContents: Record<string, TestContent> = {};
|
|
179
186
|
|
|
180
187
|
console.log(`Creating ${fileCount} files...`);
|
|
181
188
|
|
|
182
189
|
for (let i = 0; i < fileCount; i++) {
|
|
183
190
|
const fileName = `BatchFile_${i}_${Date.now()}.json`;
|
|
184
|
-
const content = { index: i, timestamp: Date.now(), msg: `Hello World ${i}`, random: Math.random() };
|
|
191
|
+
const content: TestContent = { index: i, timestamp: Date.now(), msg: `Hello World ${i}`, random: Math.random() };
|
|
185
192
|
|
|
186
193
|
const id = await uploadJsonFile(fileName, content);
|
|
187
194
|
fileIds.push(id);
|
|
@@ -215,7 +222,7 @@ describe('Batch Fetch Test', () => {
|
|
|
215
222
|
expect(parsedResults.length).toBe(fileCount);
|
|
216
223
|
|
|
217
224
|
for (const result of parsedResults) {
|
|
218
|
-
let content = result.body;
|
|
225
|
+
let content = result.body as TestContent;
|
|
219
226
|
|
|
220
227
|
|
|
221
228
|
// All environments (Mock and Real) must return 302 Redirect for alt=media in batch
|
|
@@ -236,7 +243,7 @@ describe('Batch Fetch Test', () => {
|
|
|
236
243
|
});
|
|
237
244
|
|
|
238
245
|
if (!res.ok) throw new Error(`Failed to follow redirect: ${res.status} ${await res.text()}`);
|
|
239
|
-
content = await res.json();
|
|
246
|
+
content = await res.json() as TestContent;
|
|
240
247
|
|
|
241
248
|
|
|
242
249
|
const expected = Object.values(expectedContents).find(c => c.index === content.index);
|
|
@@ -319,7 +319,6 @@ describe('Date Updates and Sorting', () => {
|
|
|
319
319
|
throw new Error(`Create failed: ${createRes.status} ${text}`);
|
|
320
320
|
}
|
|
321
321
|
const file = await createRes.json();
|
|
322
|
-
console.log('Explicit Time Check File:', JSON.stringify(file, null, 2));
|
|
323
322
|
if (!file.modifiedTime) throw new Error('modifiedTime missing from response');
|
|
324
323
|
const modifiedTimeCreate = new Date(file.modifiedTime).getTime();
|
|
325
324
|
|
|
@@ -468,7 +467,6 @@ describe('Date Updates and Sorting', () => {
|
|
|
468
467
|
body: 'Time Check Separate'
|
|
469
468
|
});
|
|
470
469
|
const file = await createRes.json();
|
|
471
|
-
console.log('Explicit Time Check File:', JSON.stringify(file, null, 2));
|
|
472
470
|
if (!file.modifiedTime) throw new Error('modifiedTime missing from response');
|
|
473
471
|
const modifiedTimeCreate = new Date(file.modifiedTime).getTime();
|
|
474
472
|
|
|
@@ -0,0 +1,408 @@
|
|
|
1
|
+
import { describe, it, expect, beforeAll } from 'vitest';
|
|
2
|
+
import {
|
|
3
|
+
getTestConfig,
|
|
4
|
+
TestConfig
|
|
5
|
+
} from './config';
|
|
6
|
+
import { DriveFile } from '../src/store';
|
|
7
|
+
|
|
8
|
+
const randomString = () => Math.random().toString(36).substring(7);
|
|
9
|
+
|
|
10
|
+
const createFileWithContent = async (name: string, content: string, config: TestConfig) => {
|
|
11
|
+
const res = await fetch(`${config.baseUrl}/upload/drive/v3/files?uploadType=media`, {
|
|
12
|
+
method: 'POST',
|
|
13
|
+
headers: {
|
|
14
|
+
'Authorization': `Bearer ${config.token}`,
|
|
15
|
+
'Content-Type': 'text/plain'
|
|
16
|
+
},
|
|
17
|
+
body: content
|
|
18
|
+
});
|
|
19
|
+
const file = await res.json();
|
|
20
|
+
// V3 standard upload might not set name in body for media upload if not multipart.
|
|
21
|
+
// But store.createFile handles it.
|
|
22
|
+
// To be safe and ensure name is set as expected for query (though create with media upload sets name to Untitled usually),
|
|
23
|
+
// let's update it or use multipart.
|
|
24
|
+
// actually, let's just use the patch to set name/metadata to ensure it's correct for the test.
|
|
25
|
+
|
|
26
|
+
// Better: use multipart or just update after create.
|
|
27
|
+
await fetch(`${config.baseUrl}/drive/v3/files/${file.id}`, {
|
|
28
|
+
method: 'PATCH',
|
|
29
|
+
headers: {
|
|
30
|
+
'Authorization': `Bearer ${config.token}`,
|
|
31
|
+
'Content-Type': 'application/json'
|
|
32
|
+
},
|
|
33
|
+
body: JSON.stringify({ name })
|
|
34
|
+
});
|
|
35
|
+
|
|
36
|
+
// Fetch again to get full fields including modifiedTime
|
|
37
|
+
const getRes = await fetch(`${config.baseUrl}/drive/v3/files/${file.id}?fields=*`, {
|
|
38
|
+
headers: { 'Authorization': `Bearer ${config.token}` }
|
|
39
|
+
});
|
|
40
|
+
return await getRes.json();
|
|
41
|
+
};
|
|
42
|
+
|
|
43
|
+
describe('Iterate Changes Queries', () => {
|
|
44
|
+
let config: TestConfig;
|
|
45
|
+
let headers: Record<string, string>;
|
|
46
|
+
|
|
47
|
+
beforeAll(async () => {
|
|
48
|
+
config = await getTestConfig();
|
|
49
|
+
headers = {
|
|
50
|
+
Authorization: `Bearer ${config.token}`
|
|
51
|
+
};
|
|
52
|
+
});
|
|
53
|
+
|
|
54
|
+
it('should find files where last write time was greater than X, sorted by modifiedTime and id, with limit', async () => {
|
|
55
|
+
// Create 3 files with slight delays to ensure different modifiedTimes
|
|
56
|
+
const file1 = await createFileWithContent('file1', randomString(), config);
|
|
57
|
+
await new Promise(r => setTimeout(r, 1100)); // Ensure > 1s diff for reliable sorting if seconds resolution
|
|
58
|
+
const file2 = await createFileWithContent('file2', randomString(), config);
|
|
59
|
+
await new Promise(r => setTimeout(r, 1100));
|
|
60
|
+
const file3 = await createFileWithContent('file3', randomString(), config);
|
|
61
|
+
|
|
62
|
+
// Use file1's modifiedTime as the baseline (X)
|
|
63
|
+
const timeX = file1.modifiedTime;
|
|
64
|
+
|
|
65
|
+
// Query: modifiedTime > X, orderBy modifiedTime asc, name asc (using name as proxy for ID stability in test if needed, but user asked for ID)
|
|
66
|
+
// User asked for: Sorted by write data and id. with limit
|
|
67
|
+
const q = `modifiedTime > '${timeX}' and trashed = false`;
|
|
68
|
+
const orderBy = 'modifiedTime asc, name asc';
|
|
69
|
+
const pageSize = 1;
|
|
70
|
+
|
|
71
|
+
// First page
|
|
72
|
+
const url1 = `${config.baseUrl}/drive/v3/files?q=${encodeURIComponent(q)}&orderBy=${encodeURIComponent(orderBy)}&pageSize=${pageSize}`;
|
|
73
|
+
const res1 = await fetch(url1, { headers });
|
|
74
|
+
if (res1.status !== 200) {
|
|
75
|
+
const txt = await res1.text();
|
|
76
|
+
console.error('Error 1:', txt);
|
|
77
|
+
}
|
|
78
|
+
expect(res1.status).toBe(200);
|
|
79
|
+
const data1 = await res1.json();
|
|
80
|
+
|
|
81
|
+
expect(data1.files.length).toBe(1);
|
|
82
|
+
expect(data1.files[0].id).toBe(file2.id);
|
|
83
|
+
|
|
84
|
+
// If we want to simulate iteration, we would use nextPageToken or just offset logic if we supported it,
|
|
85
|
+
// but here we just test that the query works and LIMIT works.
|
|
86
|
+
|
|
87
|
+
// Verify we can get the next one if we increase limit
|
|
88
|
+
const url2 = `${config.baseUrl}/drive/v3/files?q=${encodeURIComponent(q)}&orderBy=${encodeURIComponent(orderBy)}&pageSize=2`;
|
|
89
|
+
const res2 = await fetch(url2, { headers });
|
|
90
|
+
const data2 = await res2.json();
|
|
91
|
+
expect(data2.files.length).toBe(2);
|
|
92
|
+
expect(data2.files[0].id).toBe(file2.id);
|
|
93
|
+
expect(data2.files[1].id).toBe(file3.id);
|
|
94
|
+
}, 60000);
|
|
95
|
+
|
|
96
|
+
it('should find all files where write time was equal to X, sorted by name, with limit', async () => {
|
|
97
|
+
// Create 3 files effectively at the "same" time.
|
|
98
|
+
// To do this reliably on Real API, we create one, get its time, and then PATCH the others to have that same time (if possible).
|
|
99
|
+
// However, Drive API might not allow arbitrary modifiedTime patching easily without setModifiedDate=true param or similar.
|
|
100
|
+
// Actually, V3 supports modifying modifiedTime.
|
|
101
|
+
|
|
102
|
+
const file1 = await createFileWithContent('file_B_middle', randomString(), config);
|
|
103
|
+
// Get the time from file1 to use as target
|
|
104
|
+
const timeXRes = await fetch(`${config.baseUrl}/drive/v3/files/${file1.id}?fields=modifiedTime`, { headers });
|
|
105
|
+
const timeX = (await timeXRes.json()).modifiedTime;
|
|
106
|
+
|
|
107
|
+
// Create two more files
|
|
108
|
+
const file2 = await createFileWithContent('file_A_first', randomString(), config);
|
|
109
|
+
const file3 = await createFileWithContent('file_C_last', randomString(), config);
|
|
110
|
+
|
|
111
|
+
// Patch file2 and file3 to have the SAME modifiedTime as file1
|
|
112
|
+
// We need to wait a bit to ensure they would naturally have different times if we didn't patch,
|
|
113
|
+
// to prove the patch worked and we are sorting by name not time.
|
|
114
|
+
await new Promise(r => setTimeout(r, 1100));
|
|
115
|
+
|
|
116
|
+
const patchBody = JSON.stringify({ modifiedTime: timeX });
|
|
117
|
+
await fetch(`${config.baseUrl}/drive/v3/files/${file2.id}`, { method: 'PATCH', headers: { ...headers, 'Content-Type': 'application/json' }, body: patchBody });
|
|
118
|
+
await fetch(`${config.baseUrl}/drive/v3/files/${file3.id}`, { method: 'PATCH', headers: { ...headers, 'Content-Type': 'application/json' }, body: patchBody });
|
|
119
|
+
|
|
120
|
+
const q = `modifiedTime = '${timeX}' and trashed = false`;
|
|
121
|
+
const orderBy = 'name asc';
|
|
122
|
+
const pageSize = 10;
|
|
123
|
+
|
|
124
|
+
const url = `${config.baseUrl}/drive/v3/files?q=${encodeURIComponent(q)}&orderBy=${encodeURIComponent(orderBy)}&pageSize=${pageSize}&fields=files(id,name,modifiedTime)`;
|
|
125
|
+
const res = await fetch(url, { headers });
|
|
126
|
+
expect(res.status).toBe(200);
|
|
127
|
+
const data = await res.json();
|
|
128
|
+
|
|
129
|
+
// Should find all 3 files
|
|
130
|
+
const relevantFiles = data.files.filter((f: DriveFile) => [file1.id, file2.id, file3.id].includes(f.id));
|
|
131
|
+
expect(relevantFiles.length).toBe(3);
|
|
132
|
+
|
|
133
|
+
// Verify they are sorted by name: A, B, C
|
|
134
|
+
expect(relevantFiles[0].name).toBe('file_A_first');
|
|
135
|
+
expect(relevantFiles[1].name).toBe('file_B_middle');
|
|
136
|
+
expect(relevantFiles[2].name).toBe('file_C_last');
|
|
137
|
+
|
|
138
|
+
// Verify times
|
|
139
|
+
relevantFiles.forEach((f: DriveFile) => {
|
|
140
|
+
if (!f.modifiedTime) {
|
|
141
|
+
console.error('Missing modifiedTime for file:', f.id, f.name);
|
|
142
|
+
}
|
|
143
|
+
expect(new Date(f.modifiedTime).toISOString()).toBe(new Date(timeX).toISOString());
|
|
144
|
+
});
|
|
145
|
+
}, 60000);
|
|
146
|
+
|
|
147
|
+
it('should find files where write time = X AND inside a specific parent folder, sorted by name', async () => {
|
|
148
|
+
// 1. Create a parent folder
|
|
149
|
+
const parentRes = await fetch(`${config.baseUrl}/drive/v3/files`, {
|
|
150
|
+
method: 'POST',
|
|
151
|
+
headers: { ...headers, 'Content-Type': 'application/json' },
|
|
152
|
+
body: JSON.stringify({
|
|
153
|
+
name: 'ParentFolder_EqualTime_' + randomString(),
|
|
154
|
+
mimeType: 'application/vnd.google-apps.folder'
|
|
155
|
+
})
|
|
156
|
+
});
|
|
157
|
+
expect(parentRes.status).toBe(200);
|
|
158
|
+
const parentId = (await parentRes.json()).id;
|
|
159
|
+
|
|
160
|
+
// 2. Create 3 files IN parent + 1 file OUTSIDE parent
|
|
161
|
+
// We want them all to have the SAME modifiedTime eventually.
|
|
162
|
+
|
|
163
|
+
// Create baseline file in parent
|
|
164
|
+
const file1 = await createFileWithContent('file_B_middle', randomString(), config);
|
|
165
|
+
// Move to parent
|
|
166
|
+
await fetch(`${config.baseUrl}/drive/v3/files/${file1.id}?addParents=${parentId}`, { method: 'PATCH', headers });
|
|
167
|
+
|
|
168
|
+
// Get target time
|
|
169
|
+
const timeXRes = await fetch(`${config.baseUrl}/drive/v3/files/${file1.id}?fields=modifiedTime`, { headers });
|
|
170
|
+
const timeX = (await timeXRes.json()).modifiedTime;
|
|
171
|
+
|
|
172
|
+
// Create other files
|
|
173
|
+
const file2 = await createFileWithContent('file_A_first', randomString(), config);
|
|
174
|
+
await fetch(`${config.baseUrl}/drive/v3/files/${file2.id}?addParents=${parentId}`, { method: 'PATCH', headers });
|
|
175
|
+
|
|
176
|
+
const file3 = await createFileWithContent('file_C_last', randomString(), config);
|
|
177
|
+
await fetch(`${config.baseUrl}/drive/v3/files/${file3.id}?addParents=${parentId}`, { method: 'PATCH', headers });
|
|
178
|
+
|
|
179
|
+
const fileOutside = await createFileWithContent('file_Outside', randomString(), config);
|
|
180
|
+
|
|
181
|
+
// DELAY to ensure natural time diff, then PATCH all to timeX
|
|
182
|
+
await new Promise(r => setTimeout(r, 1100));
|
|
183
|
+
|
|
184
|
+
const patchBody = JSON.stringify({ modifiedTime: timeX });
|
|
185
|
+
await fetch(`${config.baseUrl}/drive/v3/files/${file2.id}`, { method: 'PATCH', headers: { ...headers, 'Content-Type': 'application/json' }, body: patchBody });
|
|
186
|
+
await fetch(`${config.baseUrl}/drive/v3/files/${file3.id}`, { method: 'PATCH', headers: { ...headers, 'Content-Type': 'application/json' }, body: patchBody });
|
|
187
|
+
await fetch(`${config.baseUrl}/drive/v3/files/${fileOutside.id}`, { method: 'PATCH', headers: { ...headers, 'Content-Type': 'application/json' }, body: patchBody });
|
|
188
|
+
|
|
189
|
+
// 3. Query: modifiedTime = X AND parentId in parents
|
|
190
|
+
const q = `modifiedTime = '${timeX}' and '${parentId}' in parents and trashed = false`;
|
|
191
|
+
const orderBy = 'name asc';
|
|
192
|
+
|
|
193
|
+
const url = `${config.baseUrl}/drive/v3/files?q=${encodeURIComponent(q)}&orderBy=${encodeURIComponent(orderBy)}&fields=files(id,name,modifiedTime,parents)`;
|
|
194
|
+
const res = await fetch(url, { headers });
|
|
195
|
+
expect(res.status).toBe(200);
|
|
196
|
+
const data = await res.json();
|
|
197
|
+
|
|
198
|
+
// 4. Verify results
|
|
199
|
+
// Should find file1, file2, file3
|
|
200
|
+
// Should NOT find fileOutside
|
|
201
|
+
const ids = data.files.map((f: DriveFile) => f.id);
|
|
202
|
+
expect(ids).toContain(file1.id);
|
|
203
|
+
expect(ids).toContain(file2.id);
|
|
204
|
+
expect(ids).toContain(file3.id);
|
|
205
|
+
expect(ids).not.toContain(fileOutside.id);
|
|
206
|
+
expect(data.files.length).toBe(3);
|
|
207
|
+
|
|
208
|
+
// Verify Sort Order
|
|
209
|
+
expect(data.files[0].name).toBe('file_A_first');
|
|
210
|
+
expect(data.files[1].name).toBe('file_B_middle');
|
|
211
|
+
expect(data.files[2].name).toBe('file_C_last');
|
|
212
|
+
|
|
213
|
+
}, 60000);
|
|
214
|
+
|
|
215
|
+
it('should iterate via changes tokens with specific fields', async () => {
  // Mirrors a real client's changes-polling loop: fetch a start page
  // token, mutate the drive, then list changes with the same URL
  // parameters a consumer would pass (pageToken, pageSize,
  // includeItemsFromAllDrives, supportsAllDrives, includeRemoved, and a
  // restricted `fields` projection).

  // 1. Get a start page token to anchor the changes cursor before any
  //    of this test's mutations happen.
  const startTokenUrl = `${config.baseUrl}/drive/v3/changes/startPageToken?supportsAllDrives=true`;
  const startTokenRes = await fetch(startTokenUrl, { headers });
  expect(startTokenRes.status).toBe(200);
  const startTokenData = await startTokenRes.json();
  const startPageToken = startTokenData.startPageToken;
  expect(startPageToken).toBeDefined();

  // 2. Make some changes: create a file, then trash it. The trash
  //    exercises the includeRemoved/removed handling below.
  const file1 = await createFileWithContent('change-file-1', randomString(), config);
  await new Promise(r => setTimeout(r, 1000));

  // Trash a file to test includeRemoved/removed field
  const trashRes = await fetch(`${config.baseUrl}/drive/v3/files/${file1.id}`, {
    method: 'PATCH',
    headers: { ...headers, 'Content-Type': 'application/json' },
    body: JSON.stringify({ trashed: true })
  });
  expect(trashRes.status).toBe(200);

  // 3. List changes since the token taken in step 1.
  const params = new URLSearchParams({
    pageToken: startPageToken,
    pageSize: "10",
    includeItemsFromAllDrives: "true",
    supportsAllDrives: "true",
    includeRemoved: "true",
    fields: "changes(fileId,removed,file(id,name,parents,trashed)),nextPageToken,newStartPageToken"
  });

  const listUrl = `${config.baseUrl}/drive/v3/changes?${params.toString()}`;
  const listRes = await fetch(listUrl, { headers });

  // Surface the server's error body before the assertion fails, for
  // easier debugging of a non-200 response.
  if (listRes.status !== 200) {
    console.error('Changes List Error:', await listRes.text());
  }
  expect(listRes.status).toBe(200);
  const data = await listRes.json();

  expect(data.changes).toBeDefined();
  // We expect at least the creation and trash of file1.
  // Note: Real API might batch them or show multiple entries.
  // We just verify structure and presence of fields requested.

  if (data.changes.length > 0) {
    const change = data.changes[0];
    expect(change.fileId).toBeDefined();
    // removed can be boolean
    expect(change.removed).toBeDefined();
    // When the change is not a removal, the projected file fields
    // requested via `fields` must be present.
    if (!change.removed && change.file) {
      const file = change.file as DriveFile;
      expect(file.id).toBeDefined();
      expect(file.name).toBeDefined();
    }
  }
}, 60000);
|
|
282
|
+
|
|
283
|
+
it('should find files where write time > X AND inside a specific parent folder', async () => {
  // 1. Create a parent folder
  const parentRes = await fetch(`${config.baseUrl}/drive/v3/files`, {
    method: 'POST',
    headers: { ...headers, 'Content-Type': 'application/json' },
    body: JSON.stringify({
      name: 'ParentFolder_' + randomString(),
      mimeType: 'application/vnd.google-apps.folder'
    })
  });
  expect(parentRes.status).toBe(200);
  const parent = await parentRes.json();
  const parentId = parent.id;

  // 2. Create 3 files:
  //    - One IN parent, modified OLD (file1)
  //    - One IN parent, modified NEW (file2)
  //    - One OUTSIDE parent, modified NEW (file3)
  // "Old" and "New" are produced by creating them sequentially with
  // real delays, so no explicit modifiedTime patching is needed.

  // File 1: In parent, first.
  const file1 = await createFileWithContent('FileInParentOld', 'content1', config);
  // Move to parent
  const moveRes1 = await fetch(`${config.baseUrl}/drive/v3/files/${file1.id}?addParents=${parentId}`, {
    method: 'PATCH', headers
  });
  expect(moveRes1.status).toBe(200);

  await new Promise(r => setTimeout(r, 1100));

  // file1's modifiedTime is our checkpoint X. The query is strictly
  // `> X`, so file1 itself (time <= X) must not match.
  const timeXRes = await fetch(`${config.baseUrl}/drive/v3/files/${file1.id}?fields=modifiedTime`, { headers });
  const timeXJson = await timeXRes.json();
  const timeX = timeXJson.modifiedTime;

  // Second delay guarantees file2/file3 get timestamps strictly after X.
  await new Promise(r => setTimeout(r, 1100));

  // File 2: In parent, NEW (should be found)
  const file2 = await createFileWithContent('FileInParentNew', 'content2', config);
  const moveRes2 = await fetch(`${config.baseUrl}/drive/v3/files/${file2.id}?addParents=${parentId}`, {
    method: 'PATCH', headers
  });
  expect(moveRes2.status).toBe(200);

  // File 3: Outside parent, NEW (should NOT be found)
  await createFileWithContent('FileOutsideNew', 'content3', config);
  // (Default parent or root, explicitly not our parentId)

  // 3. Query: modifiedTime > X AND 'parentId' in parents
  const q = `modifiedTime > '${timeX}' and '${parentId}' in parents and trashed = false`;
  const orderBy = 'modifiedTime asc, name asc';

  const url = `${config.baseUrl}/drive/v3/files?q=${encodeURIComponent(q)}&orderBy=${encodeURIComponent(orderBy)}&fields=files(id,name,parents,modifiedTime)`;
  const res = await fetch(url, { headers });
  expect(res.status).toBe(200);
  const data = await res.json();

  const matchingFiles = data.files.filter((f: DriveFile) => f.id === file2.id);
  const nonMatchingFile1 = data.files.filter((f: DriveFile) => f.id === file1.id);

  // Should find file2 (newer than X, inside parent)
  expect(matchingFiles.length).toBe(1);
  expect(matchingFiles[0].id).toBe(file2.id);

  // Should NOT find file1 (too old)
  expect(nonMatchingFile1.length).toBe(0);

  // Should NOT find file3 (wrong parent); file3's id was not captured,
  // so look it up by its unique name instead.
  const file3Found = data.files.find((f: DriveFile) => f.name === 'FileOutsideNew');
  expect(file3Found).toBeUndefined();

}, 60000);
|
|
360
|
+
|
|
361
|
+
it('should paginate through files using nextPageToken', async () => {
|
|
362
|
+
// Create files
|
|
363
|
+
const totalFiles = 6;
|
|
364
|
+
const baseName = 'PaginatedFile_' + randomString();
|
|
365
|
+
for (let i = 0; i < totalFiles; i++) {
|
|
366
|
+
await createFileWithContent(`${baseName}_${i}`, `content_${i}`, config);
|
|
367
|
+
// Small delay to ensure order if we sort by time, but we'll sort by name to be deterministic
|
|
368
|
+
}
|
|
369
|
+
|
|
370
|
+
const q = `name contains '${baseName}' and trashed = false`;
|
|
371
|
+
const orderBy = 'name asc';
|
|
372
|
+
const pageSize = 2;
|
|
373
|
+
const collectedFiles: DriveFile[] = [];
|
|
374
|
+
let pageToken: string | undefined;
|
|
375
|
+
|
|
376
|
+
// Iterate pages until no token
|
|
377
|
+
do {
|
|
378
|
+
const url: string = `${config.baseUrl}/drive/v3/files?q=${encodeURIComponent(q)}&orderBy=${encodeURIComponent(orderBy)}&pageSize=${pageSize}` + (pageToken ? `&pageToken=${pageToken}` : '');
|
|
379
|
+
const res = await fetch(url, { headers });
|
|
380
|
+
|
|
381
|
+
if (res.status !== 200) {
|
|
382
|
+
console.error('Pagination Error:', await res.text());
|
|
383
|
+
}
|
|
384
|
+
expect(res.status).toBe(200);
|
|
385
|
+
const data = await res.json();
|
|
386
|
+
|
|
387
|
+
if (data.files) {
|
|
388
|
+
collectedFiles.push(...data.files);
|
|
389
|
+
}
|
|
390
|
+
pageToken = data.nextPageToken;
|
|
391
|
+
|
|
392
|
+
// Safety break to prevent infinite loops if API is broken
|
|
393
|
+
if (collectedFiles.length > totalFiles + 10) break;
|
|
394
|
+
} while (pageToken);
|
|
395
|
+
|
|
396
|
+
// Verify total
|
|
397
|
+
// Note: Drive API matching is eventually consistent.
|
|
398
|
+
// We might need to retry or wait if count is not yet totalFiles,
|
|
399
|
+
// but since we created them with delays, it usually works.
|
|
400
|
+
// If it flakes on count, we might need a retry loop wrapper around the whole test or query.
|
|
401
|
+
expect(collectedFiles.length).toBe(totalFiles);
|
|
402
|
+
|
|
403
|
+
// Verify unique IDs
|
|
404
|
+
const ids = new Set(collectedFiles.map(f => f.id));
|
|
405
|
+
expect(ids.size).toBe(totalFiles);
|
|
406
|
+
|
|
407
|
+
}, 120000); // 25 files creation might take a bit
|
|
408
|
+
});
|
|
@@ -0,0 +1,138 @@
|
|
|
1
|
+
|
|
2
|
+
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
|
|
3
|
+
import { getTestConfig, TestConfig } from './config';
|
|
4
|
+
|
|
5
|
+
describe('Parallel Content Update Test', () => {
  let config: TestConfig;

  beforeAll(async () => {
    config = await getTestConfig();
  });

  afterAll(() => {
    if (config) config.stop();
  });

  /**
   * Updates the content of many Drive files with bounded concurrency.
   * (Helper adapted from a user-reported usage pattern.)
   *
   * A fixed pool of `concurrency` workers drains a shared queue of docs.
   * Each worker PATCHes `/upload/drive/v3/files/{fileId}?uploadType=media`
   * with the doc serialized as JSON. Queue access is safe here because
   * the length-check and shift happen synchronously, with no await in
   * between.
   *
   * @param googleDriveOptions API endpoint + bearer token.
   * @param primaryPath Property name on each doc that holds its id.
   * @param docs Documents to write.
   * @param fileIdByDocId Maps doc id -> Drive file id to update.
   * @param concurrency Number of parallel workers (default 5).
   * @returns Map of doc id -> `{ id }` response from Drive.
   * @throws Error if a doc has no mapped file id or a PATCH fails.
   */
  async function updateDocumentFiles<DocType>(
    googleDriveOptions: { apiEndpoint: string, authToken: string },
    primaryPath: string,
    docs: DocType[],
    fileIdByDocId: Record<string, string>,
    concurrency = 5
  ) {
    const queue = [...docs];
    const results: Record<string, { id: string }> = {};

    async function worker() {
      while (queue.length) {
        const doc = queue.shift()!;

        const docId = (doc as Record<string, unknown>)[primaryPath] as string;
        const fileId = fileIdByDocId[docId];

        if (!fileId) throw new Error(`File ID not found for doc ${docId}`);

        const url =
          googleDriveOptions.apiEndpoint +
          `/upload/drive/v3/files/${encodeURIComponent(fileId)}` +
          `?uploadType=media&supportsAllDrives=true&fields=id`;

        const res = await fetch(url, {
          method: "PATCH",
          headers: {
            Authorization: `Bearer ${googleDriveOptions.authToken}`,
            "Content-Type": "application/json; charset=UTF-8",
          },
          body: JSON.stringify(doc),
        });

        if (!res.ok) {
          // Best-effort read of the error body for the message.
          const text = await res.text().catch(() => "");
          throw new Error(`GDR15: Update failed for ${docId}. Status: ${res.status}. Error: ${text}`);
        }

        results[docId] = await res.json(); // { id }
      }
    }

    await Promise.all(Array.from({ length: concurrency }, () => worker()));
    return results;
  }


  /**
   * Creates a JSON file in the configured test folder via a
   * multipart/related upload (metadata part + content part).
   *
   * @param name File name.
   * @param content Value serialized with JSON.stringify as the file body.
   * @returns The new file's id.
   * @throws Error when the upload response is not ok.
   */
  async function createFile(name: string, content: unknown): Promise<string> {
    const metadata = {
      name: name,
      parents: [config.testFolderId],
      mimeType: 'application/json'
    };
    // Date.now() keeps the boundary unlikely to collide with body text.
    const multipartBoundary = '-------TestBoundary' + Date.now();
    const delimiter = '\r\n--' + multipartBoundary + '\r\n';
    const closeDelim = '\r\n--' + multipartBoundary + '--';

    const body = delimiter +
      'Content-Type: application/json\r\n\r\n' +
      JSON.stringify(metadata) +
      delimiter +
      'Content-Type: application/json\r\n\r\n' +
      JSON.stringify(content) +
      closeDelim;

    const res = await fetch(`${config.baseUrl}/upload/drive/v3/files?uploadType=multipart&fields=id`, {
      method: 'POST',
      headers: {
        Authorization: 'Bearer ' + config.token,
        'Content-Type': 'multipart/related; boundary="' + multipartBoundary + '"'
      },
      body: body
    });
    if (!res.ok) throw new Error(`Create failed: ${res.status}`);
    const data = await res.json();
    return data.id;
  }

  it('should update file contents in parallel', async () => {
    const docCount = 5;
    const docs = [];
    const fileIdByDocId: Record<string, string> = {};

    // 1. Create initial files (sequentially, with placeholder content)
    console.log(`Creating ${docCount} initial files...`);
    for (let i = 0; i < docCount; i++) {
      const docId = `doc_${Date.now()}_${i}`;
      const initialContent = { id: docId, data: 'initial' };
      const fileName = `${docId}.json`;
      const fileId = await createFile(fileName, initialContent);
      fileIdByDocId[docId] = fileId;
      // The replacement content each file should hold after the update.
      docs.push({ id: docId, data: 'updated_' + i, random: Math.random() });
    }

    // 2. Run parallel update
    console.log('Running parallel updates...');
    await updateDocumentFiles(
      { apiEndpoint: config.baseUrl, authToken: config.token },
      'id',
      docs,
      fileIdByDocId,
      3 // Concurrency
    );

    // 3. Verify updates: download each file (alt=media) and compare.
    console.log('Verifying content...');
    for (const doc of docs) {
      const fileId = fileIdByDocId[doc.id];
      const url = `${config.baseUrl}/drive/v3/files/${fileId}?alt=media`;
      const res = await fetch(url, {
        headers: { Authorization: `Bearer ${config.token}` }
      });
      if (!res.ok) throw new Error(`Download failed: ${res.status}`);
      const downloadedContent = await res.json();

      // Note: Drive might not return exact JSON identical if it adds properties?
      // Usually strict JSON equality works.
      expect(downloadedContent).toEqual(doc);
    }

  }, 30000);
});
|
|
@@ -0,0 +1,76 @@
|
|
|
1
|
+
|
|
2
|
+
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
|
|
3
|
+
import { getTestConfig, TestConfig } from './config';
|
|
4
|
+
|
|
5
|
+
describe('URL Parameters Test', () => {
|
|
6
|
+
let config: TestConfig;
|
|
7
|
+
|
|
8
|
+
beforeAll(async () => {
|
|
9
|
+
config = await getTestConfig();
|
|
10
|
+
});
|
|
11
|
+
|
|
12
|
+
afterAll(() => {
|
|
13
|
+
if (config) config.stop();
|
|
14
|
+
});
|
|
15
|
+
|
|
16
|
+
it('should download file content using alt=media and supportsAllDrives=true', async () => {
|
|
17
|
+
const fileName = `UrlParamTest_${Date.now()}.json`;
|
|
18
|
+
const content = { msg: 'Hello World', timestamp: Date.now() };
|
|
19
|
+
|
|
20
|
+
// 1. Upload File
|
|
21
|
+
const metadata = {
|
|
22
|
+
name: fileName,
|
|
23
|
+
parents: [config.testFolderId],
|
|
24
|
+
mimeType: 'application/json'
|
|
25
|
+
};
|
|
26
|
+
|
|
27
|
+
const multipartBoundary = '-------TestBoundary' + Date.now();
|
|
28
|
+
const delimiter = '\r\n--' + multipartBoundary + '\r\n';
|
|
29
|
+
const closeDelim = '\r\n--' + multipartBoundary + '--';
|
|
30
|
+
|
|
31
|
+
const body = delimiter +
|
|
32
|
+
'Content-Type: application/json\r\n\r\n' +
|
|
33
|
+
JSON.stringify(metadata) +
|
|
34
|
+
delimiter +
|
|
35
|
+
'Content-Type: application/json\r\n\r\n' +
|
|
36
|
+
JSON.stringify(content) +
|
|
37
|
+
closeDelim;
|
|
38
|
+
|
|
39
|
+
const uploadUrl = `${config.baseUrl}/upload/drive/v3/files?uploadType=multipart&fields=id`;
|
|
40
|
+
const uploadRes = await fetch(uploadUrl, {
|
|
41
|
+
method: 'POST',
|
|
42
|
+
headers: {
|
|
43
|
+
Authorization: 'Bearer ' + config.token,
|
|
44
|
+
'Content-Type': 'multipart/related; boundary="' + multipartBoundary + '"'
|
|
45
|
+
},
|
|
46
|
+
body: body
|
|
47
|
+
});
|
|
48
|
+
|
|
49
|
+
if (!uploadRes.ok) {
|
|
50
|
+
throw new Error(`Failed to upload file. Status: ${uploadRes.status} ${await uploadRes.text()}`);
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
const data = await uploadRes.json();
|
|
54
|
+
const fileId = data.id;
|
|
55
|
+
expect(fileId).toBeDefined();
|
|
56
|
+
|
|
57
|
+
// 2. Download with specific URL parameters
|
|
58
|
+
const downloadUrl = `${config.baseUrl}/drive/v3/files/${encodeURIComponent(fileId)}?alt=media&supportsAllDrives=true`;
|
|
59
|
+
console.log('Downloading from:', downloadUrl);
|
|
60
|
+
|
|
61
|
+
const res = await fetch(downloadUrl, {
|
|
62
|
+
method: 'GET',
|
|
63
|
+
headers: {
|
|
64
|
+
Authorization: 'Bearer ' + config.token
|
|
65
|
+
}
|
|
66
|
+
});
|
|
67
|
+
|
|
68
|
+
if (!res.ok) {
|
|
69
|
+
throw new Error(`Failed to download file. Status: ${res.status} ${await res.text()}`);
|
|
70
|
+
}
|
|
71
|
+
|
|
72
|
+
const downloadedContent = await res.json();
|
|
73
|
+
expect(downloadedContent).toEqual(content);
|
|
74
|
+
|
|
75
|
+
}, 30000);
|
|
76
|
+
});
|