vscode-apollo 2.0.1 → 2.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.circleci/config.yml +1 -1
- package/.vscode/launch.json +5 -1
- package/CHANGELOG.md +41 -0
- package/package.json +9 -4
- package/renovate.json +2 -1
- package/sampleWorkspace/localSchema/src/test.js +3 -0
- package/sampleWorkspace/rover/apollo.config.js +3 -0
- package/sampleWorkspace/rover/src/test.graphql +14 -0
- package/sampleWorkspace/rover/src/test.js +30 -0
- package/sampleWorkspace/sampleWorkspace.code-workspace +25 -19
- package/src/language-server/__tests__/document.test.ts +161 -3
- package/src/language-server/__tests__/fixtures/TypeScript.tmLanguage.json +5749 -0
- package/src/language-server/__tests__/fixtures/documents/commentWithTemplate.ts +41 -0
- package/src/language-server/__tests__/fixtures/documents/commentWithTemplate.ts.snap +185 -0
- package/src/language-server/__tests__/fixtures/documents/functionCall.ts +93 -0
- package/src/language-server/__tests__/fixtures/documents/functionCall.ts.snap +431 -0
- package/src/language-server/__tests__/fixtures/documents/taggedTemplate.ts +80 -0
- package/src/language-server/__tests__/fixtures/documents/taggedTemplate.ts.snap +353 -0
- package/src/language-server/__tests__/fixtures/documents/templateWithComment.ts +38 -0
- package/src/language-server/__tests__/fixtures/documents/templateWithComment.ts.snap +123 -0
- package/src/language-server/config/__tests__/loadConfig.ts +28 -16
- package/src/language-server/config/config.ts +50 -12
- package/src/language-server/config/loadConfig.ts +2 -1
- package/src/language-server/config/which.d.ts +19 -0
- package/src/language-server/document.ts +86 -53
- package/src/language-server/fileSet.ts +8 -6
- package/src/language-server/project/base.ts +64 -315
- package/src/language-server/project/client.ts +731 -21
- package/src/language-server/project/internal.ts +354 -0
- package/src/language-server/project/rover/DocumentSynchronization.ts +385 -0
- package/src/language-server/project/rover/__tests__/DocumentSynchronization.test.ts +302 -0
- package/src/language-server/project/rover/project.ts +341 -0
- package/src/language-server/server.ts +187 -98
- package/src/language-server/utilities/__tests__/source.test.ts +162 -0
- package/src/language-server/utilities/languageIdForExtension.ts +39 -0
- package/src/language-server/utilities/source.ts +38 -3
- package/src/language-server/workspace.ts +61 -12
- package/src/languageServerClient.ts +13 -15
- package/src/tools/utilities/getLanguageInformation.ts +41 -0
- package/src/tools/utilities/languageInformation.ts +41 -0
- package/syntaxes/graphql.js.json +18 -21
- package/src/language-server/languageProvider.ts +0 -795

package/src/language-server/project/rover/DocumentSynchronization.ts
@@ -0,0 +1,385 @@
+import { extractGraphQLSources } from "../../document";
+import {
+  ProtocolNotificationType,
+  DidChangeTextDocumentNotification,
+  DidOpenTextDocumentNotification,
+  DidCloseTextDocumentNotification,
+  TextDocumentPositionParams,
+  Diagnostic,
+  NotificationHandler,
+  PublishDiagnosticsParams,
+  SemanticTokensRequest,
+  ProtocolRequestType,
+  SemanticTokensParams,
+  SemanticTokens,
+  CancellationToken,
+} from "vscode-languageserver-protocol";
+import { TextDocument } from "vscode-languageserver-textdocument";
+import { DocumentUri, GraphQLProject } from "../base";
+import { generateKeyBetween } from "fractional-indexing";
+import { Source } from "graphql";
+import {
+  findContainedSourceAndPosition,
+  rangeInContainingDocument,
+} from "../../utilities/source";
+import { URI } from "vscode-uri";
+import { DEBUG } from "./project";
+
+export interface FilePart {
+  fractionalIndex: string;
+  source: Source;
+  diagnostics: Diagnostic[];
+}
+
+export function handleFilePartUpdates(
+  parsed: ReadonlyArray<Source>,
+  previousParts: ReadonlyArray<FilePart>,
+): ReadonlyArray<FilePart> {
+  const newParts: FilePart[] = [];
+  let newIdx = 0;
+  let oldIdx = 0;
+  let offsetCorrection = 0;
+  while (newIdx < parsed.length || oldIdx < previousParts.length) {
+    const source = parsed[newIdx] as Source | undefined;
+    const oldPart = previousParts[oldIdx] as FilePart | undefined;
+    if (!source) return newParts;
+    const newOffset = source.locationOffset.line;
+
+    if (
+      oldPart &&
+      (source.body === oldPart.source.body ||
+        newOffset === oldPart.source.locationOffset.line + offsetCorrection)
+    ) {
+      // replacement of chunk
+      newParts.push({ ...oldPart, source });
+      offsetCorrection =
+        source.locationOffset.line - oldPart.source.locationOffset.line;
+      newIdx++;
+      oldIdx++;
+    } else if (
+      !oldPart ||
+      newOffset < oldPart.source.locationOffset.line + offsetCorrection
+    ) {
+      // inserted chunk
+      const fractionalIndex = generateKeyBetween(
+        newParts.length == 0
+          ? null
+          : newParts[newParts.length - 1].fractionalIndex,
+        oldPart ? oldPart.fractionalIndex : null,
+      );
+      newParts.push({ source, fractionalIndex, diagnostics: [] });
+      newIdx++;
+      offsetCorrection += source.body.split("\n").length - 1;
+    } else {
+      // deleted chunk
+      oldIdx++;
+    }
+  }
+  return newParts;
+}
+
+function getUri(document: TextDocument, part: FilePart) {
+  let uri = URI.parse(part.source.name);
+  if (document.languageId !== "graphql") {
+    uri = uri.with({ fragment: part.fractionalIndex });
+  }
+
+  return uri.toString();
+}
+
+function splitUri(fullUri: DocumentUri) {
+  const uri = URI.parse(fullUri);
+  return {
+    uri: uri.with({ fragment: null }).toString(),
+    fractionalIndex: uri.fragment || "a0",
+  };
+}
+
+export class DocumentSynchronization {
+  private pendingDocumentChanges = new Map<DocumentUri, TextDocument>();
+  private knownFiles = new Map<
+    DocumentUri,
+    {
+      full: TextDocument;
+      parts: ReadonlyArray<FilePart>;
+    }
+  >();
+
+  constructor(
+    private sendNotification: <P, RO>(
+      type: ProtocolNotificationType<P, RO>,
+      params?: P,
+    ) => Promise<void>,
+    private sendRequest: <P, R, PR, E, RO>(
+      type: ProtocolRequestType<P, R, PR, E, RO>,
+      params: P,
+      token?: CancellationToken,
+    ) => Promise<R>,
+    private sendDiagnostics: NotificationHandler<PublishDiagnosticsParams>,
+  ) {}
+
+  private documentSynchronizationScheduled = false;
+  /**
+   * Ensures that only one `syncNextDocumentChange` is queued with the connection at a time.
+   * As a result, other, more important, changes can be processed with higher priority.
+   */
+  private scheduleDocumentSync = async () => {
+    if (
+      this.pendingDocumentChanges.size === 0 ||
+      this.documentSynchronizationScheduled
+    ) {
+      return;
+    }
+
+    this.documentSynchronizationScheduled = true;
+    try {
+      const next = this.pendingDocumentChanges.values().next();
+      if (next.done) return;
+      await this.sendDocumentChanges(next.value);
+    } finally {
+      this.documentSynchronizationScheduled = false;
+      setImmediate(this.scheduleDocumentSync);
+    }
+  };
+
+  private async sendDocumentChanges(
+    document: TextDocument,
+    previousParts = this.knownFiles.get(document.uri)?.parts || [],
+  ) {
+    this.pendingDocumentChanges.delete(document.uri);
+
+    const previousObj = Object.fromEntries(
+      previousParts.map((p) => [p.fractionalIndex, p]),
+    );
+    const newParts = handleFilePartUpdates(
+      extractGraphQLSources(document) || [],
+      previousParts,
+    );
+    const newObj = Object.fromEntries(
+      newParts.map((p) => [p.fractionalIndex, p]),
+    );
+    this.knownFiles.set(document.uri, { full: document, parts: newParts });
+
+    for (const newPart of newParts) {
+      const previousPart = previousObj[newPart.fractionalIndex];
+      if (!previousPart) {
+        await this.sendNotification(DidOpenTextDocumentNotification.type, {
+          textDocument: {
+            uri: getUri(document, newPart),
+            languageId: "graphql",
+            version: document.version,
+            text: newPart.source.body,
+          },
+        });
+      } else if (newPart.source.body !== previousPart.source.body) {
+        await this.sendNotification(DidChangeTextDocumentNotification.type, {
+          textDocument: {
+            uri: getUri(document, newPart),
+            version: document.version,
+          },
+          contentChanges: [
+            {
+              text: newPart.source.body,
+            },
+          ],
+        });
+      }
+    }
+    for (const previousPart of previousParts) {
+      if (!newObj[previousPart.fractionalIndex]) {
+        await this.sendNotification(DidCloseTextDocumentNotification.type, {
+          textDocument: {
+            uri: getUri(document, previousPart),
+          },
+        });
+      }
+    }
+  }
+
+  async resendAllDocuments() {
+    for (const file of this.knownFiles.values()) {
+      await this.sendDocumentChanges(file.full, []);
+    }
+  }
+
+  onDidOpenTextDocument: NonNullable<GraphQLProject["onDidOpen"]> = async (
+    params,
+  ) => {
+    this.documentDidChange(params.document);
+  };
+
+  onDidCloseTextDocument: NonNullable<GraphQLProject["onDidClose"]> = (
+    params,
+  ) => {
+    const known = this.knownFiles.get(params.document.uri);
+    if (!known) {
+      return;
+    }
+    this.knownFiles.delete(params.document.uri);
+    return Promise.all(
+      known.parts.map((part) =>
+        this.sendNotification(DidCloseTextDocumentNotification.type, {
+          textDocument: {
+            uri: getUri(known.full, part),
+          },
+        }),
+      ),
+    );
+  };
+
+  async documentDidChange(document: TextDocument) {
+    if (this.pendingDocumentChanges.has(document.uri)) {
+      // this will put the document at the end of the queue again
+      // in hopes that we can skip a bit of unnecessary work sometimes
+      // when many files change around a lot
+      // we will always ensure that a document is synchronized via `synchronizedWithDocument`
+      // before we do other operations on the document, so this is safe
+      this.pendingDocumentChanges.delete(document.uri);
+    }
+
+    this.pendingDocumentChanges.set(document.uri, document);
+    this.scheduleDocumentSync();
+  }
+
+  async synchronizedWithDocument(documentUri: DocumentUri): Promise<void> {
+    const document = this.pendingDocumentChanges.get(documentUri);
+    if (document) {
+      await this.sendDocumentChanges(document);
+    }
+  }
+
+  async insideVirtualDocument<T>(
+    positionParams: TextDocumentPositionParams,
+    cb: (virtualPositionParams: TextDocumentPositionParams) => Promise<T>,
+  ): Promise<T | undefined> {
+    await this.synchronizedWithDocument(positionParams.textDocument.uri);
+    const found = this.knownFiles.get(positionParams.textDocument.uri);
+    if (!found) {
+      return;
+    }
+    const match = findContainedSourceAndPosition(
+      found.parts,
+      positionParams.position,
+    );
+
+    if (!match) return;
+    return cb({
+      textDocument: {
+        uri: getUri(found.full, match),
+      },
+      position: match.position,
+    });
+  }
+
+  handlePartDiagnostics(params: PublishDiagnosticsParams) {
+    DEBUG && console.log("Received diagnostics", params);
+    const uriDetails = splitUri(params.uri);
+    if (!uriDetails) {
+      return;
+    }
+    const found = this.knownFiles.get(uriDetails.uri);
+    if (!found) {
+      return;
+    }
+    const part = found.parts.find(
+      (p) => p.fractionalIndex === uriDetails.fractionalIndex,
+    );
+    if (!part) {
+      return;
+    }
+    part.diagnostics = params.diagnostics;
+
+    const fullDocumentParams: PublishDiagnosticsParams = {
+      uri: found.full.uri,
+      version: found.full.version,
+      diagnostics: found.parts.flatMap((p) =>
+        p.diagnostics.map((diagnostic) => ({
+          ...diagnostic,
+          range: rangeInContainingDocument(p.source, diagnostic.range),
+        })),
+      ),
+    };
+
+    this.sendDiagnostics(fullDocumentParams);
+  }
+
+  get openDocuments() {
+    return [...this.knownFiles.values()].map((f) => f.full);
+  }
+
+  clearAllDiagnostics() {
+    for (const file of this.knownFiles.values()) {
+      for (const part of file.parts) {
+        part.diagnostics = [];
+      }
+      this.sendDiagnostics({ uri: file.full.uri, diagnostics: [] });
+    }
+  }
+
+  /**
+   * Receives semantic tokens for all sub-documents and glues them together.
+   * See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_semanticTokens
+   * TLDR: The tokens are a flat array of numbers, where each token is represented by 5 numbers.
+   * The first two numbers represent the token's delta line and delta start character and might need adjusting
+   * relative to the start of a sub-document in relation to the position of the last token of the previous sub-document.
+   *
+   * There is also an "incremental" version of this request, but we don't support it yet.
+   * This is complicated enough as it is.
+   */
+  async getFullSemanticTokens(
+    params: SemanticTokensParams,
+    cancellationToken: CancellationToken,
+  ): Promise<SemanticTokens | null> {
+    await this.synchronizedWithDocument(params.textDocument.uri);
+    const found = this.knownFiles.get(params.textDocument.uri);
+    if (!found) {
+      return null;
+    }
+    const allParts = await Promise.all(
+      found.parts.map(async (part) => {
+        return {
+          part,
+          tokens: await this.sendRequest(
+            SemanticTokensRequest.type,
+            {
+              textDocument: { uri: getUri(found.full, part) },
+            },
+            cancellationToken,
+          ),
+        };
+      }),
+    );
+    let line = 0,
+      char = 0,
+      lastLine = 0,
+      lastChar = 0;
+    const combinedTokens = [];
+    for (const { part, tokens } of allParts) {
+      if (!tokens) {
+        continue;
+      }
+      line = part.source.locationOffset.line - 1;
+      char = part.source.locationOffset.column - 1;
+      for (let i = 0; i < tokens.data.length; i += 5) {
+        const deltaLine = tokens.data[i],
+          deltaStartChar = tokens.data[i + 1];
+
+        // We need to run this loop fully to correctly calculate the `lastLine` and `lastChar`
+        // so for the next incoming tokens, we can adjust the delta correctly.
+        line = line + deltaLine;
+        char = deltaLine === 0 ? char + deltaStartChar : deltaStartChar;
+        // we just need to adjust the deltas only for the first token
+        if (i === 0) {
+          tokens.data[0] = line - lastLine;
+          tokens.data[1] = line === lastLine ? lastChar - char : char;
+        }
+      }
+      combinedTokens.push(...tokens.data);
+      lastLine = line;
+      lastChar = char;
+    }
+    return {
+      data: combinedTokens,
+    };
+  }
+}
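
A note on the scheme above: every GraphQL template literal found in a JavaScript or TypeScript file is forwarded to the downstream language server as its own virtual "graphql" document, and the URI of that virtual document carries a fractional index (from the fractional-indexing package) in its fragment. Because handleFilePartUpdates keeps the index of a part that merely moved or changed body, and only generates a fresh key for genuine insertions, neighbouring parts keep stable URIs as templates come and go. Below is a minimal sketch of that behaviour, relying only on the documented return values of generateKeyBetween; the buildPartUri helper is illustrative and not part of the extension:

import { generateKeyBetween } from "fractional-indexing";
import { URI } from "vscode-uri";

// The first two templates of a freshly parsed file receive "a0" and "a1" ...
const first = generateKeyBetween(null, null); // "a0"
const second = generateKeyBetween(first, null); // "a1"
// ... and a template inserted between them gets a key that sorts in between,
// matching the "a0V" expected by the "inserted file" test below.
const inserted = generateKeyBetween(first, second); // "a0V"

// Illustrative helper mirroring getUri(): the fractional index becomes the
// URI fragment of the virtual document, so the file URI itself stays untouched.
function buildPartUri(fileUri: string, fractionalIndex: string): string {
  return URI.parse(fileUri).with({ fragment: fractionalIndex }).toString();
}

console.log(buildPartUri("file:///sampleWorkspace/rover/src/test.js", inserted));
// -> file:///sampleWorkspace/rover/src/test.js#a0V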

package/src/language-server/project/rover/__tests__/DocumentSynchronization.test.ts
@@ -0,0 +1,302 @@
+import { Source } from "graphql";
+import { extractGraphQLSources } from "../../../document";
+import { handleFilePartUpdates } from "../DocumentSynchronization";
+import { TextDocument } from "vscode-languageserver-textdocument";
+
+const initialFile = `
+Test
+gql\`
+query Test1 {
+test
+}
+\`
+
+More test
+
+gql\`
+query Test2 {
+test
+}
+\`
+`;
+
+const editedFile = `
+Test edited
+foo
+bar
+gql\`
+query Test1 {
+test
+}
+\`
+
+More test lalala
+
+gql\`
+query Test2 {
+test
+}
+\`
+More stuff here
+`;
+
+const insertedFile = `
+Test edited
+foo
+bar
+gql\`
+query Test1 {
+test
+}
+\`
+More test lalala
+
+gql\`
+query Test3 {
+test
+}
+\`
+More test lalala
+
+gql\`
+query Test2 {
+test
+}
+\`
+More stuff here
+`;
+
+const pushedFile = `
+Test edited
+foo
+bar
+gql\`
+query Test1 {
+test
+}
+\`
+More test lalala
+
+gql\`
+query Test2 {
+test
+}
+\`
+More test lalala
+
+gql\`
+query Test3 {
+test
+}
+\`
+More stuff here
+`;
+
+const shiftedFile = `
+Test
+More test
+
+gql\`
+query Test2 {
+test
+}
+\`
+`;
+
+const poppedFile = `
+Test
+gql\`
+query Test1 {
+test
+}
+\`
+
+More test
+
+`;
+
+const query1 = `
+query Test1 {
+test
+}
+`;
+const query2 = `
+query Test2 {
+test
+}
+`;
+const query3 = `
+query Test3 {
+test
+}
+`;
+
+describe("handleFilePartUpdates", () => {
+  const initialUpdates = handleFilePartUpdates(
+    extractGraphQLSources(
+      TextDocument.create("uri", "javascript", 1, initialFile),
+    )!,
+    [],
+  );
+
+  test("newly parsed file", () => {
+    expect(initialUpdates).toEqual([
+      {
+        fractionalIndex: "a0",
+        diagnostics: [],
+        source: new Source(query1, "uri", {
+          column: 5,
+          line: 3,
+        }),
+      },
+      {
+        fractionalIndex: "a1",
+        diagnostics: [],
+        source: new Source(query2, "uri", {
+          column: 5,
+          line: 11,
+        }),
+      },
+    ]);
+  });
+
+  test("edited file", () => {
+    expect(
+      handleFilePartUpdates(
+        extractGraphQLSources(
+          TextDocument.create("uri", "javascript", 2, editedFile),
+        )!,
+        initialUpdates,
+      ),
+    ).toEqual([
+      {
+        fractionalIndex: "a0",
+        diagnostics: [],
+        source: new Source(query1, "uri", {
+          column: 5,
+          line: 5,
+        }),
+      },
+      {
+        fractionalIndex: "a1",
+        diagnostics: [],
+        source: new Source(query2, "uri", {
+          column: 5,
+          line: 13,
+        }),
+      },
+    ]);
+  });
+
+  test("inserted file", () => {
+    expect(
+      handleFilePartUpdates(
+        extractGraphQLSources(
+          TextDocument.create("uri", "javascript", 2, insertedFile),
+        )!,
+        initialUpdates,
+      ),
+    ).toEqual([
+      {
+        fractionalIndex: "a0",
+        diagnostics: [],
+        source: new Source(query1, "uri", {
+          column: 5,
+          line: 5,
+        }),
+      },
+      {
+        fractionalIndex: "a0V",
+        diagnostics: [],
+        source: new Source(query3, "uri", {
+          column: 5,
+          line: 12,
+        }),
+      },
+      {
+        fractionalIndex: "a1",
+        diagnostics: [],
+        source: new Source(query2, "uri", {
+          column: 5,
+          line: 19,
+        }),
+      },
+    ]);
+  });
+
+  test("pushed file", () => {
+    expect(
+      handleFilePartUpdates(
+        extractGraphQLSources(
+          TextDocument.create("uri", "javascript", 2, pushedFile),
+        )!,
+        initialUpdates,
+      ),
+    ).toEqual([
+      {
+        fractionalIndex: "a0",
+        diagnostics: [],
+        source: new Source(query1, "uri", {
+          column: 5,
+          line: 5,
+        }),
+      },
+      {
+        fractionalIndex: "a1",
+        diagnostics: [],
+        source: new Source(query2, "uri", {
+          column: 5,
+          line: 12,
+        }),
+      },
+      {
+        fractionalIndex: "a2",
+        diagnostics: [],
+        source: new Source(query3, "uri", {
+          column: 5,
+          line: 19,
+        }),
+      },
+    ]);
+  });
+
+  test("shifted file", () => {
+    expect(
+      handleFilePartUpdates(
+        extractGraphQLSources(
+          TextDocument.create("uri", "javascript", 2, shiftedFile),
+        )!,
+        initialUpdates,
+      ),
+    ).toEqual([
+      {
+        fractionalIndex: "a1",
+        diagnostics: [],
+        source: new Source(query2, "uri", {
+          column: 5,
+          line: 5,
+        }),
+      },
+    ]);
+  });
+
+  test("popped file", () => {
+    expect(
+      handleFilePartUpdates(
+        extractGraphQLSources(
+          TextDocument.create("uri", "javascript", 2, poppedFile),
+        )!,
+        initialUpdates,
+      ),
+    ).toEqual([
+      {
+        fractionalIndex: "a0",
+        diagnostics: [],
+        source: new Source(query1, "uri", {
+          column: 5,
+          line: 3,
+        }),
+      },
+    ]);
+  });
+});
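
For readers following the token-gluing in getFullSemanticTokens further above: per LSP 3.17, semantic tokens arrive as a flat array of five integers per token (deltaLine, deltaStartChar, length, tokenType, tokenModifiers), where deltaLine is relative to the previous token and deltaStartChar is only relative when both tokens sit on the same line. The snippet below is a hand-worked illustration of how the first token of a part gets re-based, using made-up numbers that are not taken from the package:

// Tokens reported for one virtual GraphQL part, as the downstream server sees
// them (positions are relative to the start of the part):
const partTokens = [
  0, 2, 5, 1, 0, // token A: part line 0, char 2
  1, 4, 4, 2, 0, // token B: one line further down, char 4 (absolute, new line)
];

// Suppose the part starts at line 4 / char 0 of the containing file
// (locationOffset { line: 5, column: 1 }), and the previous part's last token
// ended at file line 2, char 10. Only token A's deltas must be rewritten:
// its absolute file position is line 4, char 2, so the combined stream continues with
//   deltaLine      = 4 - 2 = 2
//   deltaStartChar = 2        (absolute, because the line changed)
// Token B's deltas are already relative to token A and stay [1, 4, ...].
const rebased = [2, 2, 5, 1, 0, 1, 4, 4, 2, 0];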