@messenger-box/platform-client 10.0.3-alpha.7 → 10.0.3-alpha.74

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. package/CHANGELOG.md +92 -0
  2. package/lib/graphql/fragments/post-message.gql +36 -0
  3. package/lib/graphql/mutations/channel-mutation.gql +5 -0
  4. package/lib/graphql/mutations/messages-mutation.gql +37 -0
  5. package/lib/graphql/policies/channel-policies.d.ts.map +1 -1
  6. package/lib/graphql/policies/channel-policies.js +150 -2
  7. package/lib/graphql/policies/channel-policies.js.map +1 -1
  8. package/lib/graphql/policies/messages-policies.d.ts.map +1 -1
  9. package/lib/graphql/policies/messages-policies.js +229 -37
  10. package/lib/graphql/policies/messages-policies.js.map +1 -1
  11. package/lib/graphql/policies/post-thread-policies.d.ts.map +1 -1
  12. package/lib/graphql/policies/post-thread-policies.js +136 -31
  13. package/lib/graphql/policies/post-thread-policies.js.map +1 -1
  14. package/lib/graphql/policies/teams-policies.d.ts.map +1 -1
  15. package/lib/graphql/policies/teams-policies.js +13 -1
  16. package/lib/graphql/policies/teams-policies.js.map +1 -1
  17. package/lib/graphql/queries/channels-by-user.gql +26 -0
  18. package/lib/graphql/queries/messages.gql +2 -2
  19. package/lib/graphql/queries/organization-query.gql +78 -35
  20. package/lib/graphql/queries/post-thread-message.gql +4 -0
  21. package/lib/graphql/queries/teams-query.gql +79 -29
  22. package/lib/graphql/queries/user-account.gql +1 -1
  23. package/lib/graphql/queries/users.gql +1 -1
  24. package/lib/graphql/subscription/fileUpdated.gql +11 -0
  25. package/lib/graphql/subscription/fragmentUpdated.gql +7 -0
  26. package/lib/graphql/subscription/sandboxError.gql +17 -0
  27. package/lib/hooks/use-upload-file.hook.d.ts.map +1 -1
  28. package/lib/hooks/use-upload-file.hook.js +1 -1
  29. package/lib/hooks/use-upload-file.hook.js.map +1 -1
  30. package/lib/hooks/use-upload-file.hook.native.d.ts.map +1 -1
  31. package/lib/hooks/use-upload-file.hook.native.js +1 -1
  32. package/lib/hooks/use-upload-file.hook.native.js.map +1 -1
  33. package/lib/hooks/use-upload-files.hook.d.ts.map +1 -1
  34. package/lib/hooks/use-upload-files.hook.js +1 -1
  35. package/lib/hooks/use-upload-files.hook.js.map +1 -1
  36. package/lib/hooks/use-upload-files.hook.native.d.ts.map +1 -1
  37. package/lib/hooks/use-upload-files.hook.native.js +1 -1
  38. package/lib/hooks/use-upload-files.hook.native.js.map +1 -1
  39. package/package.json +4 -4
  40. package/src/graphql/fragments/post-message.gql +36 -0
  41. package/src/graphql/mutations/channel-mutation.gql +5 -0
  42. package/src/graphql/mutations/messages-mutation.gql +37 -0
  43. package/src/graphql/policies/channel-policies.ts +148 -2
  44. package/src/graphql/policies/messages-policies.ts +251 -39
  45. package/src/graphql/policies/post-thread-policies.ts +151 -31
  46. package/src/graphql/policies/teams-policies.ts +13 -1
  47. package/src/graphql/queries/channels-by-user.gql +26 -0
  48. package/src/graphql/queries/messages.gql +2 -2
  49. package/src/graphql/queries/organization-query.gql +78 -35
  50. package/src/graphql/queries/post-thread-message.gql +4 -0
  51. package/src/graphql/queries/teams-query.gql +79 -29
  52. package/src/graphql/queries/user-account.gql +1 -1
  53. package/src/graphql/queries/users.gql +1 -1
  54. package/src/graphql/subscription/fileUpdated.gql +11 -0
  55. package/src/graphql/subscription/fragmentUpdated.gql +7 -0
  56. package/src/graphql/subscription/sandboxError.gql +17 -0
  57. package/src/hooks/use-upload-file.hook.native.ts +1 -4
  58. package/src/hooks/use-upload-file.hook.ts +1 -4
  59. package/src/hooks/use-upload-files.hook.native.ts +1 -4
  60. package/src/hooks/use-upload-files.hook.ts +1 -4
@@ -19,27 +19,53 @@ export const messagesPolicies: TypePolicies = {
19
19
  },
20
20
  },
21
21
  data: {
22
- keyArgs: false,
23
- merge: (existing = [], incoming = [], { readField, mergeObjects }) => {
24
- // console.log('existing', existing);
25
- // console.log('incoming', incoming);
26
- const merged = [...incoming];
27
- const existingIds = existing.map((item) => readField<String>('id', item));
28
-
29
- merged.forEach((item, index) => {
30
- const itemId = readField<String>('id', item);
31
- const existingIndex = existingIds.findIndex((id) => id === itemId);
32
- if (existingIndex !== -1) {
33
- merged[index] = mergeObjects(existing[existingIndex], merged[index]);
22
+ keyArgs: ['channelId', 'parentId'],
23
+ merge: (existing = [], incoming = [], { readField }) => {
24
+ // Use a Map for O(1) lookups instead of array iterations
25
+ const existingMap = new Map();
26
+
27
+ // Populate map with existing messages
28
+ if (existing && existing.length > 0) {
29
+ for (let i = 0; i < existing.length; i++) {
30
+ const id = readField('id', existing[i]);
31
+ if (id) {
32
+ existingMap.set(id, existing[i]);
33
+ }
34
+ }
35
+ }
36
+
37
+ // Create result array with same capacity as total items
38
+ const result = [];
39
+
40
+ // Add incoming items, overwriting existing ones
41
+ if (incoming && incoming.length > 0) {
42
+ for (let i = 0; i < incoming.length; i++) {
43
+ const item = incoming[i];
44
+ const id = readField('id', item);
45
+ if (id) {
46
+ existingMap.set(id, item);
47
+ }
48
+ result.push(item);
34
49
  }
35
- });
36
- return merged;
50
+ }
51
+
52
+ // Add remaining existing items not in incoming
53
+ if (existing && existing.length > 0) {
54
+ for (let i = 0; i < existing.length; i++) {
55
+ const id = readField('id', existing[i]);
56
+ if (id && !result.some((item) => readField('id', item) === id)) {
57
+ result.push(existing[i]);
58
+ }
59
+ }
60
+ }
61
+
62
+ return result;
37
63
  },
38
64
  },
39
65
  totalCount: {
40
- keyArgs: false,
66
+ keyArgs: ['channelId', 'parentId'],
41
67
  merge(existing, incoming) {
42
- return existing && existing > incoming ? existing : incoming;
68
+ return incoming !== undefined ? incoming : existing;
43
69
  },
44
70
  },
45
71
  // data: {
@@ -57,7 +83,7 @@ export const messagesPolicies: TypePolicies = {
57
83
  },
58
84
  },
59
85
  PostThreadMessages: {
60
- // keyFields: ['threadmessagesRefId'],
86
+ // keyFields: ['threadmessagesRefId'],
61
87
  fields: {
62
88
  threadmessagesRefId: {
63
89
  read(existing, { variables }) {
@@ -76,33 +102,219 @@ export const messagesPolicies: TypePolicies = {
76
102
  Query: {
77
103
  fields: {
78
104
  messages: {
79
- // keyArgs: ['channelId', 'parentId', 'limit', 'skip'],
80
- // keyArgs: ['channelId', 'parentId'],
105
+ keyArgs: ['channelId', 'parentId'],
106
+ merge(existing, incoming, { args, readField }) {
107
+ if (!incoming) return existing;
108
+ if (!existing) return incoming;
109
+
110
+ // Fast return if incoming has no data
111
+ if (!incoming.data || incoming.data.length === 0) {
112
+ return {
113
+ ...existing,
114
+ totalCount: incoming.totalCount ?? existing.totalCount,
115
+ };
116
+ }
117
+
118
+ // Fast return if existing has no data
119
+ if (!existing.data || existing.data.length === 0) {
120
+ return incoming;
121
+ }
122
+
123
+ // Determine if this is likely a new message or pagination
124
+ const isNewMessage = args && args.skip === 0;
125
+
126
+ // Create a map for existing messages for fast lookups
127
+ const idSet = new Set();
128
+ const mergedData = [...existing.data];
129
+
130
+ // Mark all existing IDs
131
+ for (let i = 0; i < mergedData.length; i++) {
132
+ const id = readField('id', mergedData[i]);
133
+ if (id) idSet.add(id);
134
+ }
135
+
136
+ // Process incoming messages
137
+ for (let i = 0; i < incoming.data.length; i++) {
138
+ const incomingMsg = incoming.data[i];
139
+ const id = readField('id', incomingMsg);
140
+
141
+ if (!id) continue;
142
+
143
+ if (idSet.has(id)) {
144
+ // Replace existing message with same ID
145
+ const existingIndex = mergedData.findIndex((msg) => readField('id', msg) === id);
146
+ if (existingIndex >= 0) {
147
+ mergedData[existingIndex] = incomingMsg;
148
+ }
149
+ } else {
150
+ // Add new message
151
+ if (isNewMessage) {
152
+ // Add to beginning for new messages
153
+ mergedData.unshift(incomingMsg);
154
+ } else {
155
+ // Add to end for pagination
156
+ mergedData.push(incomingMsg);
157
+ }
158
+ idSet.add(id);
159
+ }
160
+ }
161
+
162
+ return {
163
+ ...existing,
164
+ totalCount: incoming.totalCount ?? existing.totalCount,
165
+ data: mergedData,
166
+ };
167
+ },
168
+ },
169
+ },
170
+ },
171
+ FilesInfo: {
172
+ merge: true, // Use default merging behavior for FilesInfo
173
+ fields: {
174
+ data: {
175
+ merge(existing = [], incoming = [], { readField }) {
176
+ // If no existing data, just return incoming
177
+ if (!existing || existing.length === 0) {
178
+ return incoming;
179
+ }
180
+
181
+ // If no incoming data, keep existing
182
+ if (!incoming || incoming.length === 0) {
183
+ return existing;
184
+ }
185
+
186
+ // Create a map for efficient lookup
187
+ const fileMap = new Map();
188
+
189
+ // Add existing files to map
190
+ for (let i = 0; i < existing.length; i++) {
191
+ const file = existing[i];
192
+ if (file) {
193
+ const id = readField('id', file);
194
+ if (id) {
195
+ fileMap.set(id, file);
196
+ }
197
+ }
198
+ }
199
+
200
+ // Merge in incoming files, overwriting existing ones
201
+ for (let i = 0; i < incoming.length; i++) {
202
+ const file = incoming[i];
203
+ if (file) {
204
+ const id = readField('id', file);
205
+ if (id) {
206
+ fileMap.set(id, file);
207
+ }
208
+ }
209
+ }
210
+
211
+ // Convert map values back to array
212
+ return Array.from(fileMap.values());
213
+ },
214
+ },
215
+ },
216
+ },
217
+ FileInfo: {
218
+ // Use ID as key field
219
+ keyFields: ['id'],
220
+ fields: {
221
+ url: {
222
+ // Ensure URL is always preserved and not normalized
81
223
  merge(existing, incoming) {
224
+ return incoming ?? existing;
225
+ },
226
+ },
227
+ },
228
+ },
229
+ Post: {
230
+ fields: {
231
+ replies: {
232
+ merge(existing, incoming) {
233
+ if (!incoming) return existing;
234
+ if (!existing) return incoming;
235
+
236
+ // Use a Set for fast duplicate checking
237
+ const uniqueIds = new Set();
238
+ const mergedData = [];
239
+
240
+ // Add all existing items to the result and track IDs
241
+ if (existing.data && existing.data.length > 0) {
242
+ for (let i = 0; i < existing.data.length; i++) {
243
+ const item = existing.data[i];
244
+ if (item && item.id && !uniqueIds.has(item.id)) {
245
+ uniqueIds.add(item.id);
246
+ mergedData.push(item);
247
+ }
248
+ }
249
+ }
250
+
251
+ // Add incoming items that don't exist yet
252
+ if (incoming.data && incoming.data.length > 0) {
253
+ for (let i = 0; i < incoming.data.length; i++) {
254
+ const item = incoming.data[i];
255
+ if (item && item.id && !uniqueIds.has(item.id)) {
256
+ uniqueIds.add(item.id);
257
+ mergedData.push(item);
258
+ }
259
+ }
260
+ }
261
+
82
262
  return {
83
263
  ...incoming,
84
- data: [...(existing?.data ?? []), ...(incoming.data ?? [])],
264
+ data: mergedData,
265
+ };
266
+ },
267
+ },
268
+ files: {
269
+ merge(existing, incoming, { readField }) {
270
+ if (!incoming) return existing;
271
+ if (!existing) return incoming;
272
+
273
+ // If either has no data or totalCount is 0, prefer the one with data
274
+ if (!existing.data || existing.data.length === 0) {
275
+ return incoming;
276
+ }
277
+
278
+ if (!incoming.data || incoming.data.length === 0) {
279
+ return existing;
280
+ }
281
+
282
+ // Create a map for efficient lookup
283
+ const fileMap = new Map();
284
+
285
+ // Add existing files to map
286
+ if (existing.data) {
287
+ for (let i = 0; i < existing.data.length; i++) {
288
+ const file = existing.data[i];
289
+ if (file) {
290
+ const id = readField('id', file);
291
+ if (id) {
292
+ fileMap.set(id, file);
293
+ }
294
+ }
295
+ }
296
+ }
297
+
298
+ // Merge in incoming files, overwriting existing ones
299
+ if (incoming.data) {
300
+ for (let i = 0; i < incoming.data.length; i++) {
301
+ const file = incoming.data[i];
302
+ if (file) {
303
+ const id = readField('id', file);
304
+ if (id) {
305
+ fileMap.set(id, file);
306
+ }
307
+ }
308
+ }
309
+ }
310
+
311
+ // Create merged result
312
+ return {
313
+ __typename: 'FilesInfo',
314
+ totalCount: Math.max(existing.totalCount || 0, incoming.totalCount || 0, fileMap.size),
315
+ data: Array.from(fileMap.values()),
85
316
  };
86
317
  },
87
- // merge(existing, incoming, { args, mergeObjects }) {
88
- // console.log('existing length', existing?.data?.length);
89
- // console.log('incoming length', incoming?.data?.length);
90
- // // console.log('existing', JSON.stringify(existing), 'incoming', JSON.stringify(incoming));
91
- // if (!incoming || incoming?.data?.length) return existing;
92
- // if (!existing || existing?.data?.length) return incoming;
93
- // //console.log('existing', JSON.stringify(existing), 'incoming', JSON.stringify(incoming));
94
- // const mergedData = existing ? existing?.data?.slice(0) : [];
95
- // // Insert the incoming elements in the right places, according to args.
96
- // const end = args?.skip + Math.min(args?.limit, incoming?.data?.length);
97
- // for (let i = args?.skip; i < end; ++i) {
98
- // mergedData[i] = incoming?.data[i - args?.skip];
99
- // }
100
-
101
- // const merged = { ...incoming, data: mergedData };
102
- // return merged;
103
- // // return mergeObjects(existing, incoming);
104
- // //return existing ? { ...existing, ...incoming } : incoming;
105
- // },
106
318
  },
107
319
  },
108
320
  },
@@ -1,57 +1,177 @@
1
1
  import { TypePolicies } from '@apollo/client';
2
+ import { gql } from '@apollo/client';
3
+
4
+ // Define the fragment we'll use for cache operations
5
+ const POST_THREAD_FRAGMENT = gql`
6
+ fragment PostThreadInfo on PostThread {
7
+ replies
8
+ replyCount
9
+ lastReplyAt
10
+ updatedAt
11
+ }
12
+ `;
2
13
 
3
14
  export const postThreadPolicies: TypePolicies = {
4
15
  ThreadMessages: {
5
- // keyFields: ['messagesRefId'],
6
- //keyFields: [],
7
16
  keyFields: ['data', ['channel', ['id']]],
8
17
  fields: {
9
18
  data: {
10
- // keyArgs: false,
11
- // merge: (existing = [], incoming = [], { readField, mergeObjects }) => {
12
- // return existing ? [...existing, ...incoming] : incoming;
13
- // // console.log('existing', existing);
14
- // // console.log('incoming', incoming);
15
- // // const merged = [...incoming];
16
- // // const existingIds = existing.map((item) => readField<String>('id', item));
17
- // // merged.forEach((item, index) => {
18
- // // const itemId = readField<String>('id', item);
19
- // // const existingIndex = existingIds.findIndex((id) => id === itemId);
20
- // // if (existingIndex !== -1) {
21
- // // merged[index] = mergeObjects(existing[existingIndex], merged[index]);
22
- // // }
23
- // // });
24
- // // return merged;
25
- // },
19
+ merge: (existing = [], incoming = [], { readField }) => {
20
+ // Create a map for efficient lookups
21
+ const threadMap = new Map();
22
+
23
+ // Store existing threads
24
+ if (existing && existing.length > 0) {
25
+ for (const item of existing) {
26
+ const id = readField('id', item);
27
+ if (id) threadMap.set(id, item);
28
+ }
29
+ }
30
+
31
+ // Add or update with incoming threads
32
+ if (incoming && incoming.length > 0) {
33
+ for (const item of incoming) {
34
+ const id = readField('id', item);
35
+ if (id) threadMap.set(id, item);
36
+ }
37
+ }
38
+
39
+ // Convert back to array
40
+ return Array.from(threadMap.values());
41
+ },
26
42
  },
27
43
  totalCount: {
28
- // keyArgs: false,
29
- // merge(existing, incoming) {
30
- // return existing && existing > incoming ? existing : incoming;
31
- // },
44
+ merge(existing, incoming) {
45
+ // Take the higher of the two counts
46
+ return incoming !== undefined ? Math.max(existing || 0, incoming) : existing;
47
+ },
32
48
  },
33
49
  },
34
50
  },
35
51
  Query: {
36
52
  fields: {
37
53
  threadMessages: {
38
- // keyArgs: ['channelId', 'parentId', 'limit', 'skip'],
39
54
  keyArgs: ['channelId'],
40
- //keyArgs: false,
41
- merge(existing, incoming) {
42
- // debugger;
55
+ merge(existing, incoming, { readField }) {
56
+ if (!existing) return incoming;
57
+ if (!incoming) return existing;
58
+
43
59
  return {
44
60
  ...incoming,
45
- data: [...(existing?.data ?? []), ...(incoming.data ?? [])],
61
+ data: [...(existing?.data || []), ...(incoming.data || [])].filter(
62
+ (item, index, self) =>
63
+ // Filter out duplicates
64
+ index === self.findIndex((t) => readField('id', t) === readField('id', item)),
65
+ ),
46
66
  };
47
67
  },
48
68
  },
49
69
  getPostThread: {
50
- // keyArgs: ['channelId', 'parentId', 'limit', 'skip'],
51
- keyArgs: ['channelId', 'postParentId', 'limit', 'skip', 'role'],
70
+ keyArgs: ['channelId', 'postParentId', 'role'],
52
71
  merge(existing, incoming, { mergeObjects }) {
53
- // debugger;
54
- return mergeObjects(existing, incoming);
72
+ if (!existing) return incoming;
73
+ if (!incoming) return existing;
74
+
75
+ // Carefully merge the two objects
76
+ const result = mergeObjects(existing, incoming);
77
+
78
+ // Special handling for replies to avoid duplicates
79
+ if (existing.replies && incoming.replies) {
80
+ const uniqueReplies = new Map();
81
+
82
+ // Add existing replies
83
+ for (const reply of existing.replies) {
84
+ uniqueReplies.set(reply.id, reply);
85
+ }
86
+
87
+ // Add incoming replies, overwriting existing ones
88
+ for (const reply of incoming.replies) {
89
+ uniqueReplies.set(reply.id, reply);
90
+ }
91
+
92
+ // Replace replies with deduplicated list
93
+ result.replies = Array.from(uniqueReplies.values());
94
+ }
95
+
96
+ return result;
97
+ },
98
+ },
99
+ },
100
+ },
101
+ // Mutation: {
102
+ // fields: {
103
+ // createPostThread: {
104
+ // merge(existing, incoming, { cache, args, readField }) {
105
+ // // Early return if not enough data
106
+ // if (!incoming?.lastMessage || !incoming?.data || !args?.channelId || !args?.postParentId) {
107
+ // return incoming;
108
+ // }
109
+
110
+ // try {
111
+ // // Use type policies to handle the cache update instead of direct manipulation
112
+ // const queryRef = cache.identify({
113
+ // __typename: 'Query',
114
+ // getPostThread: {
115
+ // channelId: args.channelId,
116
+ // postParentId: args.postParentId,
117
+ // role: args.threadMessageInput?.role
118
+ // }
119
+ // });
120
+
121
+ // // Use cache.modify which doesn't require fragments
122
+ // if (queryRef) {
123
+ // cache.modify({
124
+ // id: queryRef,
125
+ // fields: {
126
+ // getPostThread(existingThread = {}) {
127
+ // if (!existingThread) return existingThread;
128
+
129
+ // // Create a new object with the updated properties
130
+ // return {
131
+ // ...existingThread,
132
+ // replies: [incoming.lastMessage, ...(existingThread.replies || [])],
133
+ // replyCount: (existingThread.replyCount || 0) + 1,
134
+ // lastReplyAt: incoming.lastMessage.createdAt,
135
+ // updatedAt: incoming.lastMessage.createdAt
136
+ // };
137
+ // }
138
+ // }
139
+ // });
140
+ // }
141
+ // } catch (error) {
142
+ // console.error('Error updating cache in createPostThread policy:', error);
143
+ // }
144
+
145
+ // return incoming;
146
+ // },
147
+ // },
148
+ // },
149
+ // },
150
+ PostThread: {
151
+ fields: {
152
+ replies: {
153
+ merge(existing = [], incoming = [], { readField }) {
154
+ // Use a map for fast deduplication
155
+ const replyMap = new Map();
156
+
157
+ // Add existing replies
158
+ if (existing && existing.length > 0) {
159
+ for (const reply of existing) {
160
+ const id = readField('id', reply);
161
+ if (id) replyMap.set(id, reply);
162
+ }
163
+ }
164
+
165
+ // Add or update with incoming replies
166
+ if (incoming && incoming.length > 0) {
167
+ for (const reply of incoming) {
168
+ const id = readField('id', reply);
169
+ if (id) replyMap.set(id, reply);
170
+ }
171
+ }
172
+
173
+ // Convert back to array
174
+ return Array.from(replyMap.values());
55
175
  },
56
176
  },
57
177
  },
@@ -2,6 +2,18 @@ import { TypePolicies } from '@apollo/client';
2
2
 
3
3
  export const teamPolicies: TypePolicies = {
4
4
  Query: {
5
- fields: {},
5
+ fields: {
6
+ getOrganizationTeams: {
7
+ merge(existing = [], incoming) {
8
+ return incoming;
9
+ },
10
+ read(existing) {
11
+ return existing;
12
+ },
13
+ },
14
+ },
15
+ },
16
+ AccountTeam: {
17
+ keyFields: ['_id'],
6
18
  },
7
19
  };
@@ -14,6 +14,32 @@ query GetChannelsByUser($role: String, $criteria: AnyObject, $limit: Int, $skip:
14
14
  creator {
15
15
  ...MessengerUser
16
16
  }
17
+ lastPostAt
18
+ createdAt
19
+ updatedAt
20
+ }
21
+ }
22
+
23
+
24
+ query GetChannelsByUserWithLastMessage($role: String, $criteria: AnyObject, $limit: Int, $skip: Int, $sort: Sort) {
25
+ channelsByUser(role: $role, criteria: $criteria, limit: $limit, skip: $skip, sort: $sort) {
26
+ id
27
+ title
28
+ description
29
+ type
30
+ displayName
31
+ members {
32
+ id
33
+ user {
34
+ ...MessengerUser
35
+ }
36
+ }
37
+ creator {
38
+ ...MessengerUser
39
+ }
40
+ lastMessage {
41
+ ...Post
42
+ }
17
43
  createdAt
18
44
  updatedAt
19
45
  }
@@ -1,5 +1,5 @@
1
- query Messages($channelId: ID,$parentId: String, $limit: Int! = 10, $skip: Int! = 0, $sort: Sort) {
2
- messages(channelId: $channelId,parentId:$parentId, limit: $limit, skip: $skip, sort: $sort) @connection(key: "messages", filter: ["channelId","skip"]){
1
+ query Messages($channelId: ID,$parentId: String, $limit: Int! = 10, $skip: Int! = 0, $sort: Sort, $props: AnyObject) {
2
+ messages(channelId: $channelId,parentId:$parentId, limit: $limit, skip: $skip, sort: $sort, props: $props) @connection(key: "messages", filter: ["channelId","skip"]){
3
3
  messagesRefId @client
4
4
  data{
5
5
  ...Post