@messenger-box/platform-client 10.0.3-alpha.7 → 10.0.3-alpha.72

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. package/CHANGELOG.md +88 -0
  2. package/lib/graphql/fragments/post-message.gql +36 -0
  3. package/lib/graphql/policies/channel-policies.d.ts.map +1 -1
  4. package/lib/graphql/policies/channel-policies.js +150 -2
  5. package/lib/graphql/policies/channel-policies.js.map +1 -1
  6. package/lib/graphql/policies/messages-policies.d.ts.map +1 -1
  7. package/lib/graphql/policies/messages-policies.js +229 -37
  8. package/lib/graphql/policies/messages-policies.js.map +1 -1
  9. package/lib/graphql/policies/post-thread-policies.d.ts.map +1 -1
  10. package/lib/graphql/policies/post-thread-policies.js +136 -31
  11. package/lib/graphql/policies/post-thread-policies.js.map +1 -1
  12. package/lib/graphql/policies/teams-policies.d.ts.map +1 -1
  13. package/lib/graphql/policies/teams-policies.js +13 -1
  14. package/lib/graphql/policies/teams-policies.js.map +1 -1
  15. package/lib/graphql/queries/channels-by-user.gql +26 -0
  16. package/lib/graphql/queries/organization-query.gql +78 -35
  17. package/lib/graphql/queries/post-thread-message.gql +4 -0
  18. package/lib/graphql/queries/teams-query.gql +79 -29
  19. package/lib/graphql/queries/user-account.gql +1 -1
  20. package/lib/graphql/queries/users.gql +1 -1
  21. package/lib/hooks/use-upload-file.hook.d.ts.map +1 -1
  22. package/lib/hooks/use-upload-file.hook.js +1 -1
  23. package/lib/hooks/use-upload-file.hook.js.map +1 -1
  24. package/lib/hooks/use-upload-file.hook.native.d.ts.map +1 -1
  25. package/lib/hooks/use-upload-file.hook.native.js +1 -1
  26. package/lib/hooks/use-upload-file.hook.native.js.map +1 -1
  27. package/lib/hooks/use-upload-files.hook.d.ts.map +1 -1
  28. package/lib/hooks/use-upload-files.hook.js +1 -1
  29. package/lib/hooks/use-upload-files.hook.js.map +1 -1
  30. package/lib/hooks/use-upload-files.hook.native.d.ts.map +1 -1
  31. package/lib/hooks/use-upload-files.hook.native.js +1 -1
  32. package/lib/hooks/use-upload-files.hook.native.js.map +1 -1
  33. package/package.json +4 -4
  34. package/src/graphql/fragments/post-message.gql +36 -0
  35. package/src/graphql/policies/channel-policies.ts +148 -2
  36. package/src/graphql/policies/messages-policies.ts +251 -39
  37. package/src/graphql/policies/post-thread-policies.ts +151 -31
  38. package/src/graphql/policies/teams-policies.ts +13 -1
  39. package/src/graphql/queries/channels-by-user.gql +26 -0
  40. package/src/graphql/queries/organization-query.gql +78 -35
  41. package/src/graphql/queries/post-thread-message.gql +4 -0
  42. package/src/graphql/queries/teams-query.gql +79 -29
  43. package/src/graphql/queries/user-account.gql +1 -1
  44. package/src/graphql/queries/users.gql +1 -1
  45. package/src/hooks/use-upload-file.hook.native.ts +1 -4
  46. package/src/hooks/use-upload-file.hook.ts +1 -4
  47. package/src/hooks/use-upload-files.hook.native.ts +1 -4
  48. package/src/hooks/use-upload-files.hook.ts +1 -4
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@messenger-box/platform-client",
- "version": "10.0.3-alpha.7",
+ "version": "10.0.3-alpha.72",
  "description": "Sample core for higher packages to depend on",
  "license": "ISC",
  "author": "CDMBase LLC",
@@ -20,8 +20,8 @@
  "watch": "yarn build:lib:watch"
  },
  "dependencies": {
- "@container-stack/file-info-client": "^5.4.1-alpha.6",
- "@messenger-box/core": "10.0.3-alpha.7",
+ "@container-stack/file-info-client": "^5.4.1-alpha.9",
+ "@messenger-box/core": "10.0.3-alpha.72",
  "key-mirror": "1.0.1",
  "moment-timezone": "0.5.33"
  },
@@ -35,5 +35,5 @@
  "typescript": {
  "definition": "lib/index.d.ts"
  },
- "gitHead": "6c95ed48d37a8f95b0c3db71c1922c0e9e76bcfe"
+ "gitHead": "b4e54a34578a1a63afbe7ab48644b86e5cc8858f"
  }
package/src/graphql/fragments/post-message.gql CHANGED
@@ -19,6 +19,18 @@ fragment Post on Post {
  # propsConfiguration {
  # ...Configuration
  # }
+ propsConfiguration {
+ contents
+ id
+ keys
+ resource
+ target
+ overrides {
+ contents
+ identifiers
+ keys
+ }
+ }
  props
  files {
  totalCount
@@ -58,6 +70,18 @@ fragment PostReplies on Messages {
  # propsConfiguration {
  # ...Configuration
  # }
+ propsConfiguration {
+ contents
+ id
+ keys
+ resource
+ target
+ overrides {
+ contents
+ identifiers
+ keys
+ }
+ }
  props
  files {
  totalCount
@@ -98,6 +122,18 @@ fragment PostWithoutReplies on Post {
  # propsConfiguration {
  # ...Configuration
  # }
+ propsConfiguration {
+ contents
+ id
+ keys
+ resource
+ target
+ overrides {
+ contents
+ identifiers
+ keys
+ }
+ }
  props
  files {
  totalCount
package/src/graphql/policies/channel-policies.ts CHANGED
@@ -1,7 +1,153 @@
- import { TypePolicies } from '@apollo/client';
+ import { TypePolicies, gql } from '@apollo/client';

  export const channelPolicies: TypePolicies = {
  Query: {
- fields: {},
+ fields: {
+ channelsByUser: {
+ keyArgs: ['role', 'criteria', 'limit', 'skip', 'sort'],
+ merge(existing = [], incoming = [], { readField }) {
+ // If no existing data, just return incoming
+ if (!existing || existing.length === 0) {
+ return incoming;
+ }
+
+ // If no incoming data, keep existing
+ if (!incoming || incoming.length === 0) {
+ return existing;
+ }
+
+ // Create a map for efficient lookup
+ const channelMap = new Map();
+
+ // Add existing channels to map
+ for (let i = 0; i < existing.length; i++) {
+ const channel = existing[i];
+ if (channel) {
+ const id = readField('id', channel);
+ if (id) {
+ channelMap.set(id, channel);
+ }
+ }
+ }
+
+ // Merge in incoming channels, overwriting existing ones
+ for (let i = 0; i < incoming.length; i++) {
+ const channel = incoming[i];
+ if (channel) {
+ const id = readField('id', channel);
+ if (id) {
+ channelMap.set(id, channel);
+ }
+ }
+ }
+
+ // Convert map values back to array
+ return Array.from(channelMap.values());
+ },
+ // Add a read function for more control over cache reads
+ read(existing, { args }) {
+ // Return undefined to force a network request if data doesn't exist
+ if (!existing) return undefined;
+ return existing;
+ },
+ },
+ },
+ },
+ Channel: {
+ keyFields: ['id'],
+ fields: {
+ members: {
+ merge(existing = [], incoming) {
+ return incoming;
+ },
+ },
+ // Add field policies for other Channel fields if needed
+ title: {
+ read(title) {
+ return title || '';
+ },
+ },
+ displayName: {
+ read(displayName) {
+ return displayName || '';
+ },
+ },
+ // Add computed fields if needed
+ memberCount: {
+ read(_, { readField }) {
+ const members = readField('members') as any[];
+ return members && Array.isArray(members) ? members.length : 0;
+ },
+ },
+ lastMessage: {
+ merge(existing, incoming, { readField }) {
+ // If no incoming message, keep existing
+ if (!incoming) return existing;
+
+ // If no existing message, use incoming
+ if (!existing) return incoming;
+
+ // Compare timestamps to determine which is newer
+ const existingCreatedAt = readField('createdAt', existing) as string | number | Date;
+ const incomingCreatedAt = readField('createdAt', incoming) as string | number | Date;
+
+ if (existingCreatedAt && incomingCreatedAt) {
+ // Use the more recent message
+ return new Date(incomingCreatedAt) > new Date(existingCreatedAt) ? incoming : existing;
+ }
+
+ // If timestamps are not available, prefer incoming
+ return incoming;
+ },
+ read(lastMessage, { readField, cache, args }) {
+ // If lastMessage is already available, return it
+ if (lastMessage) return lastMessage;
+
+ // Try to get the channel ID to reference messages cache
+ const channelId = readField('id');
+ if (!channelId) return null;
+
+ try {
+ // Read messages from cache for this channel
+ const messagesQuery = cache.readQuery({
+ query: gql`
+ query GetChannelMessages($channelId: String!) {
+ messages(channelId: $channelId, skip: 0, limit: 1) {
+ data {
+ id
+ content
+ createdAt
+ updatedAt
+ user {
+ id
+ username
+ displayName
+ }
+ }
+ }
+ }
+ `,
+ variables: { channelId },
+ }) as any;
+
+ // Return the first (latest) message if available
+ return messagesQuery?.messages?.data?.[0] || null;
+ } catch (error) {
+ // If messages query fails, return null
+ return null;
+ }
+ },
+ },
+ },
+ },
+ // Add policies for ChannelMember type
+ ChannelMember: {
+ keyFields: ['id'],
+ fields: {
+ user: {
+ // Ensure user references are properly merged
+ merge: true,
+ },
+ },
  },
  };
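
The channel policies above are a plain Apollo Client `TypePolicies` map, so they only take effect once they are registered with the client's `InMemoryCache`. A minimal sketch of that wiring, assuming relative import paths within this package (the cache setup itself is not part of this diff):

```ts
import { InMemoryCache } from '@apollo/client';
// Assumed relative paths; the policies are exported from this package's policy modules.
import { channelPolicies } from './graphql/policies/channel-policies';
import { messagesPolicies } from './graphql/policies/messages-policies';

// Seed the cache with one TypePolicies map...
const cache = new InMemoryCache({
  typePolicies: channelPolicies,
});

// ...and merge the rest in. addTypePolicies deep-merges field policies, which
// matters here because channelPolicies and messagesPolicies both define
// Query.fields; a plain object spread would drop one of them.
cache.policies.addTypePolicies(messagesPolicies);
```
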
package/src/graphql/policies/messages-policies.ts CHANGED
@@ -19,27 +19,53 @@ export const messagesPolicies: TypePolicies = {
  },
  },
  data: {
- keyArgs: false,
- merge: (existing = [], incoming = [], { readField, mergeObjects }) => {
- // console.log('existing', existing);
- // console.log('incoming', incoming);
- const merged = [...incoming];
- const existingIds = existing.map((item) => readField<String>('id', item));
-
- merged.forEach((item, index) => {
- const itemId = readField<String>('id', item);
- const existingIndex = existingIds.findIndex((id) => id === itemId);
- if (existingIndex !== -1) {
- merged[index] = mergeObjects(existing[existingIndex], merged[index]);
+ keyArgs: ['channelId', 'parentId'],
+ merge: (existing = [], incoming = [], { readField }) => {
+ // Use a Map for O(1) lookups instead of array iterations
+ const existingMap = new Map();
+
+ // Populate map with existing messages
+ if (existing && existing.length > 0) {
+ for (let i = 0; i < existing.length; i++) {
+ const id = readField('id', existing[i]);
+ if (id) {
+ existingMap.set(id, existing[i]);
+ }
+ }
+ }
+
+ // Create result array with same capacity as total items
+ const result = [];
+
+ // Add incoming items, overwriting existing ones
+ if (incoming && incoming.length > 0) {
+ for (let i = 0; i < incoming.length; i++) {
+ const item = incoming[i];
+ const id = readField('id', item);
+ if (id) {
+ existingMap.set(id, item);
+ }
+ result.push(item);
  }
- });
- return merged;
+ }
+
+ // Add remaining existing items not in incoming
+ if (existing && existing.length > 0) {
+ for (let i = 0; i < existing.length; i++) {
+ const id = readField('id', existing[i]);
+ if (id && !result.some((item) => readField('id', item) === id)) {
+ result.push(existing[i]);
+ }
+ }
+ }
+
+ return result;
  },
  },
  totalCount: {
- keyArgs: false,
+ keyArgs: ['channelId', 'parentId'],
  merge(existing, incoming) {
- return existing && existing > incoming ? existing : incoming;
+ return incoming !== undefined ? incoming : existing;
  },
  },
  // data: {
@@ -57,7 +83,7 @@ export const messagesPolicies: TypePolicies = {
  },
  },
  PostThreadMessages: {
- // keyFields: ['threadmessagesRefId'],
+ // keyFields: ['threadmessagesRefId'],
  fields: {
  threadmessagesRefId: {
  read(existing, { variables }) {
@@ -76,33 +102,219 @@ export const messagesPolicies: TypePolicies = {
  Query: {
  fields: {
  messages: {
- // keyArgs: ['channelId', 'parentId', 'limit', 'skip'],
- // keyArgs: ['channelId', 'parentId'],
+ keyArgs: ['channelId', 'parentId'],
+ merge(existing, incoming, { args, readField }) {
+ if (!incoming) return existing;
+ if (!existing) return incoming;
+
+ // Fast return if incoming has no data
+ if (!incoming.data || incoming.data.length === 0) {
+ return {
+ ...existing,
+ totalCount: incoming.totalCount ?? existing.totalCount,
+ };
+ }
+
+ // Fast return if existing has no data
+ if (!existing.data || existing.data.length === 0) {
+ return incoming;
+ }
+
+ // Determine if this is likely a new message or pagination
+ const isNewMessage = args && args.skip === 0;
+
+ // Create a map for existing messages for fast lookups
+ const idSet = new Set();
+ const mergedData = [...existing.data];
+
+ // Mark all existing IDs
+ for (let i = 0; i < mergedData.length; i++) {
+ const id = readField('id', mergedData[i]);
+ if (id) idSet.add(id);
+ }
+
+ // Process incoming messages
+ for (let i = 0; i < incoming.data.length; i++) {
+ const incomingMsg = incoming.data[i];
+ const id = readField('id', incomingMsg);
+
+ if (!id) continue;
+
+ if (idSet.has(id)) {
+ // Replace existing message with same ID
+ const existingIndex = mergedData.findIndex((msg) => readField('id', msg) === id);
+ if (existingIndex >= 0) {
+ mergedData[existingIndex] = incomingMsg;
+ }
+ } else {
+ // Add new message
+ if (isNewMessage) {
+ // Add to beginning for new messages
+ mergedData.unshift(incomingMsg);
+ } else {
+ // Add to end for pagination
+ mergedData.push(incomingMsg);
+ }
+ idSet.add(id);
+ }
+ }
+
+ return {
+ ...existing,
+ totalCount: incoming.totalCount ?? existing.totalCount,
+ data: mergedData,
+ };
+ },
+ },
+ },
+ },
+ FilesInfo: {
+ merge: true, // Use default merging behavior for FilesInfo
+ fields: {
+ data: {
+ merge(existing = [], incoming = [], { readField }) {
+ // If no existing data, just return incoming
+ if (!existing || existing.length === 0) {
+ return incoming;
+ }
+
+ // If no incoming data, keep existing
+ if (!incoming || incoming.length === 0) {
+ return existing;
+ }
+
+ // Create a map for efficient lookup
+ const fileMap = new Map();
+
+ // Add existing files to map
+ for (let i = 0; i < existing.length; i++) {
+ const file = existing[i];
+ if (file) {
+ const id = readField('id', file);
+ if (id) {
+ fileMap.set(id, file);
+ }
+ }
+ }
+
+ // Merge in incoming files, overwriting existing ones
+ for (let i = 0; i < incoming.length; i++) {
+ const file = incoming[i];
+ if (file) {
+ const id = readField('id', file);
+ if (id) {
+ fileMap.set(id, file);
+ }
+ }
+ }
+
+ // Convert map values back to array
+ return Array.from(fileMap.values());
+ },
+ },
+ },
+ },
+ FileInfo: {
+ // Use ID as key field
+ keyFields: ['id'],
+ fields: {
+ url: {
+ // Ensure URL is always preserved and not normalized
  merge(existing, incoming) {
+ return incoming ?? existing;
+ },
+ },
+ },
+ },
+ Post: {
+ fields: {
+ replies: {
+ merge(existing, incoming) {
+ if (!incoming) return existing;
+ if (!existing) return incoming;
+
+ // Use a Set for fast duplicate checking
+ const uniqueIds = new Set();
+ const mergedData = [];
+
+ // Add all existing items to the result and track IDs
+ if (existing.data && existing.data.length > 0) {
+ for (let i = 0; i < existing.data.length; i++) {
+ const item = existing.data[i];
+ if (item && item.id && !uniqueIds.has(item.id)) {
+ uniqueIds.add(item.id);
+ mergedData.push(item);
+ }
+ }
+ }
+
+ // Add incoming items that don't exist yet
+ if (incoming.data && incoming.data.length > 0) {
+ for (let i = 0; i < incoming.data.length; i++) {
+ const item = incoming.data[i];
+ if (item && item.id && !uniqueIds.has(item.id)) {
+ uniqueIds.add(item.id);
+ mergedData.push(item);
+ }
+ }
+ }
+
  return {
  ...incoming,
- data: [...(existing?.data ?? []), ...(incoming.data ?? [])],
+ data: mergedData,
+ };
+ },
+ },
+ files: {
+ merge(existing, incoming, { readField }) {
+ if (!incoming) return existing;
+ if (!existing) return incoming;
+
+ // If either has no data or totalCount is 0, prefer the one with data
+ if (!existing.data || existing.data.length === 0) {
+ return incoming;
+ }
+
+ if (!incoming.data || incoming.data.length === 0) {
+ return existing;
+ }
+
+ // Create a map for efficient lookup
+ const fileMap = new Map();
+
+ // Add existing files to map
+ if (existing.data) {
+ for (let i = 0; i < existing.data.length; i++) {
+ const file = existing.data[i];
+ if (file) {
+ const id = readField('id', file);
+ if (id) {
+ fileMap.set(id, file);
+ }
+ }
+ }
+ }
+
+ // Merge in incoming files, overwriting existing ones
+ if (incoming.data) {
+ for (let i = 0; i < incoming.data.length; i++) {
+ const file = incoming.data[i];
+ if (file) {
+ const id = readField('id', file);
+ if (id) {
+ fileMap.set(id, file);
+ }
+ }
+ }
+ }
+
+ // Create merged result
+ return {
+ __typename: 'FilesInfo',
+ totalCount: Math.max(existing.totalCount || 0, incoming.totalCount || 0, fileMap.size),
+ data: Array.from(fileMap.values()),
  };
  },
- // merge(existing, incoming, { args, mergeObjects }) {
- // console.log('existing length', existing?.data?.length);
- // console.log('incoming length', incoming?.data?.length);
- // // console.log('existing', JSON.stringify(existing), 'incoming', JSON.stringify(incoming));
- // if (!incoming || incoming?.data?.length) return existing;
- // if (!existing || existing?.data?.length) return incoming;
- // //console.log('existing', JSON.stringify(existing), 'incoming', JSON.stringify(incoming));
- // const mergedData = existing ? existing?.data?.slice(0) : [];
- // // Insert the incoming elements in the right places, according to args.
- // const end = args?.skip + Math.min(args?.limit, incoming?.data?.length);
- // for (let i = args?.skip; i < end; ++i) {
- // mergedData[i] = incoming?.data[i - args?.skip];
- // }
-
- // const merged = { ...incoming, data: mergedData };
- // return merged;
- // // return mergeObjects(existing, incoming);
- // //return existing ? { ...existing, ...incoming } : incoming;
- // },
  },
  },
  },
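
Replacing `keyArgs: false` with `keyArgs: ['channelId', 'parentId']` means the cache now keeps a separate `messages` entry per channel/thread pair and funnels every page for that pair through the merge function above. A rough consumer-side sketch of the pagination this enables, assuming a hypothetical `GetMessages` query shaped after the fields in this diff (the real operation documents live in this package's .gql files):

```ts
import { gql, useQuery } from '@apollo/client';

// Hypothetical query; field names and argument types are assumed, not confirmed.
const GET_MESSAGES = gql`
  query GetMessages($channelId: String!, $skip: Int, $limit: Int) {
    messages(channelId: $channelId, skip: $skip, limit: $limit) {
      totalCount
      data {
        id
        content
        createdAt
      }
    }
  }
`;

export function useChannelMessages(channelId: string) {
  const { data, fetchMore } = useQuery(GET_MESSAGES, {
    variables: { channelId, skip: 0, limit: 20 },
  });

  // Because keyArgs is ['channelId', 'parentId'], this fetchMore call writes into
  // the same cache entry as the first page; the merge function dedupes by id and
  // appends when skip > 0 (pagination) or prepends when skip === 0 (new messages).
  const loadOlder = () =>
    fetchMore({
      variables: { skip: data?.messages?.data?.length ?? 0, limit: 20 },
    });

  return { messages: data?.messages?.data ?? [], loadOlder };
}
```
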