abmp-npm 2.0.7 → 2.0.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/backend/consts.js CHANGED
@@ -1,4 +1,5 @@
1
1
  const PAC_API_URL = 'https://members.abmp.com/eweb/api/Wix';
2
+ const BACKUP_API_URL = 'https://psdevteamenterpris.wixstudio.com/abmp-backup/_functions';
2
3
  const SSO_TOKEN_AUTH_API_URL = 'https://members.professionalassistcorp.com/';
3
4
 
4
5
  /**
@@ -40,4 +41,5 @@ module.exports = {
40
41
  COMPILED_FILTERS_FIELDS,
41
42
  MEMBERSHIPS_TYPES,
42
43
  SSO_TOKEN_AUTH_API_URL,
44
+ BACKUP_API_URL,
43
45
  };
@@ -1,7 +1,123 @@
1
- const { bulkSaveMembers } = require('../members-data-methods');
1
+ const { bulkSaveMembers, getMemberBySlug } = require('../members-data-methods');
2
2
 
3
3
  const { generateUpdatedMemberData } = require('./process-member-methods');
4
- const { changeWixMembersEmails } = require('./utils');
4
+ const {
5
+ changeWixMembersEmails,
6
+ extractUrlCounter,
7
+ incrementUrlCounter,
8
+ extractBaseUrl,
9
+ } = require('./utils');
10
+
11
+ /**
12
+ * Ensures unique URLs within a batch of members by deduplicating URLs
13
+ * Groups members by their base URL (normalized) and assigns unique counters
14
+ * Also checks database to handle cross-page conflicts
15
+ * @param {Array} memberDataList - Array of processed member data
16
+ * @returns {Promise<Array>} - Array of members with unique URLs assigned
17
+ */
18
/**
 * Ensures unique URLs within a batch of members by deduplicating URLs.
 * Groups members by their base URL (counter suffix stripped) and assigns
 * unique counters; also checks the database to handle cross-page conflicts.
 * Mutates member.url in place on the supplied objects.
 * @param {Array} memberDataList - Array of processed member data
 * @returns {Promise<Array>} - The same array, with unique URLs assigned
 */
async function ensureUniqueUrlsInBatch(memberDataList) {
  if (!Array.isArray(memberDataList) || memberDataList.length === 0) {
    return memberDataList;
  }

  // Group members by their normalized base URL
  const urlGroups = new Map();
  memberDataList.forEach(member => {
    if (!member || !member.url) {
      return;
    }
    const baseUrl = extractBaseUrl(member.url);
    if (!urlGroups.has(baseUrl)) {
      urlGroups.set(baseUrl, []);
    }
    urlGroups.get(baseUrl).push(member);
  });

  // For each group, check database and assign unique URLs sequentially
  for (const [baseUrl, members] of urlGroups.entries()) {
    if (members.length <= 1) {
      // Single member - still check DB to ensure it doesn't conflict with other pages
      const member = members[0];
      if (member) {
        const dbMember = await getMemberBySlug({
          slug: baseUrl,
          excludeDropped: false,
          normalizeSlugForComparison: true,
        });

        if (dbMember && dbMember.url) {
          // Conflict found in DB, need to add counter.
          // Fix: incrementUrlCounter only covers the exact-match case and
          // yields undefined when the stored URL already carries a counter
          // (e.g. "jane-doe-2"); previously that undefined was assigned as
          // the member URL. Fall back to bumping the stored counter instead.
          const dbCounter = extractUrlCounter(dbMember.url);
          member.url =
            incrementUrlCounter(dbMember.url, baseUrl) ??
            `${baseUrl}-${dbCounter >= 0 ? dbCounter + 1 : 1}`;
          console.log(
            `Found DB conflict for single member with base URL "${baseUrl}", assigned: ${member.url}`
          );
        }
      }
      continue;
    }

    // Sort members to ensure consistent ordering
    members.sort((a, b) => {
      if (a.url && b.url) {
        return String(a.url).localeCompare(String(b.url));
      }
      return 0;
    });

    // Check database for existing members with this base URL to find highest counter
    const dbMember = await getMemberBySlug({
      slug: baseUrl,
      excludeDropped: false,
      normalizeSlugForComparison: true,
    });
    const dbMaxCounter = extractUrlCounter(dbMember?.url);

    // Highest counter already present among this batch group's URLs
    // (reuses extractUrlCounter instead of re-implementing the parse inline)
    const batchMaxCounter = members.reduce(
      (max, member) => Math.max(max, extractUrlCounter(member.url)),
      -1
    );

    // Start index from the maximum of DB counter and batch counter + 1
    const startIndex = Math.max(dbMaxCounter, batchMaxCounter) + 1;

    // Assign unique URLs: index 0 means the bare base URL, otherwise append the counter
    members.forEach((member, index) => {
      const assignedIndex = startIndex + index;
      member.url = assignedIndex === 0 ? baseUrl : `${baseUrl}-${assignedIndex}`;
    });

    console.log(
      `Deduplicated ${
        members.length
      } members with base URL "${baseUrl}" (DB max: ${dbMaxCounter}, batch max: ${batchMaxCounter}, start: ${startIndex}): ${members
        .map(m => m.url)
        .join(', ')}`
    );
  }

  return memberDataList;
}
5
121
 
6
122
  /**
7
123
  * Processes and saves multiple member records in bulk
@@ -36,7 +152,6 @@ const bulkProcessAndSaveMemberData = async ({
36
152
  const validMemberData = processedMemberDataList.filter(
37
153
  data => data !== null && data !== undefined
38
154
  );
39
-
40
155
  if (validMemberData.length === 0) {
41
156
  return {
42
157
  totalProcessed: memberDataList.length,
@@ -45,9 +160,14 @@ const bulkProcessAndSaveMemberData = async ({
45
160
  processingTime: Date.now() - startTime,
46
161
  };
47
162
  }
163
+ const newMembers = validMemberData.filter(data => data.isNewToDb);
164
+ const existingMembers = validMemberData.filter(data => !data.isNewToDb);
165
+ // Ensure unique URLs within the batch to prevent duplicates (also checks DB for cross-page conflicts)
166
+ const uniqueUrlsNewToDBMembersList = await ensureUniqueUrlsInBatch(newMembers);
167
+ const uniqueUrlsMembersData = [...uniqueUrlsNewToDBMembersList, ...existingMembers];
48
168
  const toChangeWixMembersEmails = [];
49
- const toSaveMembersData = validMemberData.map(member => {
50
- const { isLoginEmailChanged, ...restMemberData } = member;
169
+ const toSaveMembersData = uniqueUrlsMembersData.map(member => {
170
+ const { isLoginEmailChanged, isNewToDb: _isNewToDb, ...restMemberData } = member;
51
171
  if (member.contactId && isLoginEmailChanged) {
52
172
  toChangeWixMembersEmails.push(member);
53
173
  }
@@ -73,4 +193,4 @@ const bulkProcessAndSaveMemberData = async ({
73
193
  }
74
194
  };
75
195
 
76
- module.exports = { bulkProcessAndSaveMemberData };
196
+ module.exports = { bulkProcessAndSaveMemberData, ensureUniqueUrlsInBatch };
@@ -1,4 +1,5 @@
1
1
  module.exports = {
2
2
  ...require('./sync-to-cms-methods'),
3
3
  ...require('./consts'), //TODO: remove it once we finish NPM movement
4
+ ...require('./bulk-process-methods'),
4
5
  };
@@ -39,7 +39,7 @@ const ensureUniqueUrl = async ({ url, memberId, fullName }) => {
39
39
 
40
40
  const existingMember = await getMemberBySlug({
41
41
  slug: uniqueUrl,
42
- excludeDropped: true,
42
+ excludeDropped: false,
43
43
  excludeSearchedMember: true,
44
44
  memberId,
45
45
  normalizeSlugForComparison: true,
@@ -105,7 +105,7 @@ async function generateUpdatedMemberData({
105
105
  );
106
106
  }
107
107
 
108
- return updatedMemberData;
108
+ return { ...updatedMemberData, isNewToDb: !existingDbMember };
109
109
  }
110
110
 
111
111
  /**
@@ -15,6 +15,36 @@ const changeWixMembersEmails = async toChangeWixMembersEmails => {
15
15
  );
16
16
  };
17
17
 
18
/**
 * Extracts the trailing numeric counter from a URL slug.
 * @param {string} url - Slug such as "jane-doe-2".
 * @returns {number} The counter value, or -1 when the slug is empty or has no trailing number.
 */
const extractUrlCounter = url => {
  if (!url) {
    return -1;
  }
  const segments = url.split('-');
  const tail = segments[segments.length - 1] || '0';
  return /^\d+$/.test(tail) ? Number.parseInt(tail, 10) : -1;
};
24
+
25
/**
 * Strips a trailing numeric counter from a URL slug to get its base form.
 * "jane-doe-2" -> "jane-doe"; a slug without a counter is returned unchanged.
 * @param {string} url - Slug to normalize.
 * @returns {string} The base slug, or the input itself when falsy or counter-less.
 */
const extractBaseUrl = url => {
  if (!url) {
    return url;
  }
  const segments = url.split('-');
  const tail = segments[segments.length - 1];
  // A counter only exists when there is more than one segment and the last one is all digits.
  const hasCounter = segments.length > 1 && /^\d+$/.test(tail);
  return hasCounter ? segments.slice(0, -1).join('-') : url;
};
37
/**
 * Produces the next unique URL for a base slug, given the URL that already
 * exists in the database for that slug.
 * @param {string} existingUrl - Stored URL (may be the bare base or carry a "-N" counter).
 * @param {string} baseUrl - Counter-less base slug.
 * @returns {string|undefined} The next free URL, or undefined when existingUrl
 *   is missing or unrelated to baseUrl.
 */
const incrementUrlCounter = (existingUrl, baseUrl) => {
  if (!existingUrl) {
    return undefined;
  }
  if (existingUrl === baseUrl) {
    console.log(
      `Found member with same url ${existingUrl} for baseUrl ${baseUrl}, increasing counter by 1`
    );
    const lastSegment = existingUrl.split('-').pop() || '0';
    const isNumeric = /^\d+$/.test(lastSegment);
    const lastCounter = isNumeric ? parseInt(lastSegment, 10) : 0;
    return `${baseUrl}-${lastCounter + 1}`;
  }
  // Fix: previously this function implicitly returned undefined whenever the
  // existing URL already carried a counter ("jane-doe-2"), and callers
  // assigned that undefined as a member URL. Increment the counter instead.
  if (existingUrl.startsWith(`${baseUrl}-`)) {
    const suffix = existingUrl.slice(baseUrl.length + 1);
    if (/^\d+$/.test(suffix)) {
      return `${baseUrl}-${parseInt(suffix, 10) + 1}`;
    }
  }
  // Unrelated URL: nothing sensible to increment.
  return undefined;
};
18
48
  /**
19
49
  * Validates core member data requirements
20
50
  * @param {Object} inputMemberData - Raw member data from API to validate
@@ -75,4 +105,7 @@ module.exports = {
75
105
  validateCoreMemberData,
76
106
  containsNonEnglish,
77
107
  createFullName,
108
+ extractUrlCounter,
109
+ incrementUrlCounter,
110
+ extractBaseUrl,
78
111
  };
@@ -0,0 +1,18 @@
1
+ const { ensureUniqueUrlsInBatch } = require('./daily-pull/bulk-process-methods');
2
+ const { wixData } = require('./elevated-modules');
3
+ const { bulkSaveMembers } = require('./members-data-methods');
4
+ const { queryAllItems } = require('./utils');
5
+
6
/**
 * Dev-only utility: re-saves members whose URLs appear in duplicateUrlsList
 * after assigning each of them a unique URL via ensureUniqueUrlsInBatch.
 * @param {string} collectionName - Collection to read from and save back to.
 * @param {Array<string>} duplicateUrlsList - URLs known to be duplicated.
 * @returns {Promise<Object>} Result of the bulk save operation.
 */
const deduplicateURls = async (collectionName, duplicateUrlsList) => {
  // NOTE(review): awaiting the query builder here is a no-op kept as-is;
  // queryAllItems performs the actual find() and pagination.
  const query = await wixData.query(collectionName).hasSome('url', duplicateUrlsList).limit(1000);

  const membersWithSameUrl = await queryAllItems(query);
  console.log({ membersWithSameUrl });

  const membersWithUniqueUrls = await ensureUniqueUrlsInBatch(membersWithSameUrl);
  console.log({ membersWithUniqueUrls });

  const deduplicatedUrls = membersWithUniqueUrls.map(member => member.url);
  console.log({ deduplicatedUrls });

  return bulkSaveMembers(membersWithUniqueUrls, collectionName);
};
17
+
18
+ module.exports = { deduplicateURls };
@@ -12,7 +12,7 @@ const wixData = {
12
12
  get: auth.elevate(items.get),
13
13
  truncate: auth.elevate(items.truncate),
14
14
  bulkSave: auth.elevate(items.bulkSave),
15
+ search: auth.elevate(items.search),
15
16
  //TODO: add other methods here as needed
16
17
  };
17
-
18
18
  module.exports = { wixData };
package/backend/index.js CHANGED
@@ -13,4 +13,5 @@ module.exports = {
13
13
  ...require('./login'),
14
14
  ...require('./data-hooks'),
15
15
  ...require('./http-functions'),
16
+ ...require('./dev-only-methods'),
16
17
  };
@@ -10,6 +10,7 @@ const {
10
10
  normalizeUrlForComparison,
11
11
  queryAllItems,
12
12
  generateGeoHash,
13
+ searchAllItems,
13
14
  } = require('./utils');
14
15
 
15
16
  /**
@@ -60,9 +61,10 @@ async function createContactAndMemberIfNew(memberData) {
60
61
 
61
62
  /** Performs bulk save operation for member data
62
63
  * @param { Array } memberDataList - Array of member data objects to save
64
+ * @param { string } [collectionName] - The collection name to save the members to (default: COLLECTIONS.MEMBERS_DATA)
63
65
  * @returns { Promise < Object >} - Bulk save operation result
64
66
  */
65
- async function bulkSaveMembers(memberDataList) {
67
+ async function bulkSaveMembers(memberDataList, collectionName = COLLECTIONS.MEMBERS_DATA) {
66
68
  if (!Array.isArray(memberDataList) || memberDataList.length === 0) {
67
69
  throw new Error('Invalid member data list provided for bulk save');
68
70
  }
@@ -70,9 +72,7 @@ async function bulkSaveMembers(memberDataList) {
70
72
  try {
71
73
  // bulkSave all with batches of 1000 items as this is the Velo limit for bulkSave
72
74
  const batches = chunkArray(memberDataList, 1000);
73
- return await Promise.all(
74
- batches.map(batch => wixData.bulkSave(COLLECTIONS.MEMBERS_DATA, batch))
75
- );
75
+ return await Promise.all(batches.map(batch => wixData.bulkSave(collectionName, batch)));
76
76
  } catch (error) {
77
77
  console.error('Error bulk saving members:', error);
78
78
  throw new Error(`Bulk save failed: ${error.message}`);
@@ -121,7 +121,7 @@ async function getMemberBySlug({
121
121
  if (!slug) return null;
122
122
 
123
123
  try {
124
- let query = wixData.query(COLLECTIONS.MEMBERS_DATA).contains('url', slug);
124
+ let query = wixData.search(COLLECTIONS.MEMBERS_DATA).expression(slug);
125
125
 
126
126
  if (excludeDropped) {
127
127
  query = query.ne('action', 'drop');
@@ -131,7 +131,8 @@ async function getMemberBySlug({
131
131
  query = query.ne('memberId', memberId);
132
132
  }
133
133
  query = query.limit(1000);
134
- const membersList = await queryAllItems(query);
134
+ const searchResult = await searchAllItems(query);
135
+ const membersList = searchResult.filter(item => item.url && item.url.includes(slug)); //replacement for contains
135
136
  let matchingMembers = membersList.filter(
136
137
  item => item.url && item.url.toLowerCase() === slug.toLowerCase()
137
138
  );
@@ -268,23 +269,13 @@ async function urlExists(url, excludeMemberId) {
268
269
  if (!url) return false;
269
270
 
270
271
  try {
271
- let query = wixData
272
- .query(COLLECTIONS.MEMBERS_DATA)
273
- .contains('url', url)
274
- .ne('action', MEMBER_ACTIONS.DROP);
275
-
276
- if (excludeMemberId) {
277
- query = query.ne('memberId', excludeMemberId);
278
- }
279
-
280
- const { items } = await query.find();
281
-
282
- // Case-insensitive comparison
283
- const matchingMembers = items.filter(
284
- item => item.url && item.url.toLowerCase() === url.toLowerCase()
285
- );
286
-
287
- return matchingMembers.length > 0;
272
+ const member = await getMemberBySlug({
273
+ slug: url,
274
+ excludeDropped: false,
275
+ excludeSearchedMember: true,
276
+ memberId: excludeMemberId,
277
+ });
278
+ return member !== null;
288
279
  } catch (error) {
289
280
  console.error('Error checking URL existence:', error);
290
281
  return false;
package/backend/utils.js CHANGED
@@ -107,18 +107,28 @@ function getAddressesByStatus(addresses = [], addressDisplayOption = []) {
107
107
  })
108
108
  .filter(Boolean);
109
109
  }
110
/**
 * Drains a Wix Data paged result set into a single array.
 * Pages are walked via hasNext()/next(); the first page's items array is
 * reused as the accumulator (i.e. it is mutated in place).
 * @param {Object} querySearchResult - First page of a query/search result.
 * @returns {Promise<Array>} All items across every page.
 */
const getAllItems = async querySearchResult => {
  let page = querySearchResult;
  console.log(`found items: ${page.items.length}`);
  const allItems = page.items;
  while (page.hasNext()) {
    page = await page.next();
    allItems.push(...page.items);
  }
  console.log(`all items count : ${allItems.length}`);
  return allItems;
};

/**
 * Runs a Wix Data search query and collects all items across pages.
 * @param {Object} searchQuery - Search builder exposing run().
 * @returns {Promise<Array>} All matching items.
 */
const searchAllItems = async searchQuery => {
  console.log('start search');
  return getAllItems(await searchQuery.run());
};

/**
 * Runs a Wix Data query and collects all items across pages.
 * @param {Object} query - Query builder exposing find().
 * @returns {Promise<Array>} All matching items.
 */
const queryAllItems = async query => {
  console.log('start query');
  return getAllItems(await query.find());
};
122
132
  /**
123
133
  * Chunks large arrays into smaller chunks for processing
124
134
  * @param {Array} array - Array to chunk
@@ -206,4 +216,5 @@ module.exports = {
206
216
  formatDateOnly,
207
217
  getAddressesByStatus,
208
218
  isPAC_STAFF,
219
+ searchAllItems,
209
220
  };
@@ -0,0 +1,201 @@
1
+ const fs = require('fs');
2
+ const path = require('path');
3
+
4
+ const csv = require('csv-parser');
5
+
6
+ /**
7
+ * Finds duplicate URLs in a CSV file and generates a JSON report
8
+ * Usage: node scripts/find-duplicate-urls.js <path-to-csv-file>
9
+ */
10
/**
 * Finds duplicate URLs in a CSV file and generates a JSON report.
 * Usage: node scripts/find-duplicate-urls.js <path-to-csv-file>
 * Streams the CSV, groups memberIds by URL, writes
 * "<name>-duplicate-urls-report.json" next to the input file, and logs a summary.
 * @param {string} csvFilePath - Path to a CSV containing "url" and "memberId" columns.
 * @returns {Promise<Object>} Resolves with the generated report object.
 */
function findDuplicateUrls(csvFilePath) {
  // Validate command-line argument
  if (!csvFilePath) {
    console.error('Error: CSV file path is required');
    console.error('Usage: node scripts/find-duplicate-urls.js <path-to-csv-file>');
    process.exit(1);
  }

  // Validate file exists and is readable
  if (!fs.existsSync(csvFilePath)) {
    console.error(`Error: File not found: ${csvFilePath}`);
    process.exit(1);
  }

  // Resolves the actual "url"/"memberId" column names from raw CSV headers.
  // Headers are normalized (quotes stripped, trimmed, lowercased) for the
  // lookup; the process exits when either required column is missing.
  // Extracted to remove the copy that was previously duplicated between the
  // 'headers' and 'data' handlers.
  const resolveColumns = rawHeaders => {
    const normalizedHeaders = rawHeaders.map(h =>
      String(h).trim().replace(/["']/g, '').toLowerCase().trim()
    );
    const urlIndex = normalizedHeaders.indexOf('url');
    const memberIdIndex = normalizedHeaders.indexOf('memberid');

    if (urlIndex === -1 || memberIdIndex === -1) {
      console.error('Error: CSV must contain "url" and "memberId" columns (case-insensitive)');
      console.error(`Found columns: ${rawHeaders.join(', ')}`);
      console.error(`Normalized columns: ${normalizedHeaders.join(', ')}`);
      process.exit(1);
    }

    // Return the actual column names (original casing/quotes preserved)
    return {
      urlColumnName: rawHeaders[urlIndex],
      memberIdColumnName: rawHeaders[memberIdIndex],
    };
  };

  const urlMap = new Map(); // url -> [memberId1, memberId2, ...]
  let totalMembers = 0;
  let rowNumber = 0;
  let headersValidated = false;
  let urlColumnName = null;
  let memberIdColumnName = null;

  return new Promise((resolve, reject) => {
    fs.createReadStream(csvFilePath)
      .pipe(csv())
      .on('headers', receivedHeaders => {
        ({ urlColumnName, memberIdColumnName } = resolveColumns(receivedHeaders));
        headersValidated = true;
      })
      .on('data', row => {
        // Fall back to the row's own keys if the 'headers' event didn't fire
        if (!headersValidated) {
          ({ urlColumnName, memberIdColumnName } = resolveColumns(Object.keys(row)));
          headersValidated = true;
        }

        rowNumber++;
        totalMembers++;

        // Get URL and memberId using the actual column names from headers
        const url = row[urlColumnName];
        const memberId = row[memberIdColumnName];

        // Skip rows with missing URL or memberId
        if (!url || !memberId) {
          console.warn(
            `Warning: Row ${rowNumber} skipped - missing url or memberId (url: ${url}, memberId: ${memberId})`
          );
          return;
        }

        const trimmedUrl = url.trim();
        const trimmedMemberId = memberId.trim();

        // Track URL occurrences
        if (!urlMap.has(trimmedUrl)) {
          urlMap.set(trimmedUrl, []);
        }
        urlMap.get(trimmedUrl).push(trimmedMemberId);
      })
      .on('error', error => {
        console.error('Error reading CSV file:', error.message);
        reject(error);
      })
      .on('end', () => {
        if (!headersValidated) {
          console.error('Error: Could not read CSV headers');
          process.exit(1);
        }

        // Find duplicates (URLs with count > 1)
        const duplicateUrls = [];
        let totalDuplicates = 0;
        for (const [url, memberIds] of urlMap.entries()) {
          if (memberIds.length > 1) {
            duplicateUrls.push({ url, count: memberIds.length, memberIds });
            totalDuplicates += memberIds.length;
          }
        }

        // Sort by count (descending) then by URL (ascending)
        duplicateUrls.sort((a, b) => b.count - a.count || a.url.localeCompare(b.url));

        const totalUniqueUrls = urlMap.size;
        const uniqueDuplicateUrls = duplicateUrls.length;
        // Simple list of just the duplicated URL strings
        const duplicatedUrlsList = duplicateUrls.map(item => item.url);

        const report = {
          totalMembers,
          totalUniqueUrls,
          duplicateUrls,
          duplicatedUrlsList,
          summary: { totalDuplicates, uniqueDuplicateUrls },
        };

        // Write the JSON report next to the input CSV
        const csvDir = path.dirname(csvFilePath);
        const csvBasename = path.basename(csvFilePath, path.extname(csvFilePath));
        const outputPath = path.join(csvDir, `${csvBasename}-duplicate-urls-report.json`);
        fs.writeFileSync(outputPath, JSON.stringify(report, null, 2), 'utf8');

        console.log('\n=== Duplicate URL Report ===');
        console.log(`Total members processed: ${totalMembers}`);
        console.log(`Total unique URLs: ${totalUniqueUrls}`);
        console.log(`Unique URLs with duplicates: ${uniqueDuplicateUrls}`);
        console.log(`Total duplicate entries: ${totalDuplicates}`);
        console.log(`\nReport saved to: ${outputPath}`);
        console.log(`\nTop 10 most duplicated URLs:`);
        duplicateUrls.slice(0, 10).forEach((item, index) => {
          console.log(
            `  ${index + 1}. "${item.url}" - appears ${item.count} times (memberIds: ${item.memberIds.join(', ')})`
          );
        });

        resolve(report);
      });
  });
}
191
+
192
// CLI entry point: run the scan only when this file is executed directly.
if (require.main === module) {
  const [, , inputCsvPath] = process.argv;
  findDuplicateUrls(inputCsvPath).catch(err => {
    console.error('Fatal error:', err.message);
    process.exit(1);
  });
}

module.exports = { findDuplicateUrls };
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "abmp-npm",
3
- "version": "2.0.7",
3
+ "version": "2.0.10",
4
4
  "main": "index.js",
5
5
  "scripts": {
6
6
  "check-cycles": "madge --circular .",
@@ -9,7 +9,8 @@
9
9
  "lint:fix": "eslint . --fix",
10
10
  "format": "prettier --write \"**/*.{js,json,md}\"",
11
11
  "format:check": "prettier --check \"**/*.{js,json,md}\"",
12
- "prepare": "husky"
12
+ "prepare": "husky",
13
+ "find-duplicates": "node dev-only-scripts/find-duplicate-urls.js"
13
14
  },
14
15
  "author": "",
15
16
  "license": "ISC",
@@ -46,6 +47,7 @@
46
47
  "crypto": "^1.0.1",
47
48
  "jwt-js-decode": "^1.9.0",
48
49
  "lodash": "^4.17.21",
50
+ "csv-parser": "^3.0.0",
49
51
  "ngeohash": "^0.6.3",
50
52
  "phone": "^3.1.67",
51
53
  "psdev-task-manager": "1.1.7",
@@ -1106,6 +1106,7 @@ async function personalDetailsOnReady({
1106
1106
  });
1107
1107
  _$w('#profileLink').text = newProfileLink;
1108
1108
  _$w('#profileLink').link = newProfileLink;
1109
+ _$w('#urlWebsiteText').text = newProfileLink;
1109
1110
 
1110
1111
  _$w(SLUG_FLAGS.VALID).collapse();
1111
1112
  _$w(SLUG_FLAGS.INVALID).collapse();
@@ -1858,8 +1859,15 @@ async function personalDetailsOnReady({
1858
1859
  itemMemberObj.toShowPhone = null;
1859
1860
  }
1860
1861
 
1862
+ if (itemMemberObj.phones) {
1863
+ itemMemberObj.phones = itemMemberObj.phones.filter(
1864
+ phone => phone !== phoneToRemove.phoneNumber
1865
+ );
1866
+ }
1867
+
1861
1868
  const updatedData = currentData.filter(item => item._id !== phoneId);
1862
1869
  renderPhonesList(updatedData);
1870
+ checkFormChanges(FORM_SECTION_HANDLER_MAP.CONTACT_BOOKING);
1863
1871
  }
1864
1872
  }
1865
1873