apps-sdk 2.1.0 → 2.1.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/config.js CHANGED
@@ -1,27 +1,61 @@
1
1
  const config = {
2
- ENDPOINTS: {
3
- CONTENT: "https://backend.ailandsapp.com/content",
4
- PAYMENT_CARD: "https://backend.ailandsapp.com/payment-card",
5
- EVENTS_PUSH: "https://backend.ailandsapp.com/event/push",
6
- ATTRIBUTION_SET_ID: "https://backend.ailandsapp.com/user/set-attribution-data",
7
- ATTRIBUTION_SET_DATA: "https://backend.ailandsapp.com/user/set-adjust-data",
8
- USER_CREATE_ID: "https://backend.ailandsapp.com/user/create-id",
9
- NOTIFICATION_SET_TOKEN: "https://backend.ailandsapp.com/user/notification-token",
10
- CONFIG: "https://backend.ailandsapp.com/core/config",
11
- SUB_NEW: "https://backend.ailandsapp.com/core/sub-new",
12
- SUB_STATUS: "https://backend.ailandsapp.com/core/sub-status",
13
- LOCALIZE: "https://backend.ailandsapp.com",
14
- AUDIENCES: "https://backend.ailandsapp.com",
15
- CONTENTS: "https://backend.ailandsapp.com",
2
+ // ========================================
3
+ // BASE URLS (Dynamic - configurable)
4
+ // ========================================
5
+ BASE_URLS: {
6
+ CORE: "https://backend.ailandsapp.com",// Default, can be overridden from app
16
7
  EVENTS: "https://ap0404.gways.org",
17
- LEGAL_BASE: "https://bc1742.gways.org/legal",
18
- WEBAPP: null, // Will be set from app config
8
+ WEBAPP: "https://bc1742.gways.org", // Default, can be overridden from app
9
+ },
10
+
11
+ // ========================================
12
+ // PATHS - relative endpoint paths, resolved against BASE_URLS
13
+ // ========================================
14
+ CORE_PATHS: {
15
+ CONTENT: "/content",
16
+ PAYMENT_CARD: "/payment-card",
17
+ EVENTS_PUSH: "/event/push",
18
+ ATTRIBUTION_SET_ID: "/user/set-attribution-data",
19
+ ATTRIBUTION_SET_DATA: "/user/set-adjust-data",
20
+ USER_CREATE_ID: "/user/create-id",
21
+ NOTIFICATION_SET_TOKEN: "/user/notification-token",
22
+ CONFIG: "/core/config",
23
+ SUB_NEW: "/core/sub-new",
24
+ SUB_STATUS: "/core/sub-status",
19
25
  },
20
26
 
21
27
  WEBAPP_PATHS: {
22
28
  GET_CREDITS: "/user/get-credits",
23
29
  },
24
30
 
31
+ LEGAL_PATHS: {
32
+ BASE: "/legal",
33
+ },
34
+
35
+ // ========================================
36
+ // COMPUTED ENDPOINTS (dynamic getter)
37
+ // ========================================
38
+ get ENDPOINTS() {
39
+ return {
40
+ CONTENT: `${this.BASE_URLS.CORE}${this.CORE_PATHS.CONTENT}`,
41
+ PAYMENT_CARD: `${this.BASE_URLS.CORE}${this.CORE_PATHS.PAYMENT_CARD}`,
42
+ EVENTS_PUSH: `${this.BASE_URLS.CORE}${this.CORE_PATHS.EVENTS_PUSH}`,
43
+ ATTRIBUTION_SET_ID: `${this.BASE_URLS.CORE}${this.CORE_PATHS.ATTRIBUTION_SET_ID}`,
44
+ ATTRIBUTION_SET_DATA: `${this.BASE_URLS.CORE}${this.CORE_PATHS.ATTRIBUTION_SET_DATA}`,
45
+ USER_CREATE_ID: `${this.BASE_URLS.CORE}${this.CORE_PATHS.USER_CREATE_ID}`,
46
+ NOTIFICATION_SET_TOKEN: `${this.BASE_URLS.CORE}${this.CORE_PATHS.NOTIFICATION_SET_TOKEN}`,
47
+ CONFIG: `${this.BASE_URLS.CORE}${this.CORE_PATHS.CONFIG}`,
48
+ SUB_NEW: `${this.BASE_URLS.CORE}${this.CORE_PATHS.SUB_NEW}`,
49
+ SUB_STATUS: `${this.BASE_URLS.CORE}${this.CORE_PATHS.SUB_STATUS}`,
50
+ LOCALIZE: this.BASE_URLS.CORE,
51
+ AUDIENCES: this.BASE_URLS.CORE,
52
+ CONTENTS: this.BASE_URLS.CORE,
53
+ EVENTS: this.BASE_URLS.EVENTS,
54
+ LEGAL_BASE: `${this.BASE_URLS.WEBAPP}${this.LEGAL_PATHS.BASE}`, // LEGAL uses WEBAPP base
55
+ WEBAPP: this.BASE_URLS.WEBAPP,
56
+ };
57
+ },
58
+
25
59
  EVENTS: {},
26
60
  EVENTS_MIXPANEL: {},
27
61
 
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "apps-sdk",
3
- "version": "2.1.0",
4
- "description": "Apps SDK - Compatible with Expo SDK 54 + React 19 - Firebase optional",
3
+ "version": "2.1.2",
4
+ "description": "Apps SDK - Compatible with Expo SDK 54 + React 19 - create User pre url",
5
5
  "main": "index.js",
6
6
  "author": "ASD",
7
7
  "license": "ISC",
@@ -126,12 +126,23 @@ class Networking {
126
126
  }
127
127
 
128
128
  setEndpoints(domains) {
129
+ // Update BASE_URLS instead of ENDPOINTS directly
129
130
  for (let key in domains) {
130
131
  if (domains.hasOwnProperty(key)) {
131
- config.ENDPOINTS[key.toUpperCase()] = domains[key];
132
+ const upperKey = key.toUpperCase();
133
+ // Map backend domains to BASE_URLS
134
+ if (upperKey === 'CONTENTS') {
135
+ config.BASE_URLS.CORE = domains[key];
136
+ } else if (upperKey === 'EVENTS') {
137
+ config.BASE_URLS.EVENTS = domains[key];
138
+ } else if (upperKey === 'LEGAL') {
139
+ // LEGAL uses WEBAPP, no separate BASE_URL
140
+ config.BASE_URLS.WEBAPP = domains[key];
141
+ }
132
142
  }
133
143
  }
134
- storage.storeData("ENDPOINTS", config.ENDPOINTS);
144
+ // Store updated BASE_URLS
145
+ storage.storeData("BASE_URLS", config.BASE_URLS);
135
146
  }
136
147
 
137
148
  setImageCompression(compression) {
@@ -26,10 +26,20 @@ class Session {
26
26
  }
27
27
 
28
28
  setConfigEndpoint = (endpoint) => {
29
- config.DEBUG_MODE && console.debug("setInitEndpoint - endpoint: ", endpoint);
29
+ config.DEBUG_MODE && console.debug("setConfigEndpoint - endpoint: ", endpoint);
30
30
  config.ENDPOINTS.CONFIG = endpoint;
31
31
  }
32
32
 
33
+ setUserCreateEndpoint = (endpoint) => {
34
+ config.DEBUG_MODE && console.debug("setUserCreateEndpoint - endpoint: ", endpoint);
35
+ config.ENDPOINTS.USER_CREATE_ID = endpoint;
36
+ }
37
+
38
+ setBaseUrl = (baseUrl) => {
39
+ config.DEBUG_MODE && console.debug("setBaseUrl - baseUrl: ", baseUrl);
40
+ config.BASE_URLS.CORE = baseUrl;
41
+ }
42
+
33
43
  initSession = async () => {
34
44
  config.DEBUG_MODE && console.debug("initSession");
35
45
  await Networking.sendEvent(config.EVENT_TYPES.OTHER, 'init_session');
@@ -5,16 +5,75 @@ import * as Speech from 'expo-speech';
5
5
  import { franc } from 'franc-min';
6
6
  import Networking from './Networking';
7
7
  import MixPanel from './MixPanel';
8
+ import { Platform } from 'react-native';
9
+
10
+ const ANDROID_STOP_DELAY_MS = 400;
8
11
 
9
12
  class VoiceService {
10
13
  constructor() {
11
14
  this.inactivityTimeout = null;
12
- this.resultListener = null;
13
- this.volumeListener = null;
15
+
16
+ // Tracked listeners — always removed before re-registering
17
+ this.startListener = null;
18
+ this.resultListener = null;
19
+ this.errorListener = null;
20
+ this.volumeListener = null;
21
+
22
+ // State flags used to prevent race conditions
14
23
  this.isRecognizing = false;
24
+ this.isStarting = false;
25
+ this.isStopping = false;
26
+
27
+ // Android delivers speech in segments; we accumulate finalized segments here
28
+ // so callers always receive the full transcript, not just the latest chunk.
29
+ this._committedTranscript = '';
30
+ }
31
+
32
+ // ─────────────────────────────────────────────
33
+ // Internal helpers
34
+ // ─────────────────────────────────────────────
35
+
36
+ /** Remove ALL tracked listeners in one call. */
37
+ _removeAllListeners() {
38
+ const listeners = ['startListener', 'resultListener', 'errorListener', 'volumeListener'];
39
+ listeners.forEach(key => {
40
+ if (this[key]) {
41
+ try { this[key].remove(); } catch (_) {}
42
+ this[key] = null;
43
+ }
44
+ });
45
+ }
46
+
47
+ /** Wait for a given number of milliseconds. */
48
+ _delay(ms) {
49
+ return new Promise(resolve => setTimeout(resolve, ms));
15
50
  }
16
51
 
17
- async startRecognizing(onSpeechStart, onSpeechRecognized, onSpeechResults, onInactivityTimeout, inactivitySeconds = 3, onVolumeChange = null, onNoSpeech = null) {
52
+ // ─────────────────────────────────────────────
53
+ // Public API
54
+ // ─────────────────────────────────────────────
55
+
56
+ async startRecognizing(
57
+ onSpeechStart,
58
+ onSpeechRecognized,
59
+ onSpeechResults,
60
+ onInactivityTimeout,
61
+ inactivitySeconds = 3,
62
+ onVolumeChange = null,
63
+ onNoSpeech = null
64
+ ) {
65
+ // Prevent concurrent start calls
66
+ if (this.isStarting || this.isStopping) {
67
+ config.DEBUG_MODE && console.log('Voice: startRecognizing skipped — transition in progress');
68
+ return;
69
+ }
70
+ if (this.isRecognizing) {
71
+ config.DEBUG_MODE && console.log('Voice: startRecognizing skipped — already recognizing');
72
+ return;
73
+ }
74
+
75
+ this.isStarting = true;
76
+
18
77
  try {
19
78
  const { status } = await ExpoSpeechRecognitionModule.requestPermissionsAsync();
20
79
  if (status !== 'granted') {
@@ -33,36 +92,56 @@ class VoiceService {
33
92
  let language = Session.getDeviceLanguageAndRegion();
34
93
  language = this.normalizeLocale(language);
35
94
 
36
- if (this.resultListener) {
37
- this.resultListener.remove();
38
- }
95
+ // Always remove stale listeners before registering new ones
96
+ this._removeAllListeners();
39
97
 
40
- this.volumeListener = ExpoSpeechRecognitionModule.addListener("volumechange" , ({value}) => {
41
- if (onVolumeChange) {
42
- onVolumeChange(value);
43
- }
44
- })
98
+ // ── volume ──────────────────────────────────────────────────
99
+ this.volumeListener = ExpoSpeechRecognitionModule.addListener('volumechange', ({ value }) => {
100
+ if (onVolumeChange) onVolumeChange(value);
101
+ });
45
102
 
46
- ExpoSpeechRecognitionModule.addListener('start', () => {
103
+ // ── start ───────────────────────────────────────────────────
104
+ this.startListener = ExpoSpeechRecognitionModule.addListener('start', () => {
105
+ console.log('SPEECH - start');
106
+ // Reset accumulator at the beginning of each new recognition session
107
+ this._committedTranscript = '';
47
108
  onSpeechStart();
48
109
  this.resetInactivityTimeout(inactivitySeconds, onInactivityTimeout);
49
110
  });
50
111
 
112
+ // ── result ──────────────────────────────────────────────────
51
113
  this.resultListener = ExpoSpeechRecognitionModule.addListener('result', (event) => {
114
+ console.log('SPEECH - result', event);
52
115
  if (event.results && event.results.length > 0) {
53
- const results = event.results.map(r => r.transcript);
54
- onSpeechResults(results);
116
+ // event.results[0] is the best hypothesis for the current segment
117
+ const currentSegment = event.results[0].transcript || '';
118
+ const isFinal = event.isFinal ?? false;
119
+
120
+ // Build the full transcript: everything committed so far + current segment
121
+ const fullTranscript = (this._committedTranscript + ' ' + currentSegment).trim();
122
+
123
+
124
+ // Always pass the full accumulated transcript to the caller
125
+ onSpeechResults([fullTranscript]);
126
+
127
+ // When the engine finalises a segment, commit it
128
+ if (isFinal) {
129
+ this._committedTranscript = fullTranscript;
130
+ console.log('SPEECH - segment committed:', this._committedTranscript);
131
+ }
132
+
55
133
  this.resetInactivityTimeout(inactivitySeconds, onInactivityTimeout);
56
134
  }
57
135
  });
58
136
 
59
- ExpoSpeechRecognitionModule.addListener('error', (event) => {
137
+ // ── error ───────────────────────────────────────────────────
138
+ this.errorListener = ExpoSpeechRecognitionModule.addListener('error', (event) => {
139
+ config.DEBUG_MODE && console.log('Voice - ERROR', event);
60
140
  console.error('Speech recognition error:', event.error, event.message);
61
-
62
- // Handle "no-speech" error specifically
141
+
63
142
  if (event.error === 'no-speech' && onNoSpeech) {
64
143
  onNoSpeech();
65
- // Don't stop recognition, just notify user
144
+ // Non-fatal: just notify; recognition keeps running
66
145
  }
67
146
  });
68
147
 
@@ -71,51 +150,81 @@ class VoiceService {
71
150
  interimResults: true,
72
151
  maxAlternatives: 1,
73
152
  continuous: true,
74
- volumeChangeEventOptions : {
75
- enabled : true,
76
- intervalMillis:150
77
- }
153
+ volumeChangeEventOptions: {
154
+ enabled: true,
155
+ intervalMillis: 150,
156
+ },
78
157
  });
79
158
 
80
159
  this.isRecognizing = true;
81
160
 
82
161
  } catch (e) {
83
- console.error('Speech recognition error:', e);
162
+ console.error('Speech recognition start error:', e);
163
+ this._removeAllListeners();
84
164
  this.isRecognizing = false;
165
+ } finally {
166
+ this.isStarting = false;
85
167
  }
86
168
  }
87
169
 
88
170
  resetInactivityTimeout(seconds, onInactivityTimeout) {
171
+ // Always clear any existing timer first
89
172
  if (this.inactivityTimeout) {
90
173
  clearTimeout(this.inactivityTimeout);
174
+ this.inactivityTimeout = null;
91
175
  }
92
- this.inactivityTimeout = setTimeout(() => {
93
- this.stopRecognizing();
94
- if (onInactivityTimeout) {
95
- onInactivityTimeout();
96
- }
176
+
177
+ // A falsy / zero value means "run indefinitely — no auto-stop"
178
+ if (!seconds) {
179
+ config.DEBUG_MODE && console.log('Voice: inactivity timeout disabled (indefinite recognition)');
180
+ return;
181
+ }
182
+
183
+ this.inactivityTimeout = setTimeout(async () => {
184
+ // Await full stop (incl. Android delay) before notifying the caller.
185
+ // Without this await, onInactivityTimeout fires while isStopping=true
186
+ // and a secondary destroyVoice() call gets skipped by the guard.
187
+ await this.stopRecognizing();
188
+ if (onInactivityTimeout) onInactivityTimeout();
97
189
  }, seconds * 1000);
98
190
  }
99
191
 
100
192
  async stopRecognizing() {
193
+ // Prevent concurrent stop calls or stopping when already idle
194
+ if (this.isStopping) {
195
+ config.DEBUG_MODE && console.log('Voice: stopRecognizing skipped — already stopping');
196
+ return;
197
+ }
198
+ if (!this.isRecognizing && !this.isStarting) {
199
+ config.DEBUG_MODE && console.log('Voice: stopRecognizing skipped — not active');
200
+ return;
201
+ }
202
+
203
+ this.isStopping = true;
204
+
101
205
  try {
206
+ // Clear the inactivity timer first
102
207
  if (this.inactivityTimeout) {
103
208
  clearTimeout(this.inactivityTimeout);
209
+ this.inactivityTimeout = null;
104
210
  }
105
- if (this.resultListener) {
106
- this.resultListener.remove();
107
- this.resultListener = null;
108
- }
109
- if (this.volumeListener) {
110
- this.volumeListener.remove();
111
- this.volumeListener = null;
112
- }
211
+
212
+ // Remove all event listeners before stopping the engine
213
+ this._removeAllListeners();
214
+
113
215
  if (this.isRecognizing) {
216
+ // Android needs a short pause before stop() is accepted reliably
217
+ if (Platform.OS === 'android') {
218
+ await this._delay(ANDROID_STOP_DELAY_MS);
219
+ }
114
220
  await ExpoSpeechRecognitionModule.stop();
115
221
  this.isRecognizing = false;
116
222
  }
117
223
  } catch (e) {
118
224
  console.error('Stop recognition error:', e);
225
+ this.isRecognizing = false;
226
+ } finally {
227
+ this.isStopping = false;
119
228
  }
120
229
  }
121
230
 
@@ -129,7 +238,7 @@ class VoiceService {
129
238
 
130
239
  normalizeLocale(locale) {
131
240
  const [lang, region] = locale.split('-');
132
-
241
+
133
242
  const validCombinations = {
134
243
  'en': ['US', 'GB', 'AU', 'CA', 'NZ', 'ZA', 'AE', 'IN', 'SG', 'IE', 'PH', 'SA', 'ID'],
135
244
  'es': ['ES', 'MX', 'CL', 'CO', '419', 'US'],
@@ -170,7 +279,7 @@ class VoiceService {
170
279
  }
171
280
 
172
281
  const validRegions = validCombinations[lang];
173
-
282
+
174
283
  if (validRegions.includes(region)) {
175
284
  return locale;
176
285
  }
@@ -205,21 +314,19 @@ class VoiceService {
205
314
  const detectedLanguage = languageMap[detectedLanguageCode] || 'en_US';
206
315
  const finalLanguage = detectedLanguage || Session.getDeviceLanguageAndRegion();
207
316
  const finalVoice = voice || `${finalLanguage}-voice`;
208
- config.DEBUG_MODE && console.log('Speech detected language:', detectedLanguage, 'final language:', finalLanguage, 'final voice:', finalVoice);
317
+ config.DEBUG_MODE && console.log(
318
+ 'Speech detected language:', detectedLanguage,
319
+ 'final language:', finalLanguage,
320
+ 'final voice:', finalVoice
321
+ );
209
322
 
210
323
  Speech.speak(message, {
211
324
  language: finalLanguage,
212
325
  rate: 1,
213
326
  pitch: 1,
214
- onStart: () => {
215
- if (onStart) onStart();
216
- },
217
- onDone: () => {
218
- if (onDone) onDone();
219
- },
220
- onError: (error) => {
221
- if (onError) onError(error);
222
- }
327
+ onStart: () => { if (onStart) onStart(); },
328
+ onDone: () => { if (onDone) onDone(); },
329
+ onError: (error) => { if (onError) onError(error); },
223
330
  });
224
331
  }
225
332
 
@@ -228,7 +335,7 @@ class VoiceService {
228
335
  await Speech.stop();
229
336
  if (onStop) onStop();
230
337
  } catch (error) {
231
- console.error("Error stopping speech:", error);
338
+ console.error('Error stopping speech:', error);
232
339
  }
233
340
  }
234
341
  }