maidr 2.9.2 → 2.11.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -8,7 +8,7 @@
 
  # maidr: Multimodal Access and Interactive Data Representation
 
- maidr (pronounced as 'mader') is a system for non-visual access and control of statistical plots. It aims to provide an inclusive experience for users with visual impairments by offering multiple modes of interaction: braille, text, and sonification (BTS). This comprehensive approach enhances the accessibility of data visualization and encourages a multi-modal exploration on visualization. Check out the current build: [maidr Demo](https://xability.github.io/maidr/user_study_pilot/intro.html). You may also clone or download the GitHub repo, navigate to the ./user_study_pilot folder, and open any of the html files in your browser.
+ maidr (pronounced as 'mader') is a system for non-visual access and control of statistical plots. It aims to provide an inclusive experience for users with visual impairments by offering multiple modes of interaction: braille, text, and sonification (BTS). This comprehensive approach enhances the accessibility of data visualization and encourages a multi-modal exploration on visualization. Check out the current build: [maidr Demo](https://xability.github.io/maidr/galleries/index.html). You may also clone or download the GitHub repo, navigate to the ./user_study_pilot folder, and open any of the html files in your browser.
 
  ## Table of Contents
 
package/dist/maidr.js CHANGED
@@ -109,6 +109,7 @@ class Constants {
  skillLevelOther = ''; // custom skill level
  autoInitLLM = true; // auto initialize LLM on page load
  verboseText = '';
+ waitingQueue = 0;
 
  // user controls (not exposed to menu, with shortcuts usually)
  showDisplay = 1; // true / false
@@ -462,11 +463,11 @@ class Menu {
  </p>
  <p id="openai_auth_key_container" class="multi_container hidden">
  <span id="openai_multi_container" class="hidden"><input type="checkbox" id="openai_multi" name="openai_multi" aria-label="Use OpenAI in Multi modal mode"></span>
- <input type="password" id="openai_auth_key"><button aria-label="Delete OpenAI key" title="Delete OpenAI key" id="delete_openai_key" class="invis_button">&times;</button><label for="openai_auth_key">OpenAI Authentication Key</label>
+ <input type="text" size="50" id="openai_auth_key"><button aria-label="Delete OpenAI key" title="Delete OpenAI key" id="delete_openai_key" class="invis_button">&times;</button><label for="openai_auth_key">OpenAI Authentication Key</label>
  </p>
  <p id="gemini_auth_key_container" class="multi_container hidden">
  <span id="gemini_multi_container" class="hidden"><input type="checkbox" id="gemini_multi" name="gemini_multi" aria-label="Use Gemini in Multi modal mode"></span>
- <input type="password" id="gemini_auth_key"><button aria-label="Delete Gemini key" title="Delete Gemini key" id="delete_gemini_key" class="invis_button">&times;</button><label for="gemini_auth_key">Gemini Authentication Key</label>
+ <input type="text" size="50" id="gemini_auth_key"><button aria-label="Delete Gemini key" title="Delete Gemini key" id="delete_gemini_key" class="invis_button">&times;</button><label for="gemini_auth_key">Gemini Authentication Key</label>
  </p>
  <p><input type="checkbox" ${
  constants.autoInitLLM ? 'checked' : ''
@@ -476,12 +477,12 @@ class Menu {
  <option value="basic">Basic</option>
  <option value="intermediate">Intermediate</option>
  <option value="expert">Expert</option>
- <option value="other">other</option>
+ <option value="other">Other: describe in your own words</option>
  </select>
  <label for="skill_level">Level of skill in statistical charts</label>
  </p>
  <p id="skill_level_other_container" class="hidden"><input type="text" placeholder="Very basic" id="skill_level_other"> <label for="skill_level_other">Describe your level of skill in statistical charts</label></p>
- <p><label for="LLM_preferences">LLM Preferences</label></p>
+ <p><label for="LLM_preferences">Custom instructions for the chat response</label></p>
  <p><textarea id="LLM_preferences" rows="4" cols="50" placeholder="I'm a stats undergrad and work with Python. I prefer a casual tone, and favor information accuracy over creative description; just the facts please!"></textarea></p>
  </div>
  </div>
@@ -966,7 +967,16 @@ class ChatLLM {
  this.CreateComponent();
  this.SetEvents();
  if (constants.autoInitLLM) {
- this.InitChatMessage();
+ // only run if we have API keys set
+ if (
+ (constants.LLMModel == 'openai' && constants.openAIAuthKey) ||
+ (constants.LLMModel == 'gemini' && constants.geminiAuthKey) ||
+ (constants.LLMModel == 'multi' &&
+ constants.openAIAuthKey &&
+ constants.geminiAuthKey)
+ ) {
+ this.InitChatMessage();
+ }
  }
  }
 
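The guard added above only auto-opens the chat when the selected model actually has a usable key: OpenAI or Gemini alone need their own key, and 'multi' needs both. A standalone sketch of that check, for illustration only (the helper name `hasRequiredKeys` and the plain `settings` object are hypothetical, not part of maidr):

```js
// Illustrative sketch of the key-presence check above; `settings` stands in
// for maidr's `constants`, and the helper name is invented.
function hasRequiredKeys(settings) {
  switch (settings.LLMModel) {
    case 'openai':
      return Boolean(settings.openAIAuthKey);
    case 'gemini':
      return Boolean(settings.geminiAuthKey);
    case 'multi':
      return Boolean(settings.openAIAuthKey && settings.geminiAuthKey);
    default:
      return false;
  }
}

// Example: auto-init only runs when this returns true.
console.log(hasRequiredKeys({ LLMModel: 'openai', openAIAuthKey: 'sk-example' })); // true
console.log(hasRequiredKeys({ LLMModel: 'multi', openAIAuthKey: 'sk-example' })); // false
```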
@@ -997,11 +1007,10 @@ class ChatLLM {
  <p><button type="button">What is the title?</button></p>
  <p><button type="button">What are the high and low values?</button></p>
  <p><button type="button">What is the general shape of the chart?</button></p>
- <p><button type="button" id="more_suggestions">More</button></p>
  </div>
- <div id="more_suggestions_container" class="hidden LLM_suggestions">
+ <div id="more_suggestions_container" class="LLM_suggestions">
  <p><button type="button">Please provide the title of this visualization, then provide a description for someone who is blind or low vision. Include general overview of axes and the data at a high-level.</button></p>
- <p><button type="button">For the visualization I shared, please provide the following (where applicable): mean, standard deviation, extrema, correlations, relational comparisons like greater than OR lesser than.</button></p>
+ <p><button type="button">For the visualization I shared, please provide the following (where applicable): mean, standard deviation, extreme, correlations, relational comparisons like greater than OR lesser than.</button></p>
  <p><button type="button">Based on the visualization shared, address the following: Do you observe any unforeseen trends? If yes, what? Please convey any complex multi-faceted patterns present. Can you identify any noteworthy exceptions that aren't readily apparent through non-visual methods of analysis?</button></p>
  <p><button type="button">Provide context to help explain the data depicted in this visualization based on domain-specific insight.</button></p>
  </div>
@@ -1080,21 +1089,6 @@ class ChatLLM {
  ]);
 
  // ChatLLM suggestion events
- // the more button
- constants.events.push([
- document.getElementById('more_suggestions'),
- 'click',
- function (e) {
- document
- .getElementById('more_suggestions_container')
- .classList.toggle('hidden');
- // focus on button right after the more button
- document
- .querySelector('#more_suggestions_container > p > button')
- .focus();
- document.getElementById('more_suggestions').remove();
- },
- ]);
  // actual suggestions:
  let suggestions = document.querySelectorAll(
  '#chatLLM .LLM_suggestions button:not(#more_suggestions)'
@@ -1205,7 +1199,7 @@ class ChatLLM {
  markdown = markdown.replace(/\n{3,}/g, '\n\n');
 
  try {
- navigator.clipboard.writeText(markdown);
+ navigator.clipboard.writeText(markdown); // note: this fails if you're on the inspector. That's fine as it'll never happen to real users
  } catch (err) {
  console.error('Failed to copy: ', err);
  }
@@ -1317,22 +1311,40 @@ class ChatLLM {
  * @returns {void}
  */
  WaitingSound(onoff = true) {
- // clear old intervals and timeouts
- if (constants.waitingInterval) {
- // destroy old waiting sound
- clearInterval(constants.waitingInterval);
- constants.waitingSound = null;
- }
- if (constants.waitingSoundOverride) {
- clearTimeout(constants.waitingSoundOverride);
- constants.waitingSoundOverride = null;
+ let delay = 1000;
+ let freq = 440; // a440 babee
+ let inprogressFreq = freq * 2;
+
+ if (onoff) {
+ // if turning on, clear old intervals and timeouts
+ if (constants.waitingInterval) {
+ // destroy old waiting sound
+ clearInterval(constants.waitingInterval);
+ constants.waitingInterval = null;
+ }
+ if (constants.waitingSoundOverride) {
+ clearTimeout(constants.waitingSoundOverride);
+ constants.waitingSoundOverride = null;
+ }
+ } else {
+ // notify user that we're done waiting for this one
+ if (audio && chatLLM.shown) {
+ audio.playOscillator(inprogressFreq, 0.2, 0);
+ }
+
+ // turning off, but do we have more in the queue after this?
+ if (constants.waitingQueue > 1) {
+ // turning off and still have a queue, decrement by 1, and play a new sound
+ constants.waitingQueue--;
+ } else {
+ // no queue, just turn off
+ chatLLM.KillAllWaitingSounds();
+ }
  }
 
- // assuming we're turning it on, start playing a new waiting sound
+ // turning it on: start playing a new waiting sound
  if (onoff) {
  // create new waiting sound
- let delay = 1000;
- let freq = 440; // a440 babee
  constants.waitingInterval = setInterval(function () {
  if (audio && chatLLM.shown) {
  audio.playOscillator(freq, 0.2, 0);
@@ -1341,10 +1353,37 @@ class ChatLLM {
 
  // clear automatically after 30 sec, assuming no response
  constants.waitingSoundOverride = setTimeout(function () {
- chatLLM.WaitingSound(false);
+ chatLLM.KillAllWaitingSounds();
  }, 30000);
+
+ // set queue for multi
+ if (constants.LLMModel != 'multi') {
+ constants.waitingQueue = 1;
+ } else {
+ constants.waitingQueue = 0;
+ if (constants.LLMGeminiMulti) {
+ constants.waitingQueue++;
+ }
+ if (constants.LLMOpenAiMulti) {
+ constants.waitingQueue++;
+ }
+ }
  }
  }
+ /**
+ * Overrides and kills all waiting sounds for LLM
+ */
+ KillAllWaitingSounds() {
+ if (constants.waitingInterval) {
+ clearInterval(constants.waitingInterval);
+ constants.waitingInterval = null;
+ }
+ if (constants.waitingSoundOverride) {
+ clearTimeout(constants.waitingSoundOverride);
+ constants.waitingSoundOverride = null;
+ }
+ constants.waitingQueue = 0;
+ }
 
  InitChatMessage() {
  // get name from resource
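Taken together, the two hunks above turn the waiting sound into a counted queue: when a prompt is sent, `constants.waitingQueue` is set to the number of expected responses (1 normally, up to 2 in 'multi' mode), each arriving response plays a higher-pitched chirp and decrements the count, and the beeping interval plus its 30-second safety timeout are only cleared once the count runs out. A minimal sketch of that counter logic, with invented names (`startWaiting`, `finishOne`) and console output standing in for the oscillator tones:

```js
// Minimal sketch of the waiting-queue behaviour; `state` stands in for
// maidr's `constants`, and logging replaces audio.playOscillator().
const state = { waitingQueue: 0, waitingInterval: null, waitingTimeout: null };

function killAllWaitingSounds() {
  if (state.waitingInterval) clearInterval(state.waitingInterval);
  if (state.waitingTimeout) clearTimeout(state.waitingTimeout);
  state.waitingInterval = null;
  state.waitingTimeout = null;
  state.waitingQueue = 0;
}

function startWaiting(expectedResponses) {
  killAllWaitingSounds();
  state.waitingQueue = expectedResponses; // 2 when both OpenAI and Gemini are queried
  state.waitingInterval = setInterval(() => console.log('beep (still waiting)'), 1000);
  state.waitingTimeout = setTimeout(killAllWaitingSounds, 30000); // give up after 30 s
}

function finishOne() {
  console.log('chirp (one response arrived)');
  if (state.waitingQueue > 1) {
    state.waitingQueue--; // keep beeping for the remaining request
  } else {
    killAllWaitingSounds(); // last response: stop the interval and the timeout
  }
}

// Usage: multi-model prompt, two responses expected.
startWaiting(2);
finishOne(); // still waiting on one model
finishOne(); // done; interval and timeout cleared
```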
@@ -1362,7 +1401,7 @@ class ChatLLM {
  */
  ProcessLLMResponse(data, model) {
  chatLLM.WaitingSound(false);
- //console.log('LLM response: ', data);
+ console.log('LLM response: ', data);
  let text = '';
  let LLMName = resources.GetString(model);
 
@@ -1375,6 +1414,7 @@ class ChatLLM {
 
  if (data.error) {
  chatLLM.DisplayChatMessage(LLMName, 'Error processing request.', true);
+ chatLLM.WaitingSound(false);
  } else {
  chatLLM.DisplayChatMessage(LLMName, text);
  }
@@ -1385,10 +1425,12 @@ class ChatLLM {
  } else {
  if (!data.error) {
  data.error = 'Error processing request.';
+ chatLLM.WaitingSound(false);
  }
  }
  if (data.error) {
  chatLLM.DisplayChatMessage(LLMName, 'Error processing request.', true);
+ chatLLM.WaitingSound(false);
  } else {
  // todo: display actual response
  }
@@ -1489,7 +1531,7 @@ class ChatLLM {
  .catch((error) => {
  chatLLM.WaitingSound(false);
  console.error('Error:', error);
- chatLLM.DisplayChatMessage(LLMName, 'Error processing request.', true);
+ chatLLM.DisplayChatMessage('OpenAI', 'Error processing request.', true);
  // also todo: handle errors somehow
  });
  }
@@ -1541,6 +1583,7 @@ class ChatLLM {
  }
 
  async GeminiPrompt(text, imgBase64 = null) {
+ // https://ai.google.dev/docs/gemini_api_overview#node.js
  try {
  // Save the image for next time
  if (imgBase64 == null) {
@@ -1579,6 +1622,8 @@ class ChatLLM {
  // Process the response
  chatLLM.ProcessLLMResponse(result.response, 'gemini');
  } catch (error) {
+ chatLLM.WaitingSound(false);
+ chatLLM.DisplayChatMessage('Gemini', 'Error processing request.', true);
  console.error('Error in GeminiPrompt:', error);
  throw error; // Rethrow the error for further handling if necessary
  }
@@ -1609,9 +1654,11 @@ class ChatLLM {
  let html = `
  <div class="chatLLM_message ${
  user == 'User' ? 'chatLLM_message_self' : 'chatLLM_message_other'
- }">
- <${hLevel} class="chatLLM_message_user">${user}</${hLevel}>
- <p class="chatLLM_message_text">${text}</p>
+ }">`;
+ if (text != resources.GetString('processing')) {
+ html += `<${hLevel} class="chatLLM_message_user">${user}</${hLevel}>`;
+ }
+ html += `<p class="chatLLM_message_text">${text}</p>
  </div>
  `;
  // add a copy button to actual messages
@@ -1640,11 +1687,6 @@ class ChatLLM {
  ResetLLM() {
  // clear the main chat history
  document.getElementById('chatLLM_chat_history').innerHTML = '';
- // unhide the more button
- document
- .getElementById('more_suggestions_container')
- .classList.add('hidden');
- document.getElementById('more_suggestions').classList.remove('hidden');
 
  // reset the data
  this.requestJson = null;
@@ -2159,10 +2201,12 @@ class Tracker {
  SaveSettings() {
  // fetch all settings, push to data.settings
  let settings = JSON.parse(localStorage.getItem('settings_data'));
- // don't store their auth keys
- settings.openAIAuthKey = 'hidden';
- settings.geminiAuthKey = 'hidden';
- this.SetData('settings', settings);
+ if (settings) {
+ // don't store their auth keys
+ settings.openAIAuthKey = 'hidden';
+ settings.geminiAuthKey = 'hidden';
+ this.SetData('settings', settings);
+ }
  }
 
  /**
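The `if (settings)` wrapper above matters because `localStorage.getItem('settings_data')` returns `null` when no settings were ever saved; `JSON.parse(null)` then yields `null`, and assigning properties to it threw. A hedged sketch of the same read-redact-store pattern (the function name is invented; a browser `localStorage` is assumed):

```js
// Sketch of the null-safe pattern; assumes a browser with localStorage.
// The function name is illustrative, not part of maidr.
function redactedSettings(storageKey = 'settings_data') {
  const raw = localStorage.getItem(storageKey); // null if nothing was ever saved
  const settings = raw ? JSON.parse(raw) : null;
  if (!settings) return null; // nothing to report, and nothing to crash on

  // never forward real auth keys with tracking data
  settings.openAIAuthKey = 'hidden';
  settings.geminiAuthKey = 'hidden';
  return settings;
}
```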
@@ -2308,6 +2352,7 @@ class Tracker {
  constants.plotOrientation == 'vert' ? position.x : position.y;
  let sectionPos =
  constants.plotOrientation == 'vert' ? position.y : position.x;
+ let sectionLabel = plot.sections[sectionPos];
 
  if (!this.isUndefinedOrNull(plot.x_group_label)) {
  x_label = plot.x_group_label;
@@ -2317,42 +2362,26 @@ class Tracker {
  }
  if (constants.plotOrientation == 'vert') {
  if (plotPos > -1 && sectionPos > -1) {
- if (
- !this.isUndefinedOrNull(plot.plotData[plotPos][sectionPos].label)
- ) {
- y_tickmark = plot.plotData[plotPos][sectionPos].label;
+ if (!this.isUndefinedOrNull(sectionLabel)) {
+ y_tickmark = sectionLabel;
  }
  if (!this.isUndefinedOrNull(plot.x_labels[position.x])) {
  x_tickmark = plot.x_labels[position.x];
  }
- if (
- !this.isUndefinedOrNull(plot.plotData[plotPos][sectionPos].values)
- ) {
- value = plot.plotData[plotPos][sectionPos].values;
- } else if (
- !this.isUndefinedOrNull(plot.plotData[plotPos][sectionPos].y)
- ) {
- value = plot.plotData[plotPos][sectionPos].y;
+ if (!this.isUndefinedOrNull(plot.plotData[plotPos][sectionLabel])) {
+ value = plot.plotData[plotPos][sectionLabel];
  }
  }
  } else {
  if (plotPos > -1 && sectionPos > -1) {
- if (
- !this.isUndefinedOrNull(plot.plotData[plotPos][sectionPos].label)
- ) {
- x_tickmark = plot.plotData[plotPos][sectionPos].label;
+ if (!this.isUndefinedOrNull(sectionLabel)) {
+ x_tickmark = sectionLabel;
  }
  if (!this.isUndefinedOrNull(plot.y_labels[position.y])) {
  y_tickmark = plot.y_labels[position.y];
  }
- if (
- !this.isUndefinedOrNull(plot.plotData[plotPos][sectionPos].values)
- ) {
- value = plot.plotData[plotPos][sectionPos].values;
- } else if (
- !this.isUndefinedOrNull(plot.plotData[plotPos][sectionPos].x)
- ) {
- value = plot.plotData[plotPos][sectionPos].x;
+ if (!this.isUndefinedOrNull(plot.plotData[plotPos][sectionLabel])) {
+ value = plot.plotData[plotPos][sectionLabel];
  }
  }
  }
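The refactor above replaces per-point `{label, values}` objects with a single label-keyed lookup: the tick mark now comes from `plot.sections[sectionPos]` and the value from `plot.plotData[plotPos][sectionLabel]`. A sketch of that lookup under those assumed shapes (the sample data below is invented for illustration, not taken from maidr):

```js
// Illustrative only: the data shapes are inferred from the diff above.
const plot = {
  sections: ['lower', 'middle', 'upper'], // invented section names
  plotData: [
    { lower: 1.2, middle: 3.4, upper: 5.6 }, // one series, keyed by section label
  ],
};

function readSectionValue(plotPos, sectionPos) {
  const sectionLabel = plot.sections[sectionPos]; // e.g. 'middle'
  const value = plot.plotData[plotPos][sectionLabel]; // single keyed lookup
  return { sectionLabel, value };
}

console.log(readSectionValue(0, 1)); // { sectionLabel: 'middle', value: 3.4 }
```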