@getlupa/client 1.18.3 → 1.18.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -12621,6 +12621,11 @@ var __async = (__this, __arguments, generator) => {
  }
  return result2;
  };
+ const getSocketClientId = () => {
+ const timestamp = Date.now().toString(36);
+ const randomString = getRandomString(8);
+ return `${timestamp}-${randomString}`;
+ };
  const toFixedIfNecessary = (value, precision = 2) => {
  return (+parseFloat(value).toFixed(precision)).toString();
  };
@@ -12735,6 +12740,10 @@ var __async = (__this, __arguments, generator) => {
  links: {
  searchResults: "/search"
  },
+ voiceSearch: {
+ enabled: false,
+ queryKey: ""
+ },
  panels: [
  {
  type: "suggestion",
@@ -14316,6 +14325,350 @@ var __async = (__this, __arguments, generator) => {
  resetHighlightIndex
  };
  });
+ const Env = {
+ production: "https://api.lupasearch.com/v1/",
+ staging: "https://api.staging.lupasearch.com/v1/"
+ };
+ const VoiceServiceEnv = {
+ production: "ws://voice.lupasearch.com:3001",
+ staging: "ws://voice.lupasearch.dev:3001"
+ };
+ const DEFAULT_REQUEST_CONFIG = {
+ method: "POST",
+ headers: { "Content-Type": "application/json" }
+ };
+ const DEFAULT_HEADERS = DEFAULT_REQUEST_CONFIG.headers;
+ const getVoiceServiceApiUrl = (environment, customVoiceServiceUrl) => {
+ if (customVoiceServiceUrl) {
+ return customVoiceServiceUrl;
+ }
+ return VoiceServiceEnv[environment] || VoiceServiceEnv["production"];
+ };
+ const getApiUrl = (environment, customBaseUrl) => {
+ if (customBaseUrl) {
+ return customBaseUrl;
+ }
+ return Env[environment] || Env["production"];
+ };
+ const _sfc_main$1z = /* @__PURE__ */ defineComponent({
+ __name: "VoiceSearchProgressCircle",
+ props: {
+ isRecording: { type: Boolean },
+ timesliceLimit: {},
+ timeSliceLength: {}
+ },
+ setup(__props, { expose: __expose }) {
+ const props = __props;
+ const progressBar = ref(null);
+ const getProgressBarColor = (progressBarStyle) => {
+ if (!progressBarStyle.backgroundImage.startsWith("conic-gradient")) {
+ return progressBarStyle.backgroundColor;
+ }
+ const colorStops = progressBarStyle.backgroundImage.replace(/conic-gradient\(|\)$/g, "").split(")");
+ if (colorStops.length > 1) {
+ return `${colorStops[0]})`;
+ } else {
+ return progressBarStyle.backgroundColor;
+ }
+ };
+ const startProgressBar = () => {
+ if (!progressBar.value || !props.isRecording) {
+ return;
+ }
+ const duration = props.timesliceLimit * props.timeSliceLength;
+ const progressBarStyle = window.getComputedStyle(progressBar.value);
+ const progressBarColor = getProgressBarColor(progressBarStyle);
+ progressBar.value.style.background = `conic-gradient(${progressBarColor} 0%, transparent 0%)`;
+ let startTime = null;
+ function updateProgress(timestamp) {
+ if (!progressBar.value || !props.isRecording) {
+ return;
+ }
+ if (!startTime)
+ startTime = timestamp;
+ const elapsed = timestamp - startTime;
+ const progress = Math.min(elapsed / duration, 1) * 100;
+ progressBar.value.style.background = `conic-gradient(${progressBarColor} ${progress}%, transparent ${progress}%)`;
+ if (elapsed < duration) {
+ requestAnimationFrame(updateProgress);
+ }
+ }
+ requestAnimationFrame(updateProgress);
+ };
+ const stopProgressBar = () => {
+ if (!progressBar.value) {
+ return;
+ }
+ progressBar.value.style.background = "";
+ };
+ __expose({
+ startProgressBar,
+ stopProgressBar
+ });
+ return (_ctx, _cache) => {
+ return openBlock(), createElementBlock("div", {
+ ref_key: "progressBar",
+ ref: progressBar,
+ class: "lupa-progress-circle"
+ }, null, 512);
+ };
+ }
+ });
+ const buildSocketMessageFrameHeader = (event, payloadLength) => {
+ const headerObj = { event, length: payloadLength };
+ const headerJson = JSON.stringify(headerObj);
+ const headerBytes = new TextEncoder().encode(headerJson);
+ const headerLength = new Uint32Array([headerBytes.length]);
+ const headerLengthBytes = new Uint8Array(headerLength.buffer);
+ const result2 = new Uint8Array(4 + headerBytes.length);
+ result2.set(headerLengthBytes, 0);
+ result2.set(headerBytes, 4);
+ return result2;
+ };
+ function useVoiceRecorder(options) {
+ const socket = ref(null);
+ const mediaStream = ref(null);
+ const mediaRecorder = ref(null);
+ const isRecording = ref(false);
+ const errorRef = ref(null);
+ const transcription = ref("");
+ const timeSliceLength = computed(() => {
+ var _a;
+ return (_a = options.timesliceLength) != null ? _a : 1e3;
+ });
+ onBeforeUnmount(() => {
+ closeSocket();
+ stopRecording();
+ });
+ const initSocket = (url, onMessage) => {
+ socket.value = new WebSocket(url);
+ socket.value.onopen = () => __async2(this, null, function* () {
+ var _a;
+ if (((_a = mediaRecorder.value) == null ? void 0 : _a.state) !== "recording") {
+ yield startRecording();
+ }
+ });
+ socket.value.onmessage = (event) => {
+ const msg = JSON.parse(event.data);
+ if (msg.event === "transcription") {
+ transcription.value = msg.transcription;
+ onMessage == null ? void 0 : onMessage(msg.transcription);
+ stopSocketConnection();
+ } else if (msg.event === "error") {
+ errorRef.value = "Server error during transcription";
+ stopRecording();
+ }
+ };
+ socket.value.onclose = () => {
+ stopRecording();
+ };
+ socket.value.onerror = () => {
+ stopRecording();
+ errorRef.value = "Service connection error";
+ };
+ };
+ const onMediaRecorderDataAvailable = (event) => __async2(this, null, function* () {
+ var _a, _b;
+ if (((_a = mediaRecorder.value) == null ? void 0 : _a.state) !== "recording")
+ return;
+ const audioBuffer = yield event.data.arrayBuffer();
+ const header = buildSocketMessageFrameHeader("audio-chunk", audioBuffer.byteLength);
+ const buffer = new Uint8Array(header.length + audioBuffer.byteLength);
+ buffer.set(header, 0);
+ buffer.set(new Uint8Array(audioBuffer), header.length);
+ (_b = socket.value) == null ? void 0 : _b.send(buffer);
+ });
+ const startRecording = () => __async2(this, null, function* () {
+ mediaStream.value = yield navigator.mediaDevices.getUserMedia({
+ video: false,
+ audio: {
+ channelCount: 1,
+ echoCancellation: true,
+ sampleRate: 16e3
+ }
+ });
+ mediaRecorder.value = new MediaRecorder(mediaStream.value, {
+ mimeType: "audio/webm; codecs=opus"
+ });
+ mediaRecorder.value.ondataavailable = onMediaRecorderDataAvailable;
+ mediaRecorder.value.start(timeSliceLength.value);
+ isRecording.value = true;
+ });
+ const stopRecording = () => {
+ var _a, _b;
+ (_a = mediaRecorder.value) == null ? void 0 : _a.stop();
+ (_b = mediaStream.value) == null ? void 0 : _b.getTracks().forEach((track2) => {
+ track2.stop();
+ });
+ isRecording.value = false;
+ };
+ const stopSocketConnection = () => {
+ if (socket.value && socket.value.readyState === WebSocket.OPEN) {
+ const endHeader = buildSocketMessageFrameHeader("audio-chunk-end", 0);
+ socket.value.send(endHeader);
+ setTimeout(() => {
+ closeSocket();
+ }, 1e3);
+ }
+ };
+ const closeSocket = () => {
+ var _a;
+ (_a = socket.value) == null ? void 0 : _a.close();
+ socket.value = null;
+ };
+ const reset = () => {
+ stopRecording();
+ closeSocket();
+ transcription.value = "";
+ errorRef.value = null;
+ isRecording.value = false;
+ };
+ return {
+ isRecording,
+ transcription,
+ errorRef,
+ initSocket,
+ startRecording,
+ stopRecording,
+ stopSocketConnection,
+ reset,
+ closeSocket
+ };
+ }
+ const _hoisted_1$1l = {
+ key: 0,
+ class: "lupa-dialog-overlay"
+ };
+ const _hoisted_2$W = { class: "lupa-dialog-content" };
+ const _hoisted_3$F = { class: "lupa-listening-text" };
+ const _hoisted_4$v = { class: "lupa-mic-button-wrapper" };
+ const _sfc_main$1y = /* @__PURE__ */ defineComponent({
+ __name: "VoiceSearchDialog",
+ props: {
+ isOpen: { type: Boolean },
+ options: {}
+ },
+ emits: [
+ "close",
+ "transcript-update",
+ "stop-recognize"
+ ],
+ setup(__props, { expose: __expose, emit: emit2 }) {
+ const props = __props;
+ const optionsStore = useOptionsStore();
+ const {
+ isRecording,
+ transcription,
+ errorRef,
+ initSocket,
+ stopSocketConnection,
+ reset
+ } = useVoiceRecorder(props.options);
+ const clientId = ref(null);
+ const voiceSearchProgressBar = ref(null);
+ const timesliceLimit = computed(() => {
+ var _a;
+ return (_a = props.options.timesliceLimit) != null ? _a : 4;
+ });
+ const timeSliceLength = computed(() => {
+ var _a;
+ return (_a = props.options.timesliceLength) != null ? _a : 1e3;
+ });
+ const stopDelay = computed(() => {
+ var _a;
+ return (_a = props.options.stopDelay) != null ? _a : 700;
+ });
+ const labels = computed(() => {
+ var _a;
+ return (_a = props.options.labels) != null ? _a : {};
+ });
+ const description = computed(() => {
+ var _a, _b, _c;
+ if (errorRef.value) {
+ return (_a = labels.value.serviceError) != null ? _a : errorRef.value;
+ }
+ if (!isRecording.value) {
+ return (_b = labels.value.microphoneOff) != null ? _b : "Microphone is off. Try again.";
+ }
+ return (_c = labels.value.listening) != null ? _c : "Listening...";
+ });
+ watch(transcription, (newValue) => {
+ emit2("transcript-update", newValue);
+ });
+ const handleRecordingButtonClick = () => {
+ var _a, _b;
+ if (isRecording.value) {
+ setTimeout(() => {
+ stopSocketConnection();
+ handleOnStopEvent();
+ }, stopDelay.value);
+ return;
+ }
+ const voiceServiceUrl = getVoiceServiceApiUrl(
+ optionsStore.envOptions.environment,
+ props.options.customVoiceServiceUrl
+ );
+ const socketUrl = `${voiceServiceUrl}?clientId=${clientId.value}&queryKey=${props.options.queryKey}&languageCode=${(_a = props.options.language) != null ? _a : "en-US"}&connectionType=write-first`;
+ initSocket(socketUrl);
+ (_b = voiceSearchProgressBar.value) == null ? void 0 : _b.startProgressBar();
+ setTimeout(() => {
+ stopSocketConnection();
+ handleOnStopEvent();
+ }, timesliceLimit.value * timeSliceLength.value);
+ };
+ const handleOnStopEvent = () => {
+ var _a;
+ setTimeout(() => {
+ if (errorRef.value)
+ return;
+ emit2("stop-recognize", transcription.value);
+ }, 1500);
+ (_a = voiceSearchProgressBar.value) == null ? void 0 : _a.stopProgressBar();
+ };
+ onMounted(() => {
+ clientId.value = getSocketClientId();
+ });
+ onBeforeUnmount(() => {
+ clientId.value = null;
+ });
+ const dialogReset = () => {
+ var _a;
+ reset();
+ (_a = voiceSearchProgressBar.value) == null ? void 0 : _a.stopProgressBar();
+ };
+ __expose({
+ handleRecordingButtonClick,
+ reset: dialogReset
+ });
+ return (_ctx, _cache) => {
+ return openBlock(), createElementBlock("div", null, [
+ props.isOpen ? (openBlock(), createElementBlock("div", _hoisted_1$1l, [
+ createBaseVNode("button", {
+ class: "lupa-dialog-box-close-button",
+ onClick: _cache[0] || (_cache[0] = () => emit2("close"))
+ }),
+ createBaseVNode("div", _hoisted_2$W, [
+ createBaseVNode("p", _hoisted_3$F, toDisplayString(description.value), 1),
+ createBaseVNode("div", _hoisted_4$v, [
+ createBaseVNode("button", {
+ class: normalizeClass(["lupa-mic-button", { recording: unref(isRecording) }]),
+ onClick: handleRecordingButtonClick
+ }, null, 2),
+ createVNode(_sfc_main$1z, {
+ ref_key: "voiceSearchProgressBar",
+ ref: voiceSearchProgressBar,
+ class: "lupa-progress-circle",
+ isRecording: unref(isRecording),
+ timesliceLimit: timesliceLimit.value,
+ timeSliceLength: timeSliceLength.value
+ }, null, 8, ["isRecording", "timesliceLimit", "timeSliceLength"])
+ ])
+ ])
+ ])) : createCommentVNode("", true)
+ ]);
+ };
+ }
+ });
  const _hoisted_1$1k = { id: "lupa-search-box-input-container" };
  const _hoisted_2$V = { class: "lupa-input-clear" };
  const _hoisted_3$E = { id: "lupa-search-box-input" };
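
Note: buildSocketMessageFrameHeader in the hunk above defines a simple binary framing for the voice WebSocket — a 4-byte header length written via Uint32Array (platform byte order, little-endian on virtually all clients), a JSON header of the form { event, length }, then the audio payload bytes appended by onMediaRecorderDataAvailable; responses observed in the code come back as JSON with event "transcription" or "error". The sketch below shows how a receiving service could split one such frame. It is not part of the package; the server-side contract is an assumption inferred from the client code, and decodeFrame is a hypothetical helper name.

    // Sketch only: assumes `buffer` is the ArrayBuffer of one complete frame
    // and that the 4-byte length prefix is little-endian, matching Uint32Array
    // on typical client platforms.
    const decodeFrame = (buffer) => {
      const view = new DataView(buffer);
      const headerLength = view.getUint32(0, true); // byte length of the JSON header
      const headerBytes = new Uint8Array(buffer, 4, headerLength);
      const header = JSON.parse(new TextDecoder().decode(headerBytes));
      // header.event is "audio-chunk" or "audio-chunk-end"; header.length is the payload size
      const payload = new Uint8Array(buffer, 4 + headerLength, header.length);
      return { header, payload };
    };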
@@ -14329,6 +14682,7 @@ var __async = (__this, __arguments, generator) => {
  key: 0,
  class: "lupa-close-label"
  };
+ const _hoisted_9$3 = { key: 1 };
  const _sfc_main$1x = /* @__PURE__ */ defineComponent({
  __name: "SearchBoxInput",
  props: {
@@ -14344,6 +14698,8 @@ var __async = (__this, __arguments, generator) => {
  const searchBoxStore = useSearchBoxStore();
  const { query } = storeToRefs(paramStore);
  const mainInput = ref(null);
+ const voiceDialogOverlay = ref(null);
+ const isVoiceDialogOpen = ref(false);
  const emitInputOnFocus = computed(() => {
  var _a;
  return (_a = props.emitInputOnFocus) != null ? _a : true;
@@ -14354,6 +14710,10 @@ var __async = (__this, __arguments, generator) => {
  return (_a = props.suggestedValue) != null ? _a : { value: "", override: false, item: { suggestion: "" } };
  }
  );
+ const isVoiceSearchEnabled = computed(() => {
+ var _a, _b;
+ return (_b = (_a = props.options.voiceSearch) == null ? void 0 : _a.enabled) != null ? _b : false;
+ });
  const labels = computed(() => props.options.labels);
  const input2 = ref("");
  const inputValue = computed({
@@ -14377,6 +14737,12 @@ var __async = (__this, __arguments, generator) => {
  var _a;
  return (_a = labels.value.searchInputAriaLabel) != null ? _a : "Search input";
  });
+ onMounted(() => {
+ document.addEventListener("click", handleClickOutsideVoiceDialogOverlay);
+ });
+ onBeforeUnmount(() => {
+ document.removeEventListener("click", handleClickOutsideVoiceDialogOverlay);
+ });
  watch(suggestedValue, () => {
  if (suggestedValue.value.override) {
  input2.value = suggestedValue.value.item.suggestion;
@@ -14411,6 +14777,37 @@ var __async = (__this, __arguments, generator) => {
  }
  (_a = mainInput == null ? void 0 : mainInput.value) == null ? void 0 : _a.focus();
  };
+ const openVoiceSearchDialog = () => {
+ var _a;
+ isVoiceDialogOpen.value = true;
+ (_a = voiceDialogOverlay.value) == null ? void 0 : _a.handleRecordingButtonClick();
+ };
+ const closeDialog = () => {
+ var _a;
+ isVoiceDialogOpen.value = false;
+ (_a = voiceDialogOverlay.value) == null ? void 0 : _a.reset();
+ };
+ const handleVoiceSearchOutput = (transcription) => {
+ inputValue.value = transcription;
+ handleSubmit();
+ };
+ const stopRecognition = (trascription) => {
+ setTimeout(() => {
+ isVoiceDialogOpen.value = false;
+ handleVoiceSearchOutput(trascription);
+ }, 500);
+ };
+ const handleClickOutsideVoiceDialogOverlay = (event) => {
+ if (event.target.classList.contains("lupa-voice-search-button")) {
+ return;
+ }
+ if (voiceDialogOverlay.value && voiceDialogOverlay.value.$el.contains(event.target)) {
+ return;
+ }
+ if (isVoiceDialogOpen.value) {
+ closeDialog();
+ }
+ };
  __expose({ focus });
  return (_ctx, _cache) => {
  return openBlock(), createElementBlock("div", _hoisted_1$1k, [
@@ -14454,7 +14851,23 @@ var __async = (__this, __arguments, generator) => {
  onClick: _cache[1] || (_cache[1] = ($event) => _ctx.$emit("close"))
  }, [
  labels.value.close ? (openBlock(), createElementBlock("span", _hoisted_8$3, toDisplayString(labels.value.close), 1)) : createCommentVNode("", true)
- ])) : createCommentVNode("", true)
+ ])) : createCommentVNode("", true),
+ isVoiceSearchEnabled.value ? (openBlock(), createElementBlock("div", _hoisted_9$3, [
+ createBaseVNode("button", {
+ onClick: openVoiceSearchDialog,
+ class: "lupa-voice-search-button"
+ })
+ ])) : createCommentVNode("", true),
+ isVoiceSearchEnabled.value ? (openBlock(), createBlock(_sfc_main$1y, {
+ key: 2,
+ ref_key: "voiceDialogOverlay",
+ ref: voiceDialogOverlay,
+ isOpen: isVoiceDialogOpen.value,
+ options: props.options.voiceSearch,
+ onClose: closeDialog,
+ onTranscriptUpdate: handleVoiceSearchOutput,
+ onStopRecognize: stopRecognition
+ }, null, 8, ["isOpen", "options"])) : createCommentVNode("", true)
  ]);
  };
  }
@@ -25993,7 +26406,8 @@ and ensure you are accounting for this risk.
  "labels",
  "links",
  "inputAttributes",
- "showSubmitButton"
+ "showSubmitButton",
+ "voiceSearch"
  ])
  );
  const panelOptions = computed(
@@ -26577,22 +26991,42 @@ and ensure you are accounting for this risk.
  emits: ["remove"],
  setup(__props, { emit: emit2 }) {
  const props = __props;
- const facetKeyClass = computed(() => {
- return `lupa-facet-active-filter-${props.filter.key}`;
+ const facetKeyClass = computed(() => `lupa-facet-active-filter-${props.filter.key}`);
+ const { searchResultOptions } = storeToRefs(useOptionsStore());
+ const units = computed(() => {
+ var _a, _b, _c, _d, _e;
+ return (_e = (_d = (_c = (_b = (_a = searchResultOptions.value) == null ? void 0 : _a.filters) == null ? void 0 : _b.facets) == null ? void 0 : _c.stats) == null ? void 0 : _d.units) != null ? _e : {};
  });
- const handleClick = () => {
+ function handleClick() {
  emit2("remove", { filter: props.filter });
- };
+ }
+ function formatFilterValue(filter2) {
+ const unit = units.value[filter2.key] || "";
+ let min, max;
+ if (Array.isArray(filter2.value)) {
+ [min, max] = filter2.value.map(String);
+ } else if (typeof filter2.value === "string" && filter2.value.includes("-")) {
+ const parts = filter2.value.split("-").map((s) => s.trim());
+ if (parts.length === 2)
+ [min, max] = parts;
+ }
+ if (min != null && max != null) {
+ return `${min} ${unit} – ${max} ${unit}`;
+ }
+ return `${filter2.value} ${unit}`.trim();
+ }
  return (_ctx, _cache) => {
  return openBlock(), createElementBlock("div", {
- class: normalizeClass(["lupa-search-result-filter-value", { [facetKeyClass.value]: true }])
+ class: normalizeClass(["lupa-search-result-filter-value", [facetKeyClass.value]]),
+ "data-cy": "lupa-current-filter-item"
  }, [
  createBaseVNode("div", {
  class: "lupa-current-filter-action",
- onClick: handleClick
- }, ""),
+ onClick: handleClick,
+ "aria-label": "Remove filter"
+ }, " ⨉ "),
  createBaseVNode("div", _hoisted_1$U, toDisplayString(_ctx.filter.label) + ": ", 1),
- createBaseVNode("div", _hoisted_2$F, toDisplayString(_ctx.filter.value), 1)
+ createBaseVNode("div", _hoisted_2$F, toDisplayString(formatFilterValue(props.filter)), 1)
  ], 2);
  };
  }
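
Note: the active-filter component above now reads a facet-key → unit-label map from searchResultOptions.filters.facets.stats.units and renders range filters as "min unit – max unit". A minimal sketch of such a configuration is shown below; the facet keys and unit labels are hypothetical, only the nesting path is taken from the chained lookup in the diff.

    // Sketch only: `units` maps a facet key to the unit label appended to its values.
    const searchResultOptions = {
      filters: {
        facets: {
          stats: {
            units: {
              weight: "kg",     // hypothetical facet key
              diameter: "mm"    // hypothetical facet key
            }
          }
        }
      }
    };
    // With the above, an active range filter on `weight` with value [2, 5]
    // (or the string "2 - 5") renders as "2 kg – 5 kg".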
@@ -26614,6 +27048,14 @@ and ensure you are accounting for this risk.
  expandable: { type: Boolean }
  },
  setup(__props) {
+ const optionsStore = useOptionsStore();
+ const { searchResultOptions } = storeToRefs(optionsStore);
+ const units = computed(
+ () => {
+ var _a, _b, _c, _d, _e;
+ return (_e = (_d = (_c = (_b = (_a = searchResultOptions == null ? void 0 : searchResultOptions.value) == null ? void 0 : _a.filters) == null ? void 0 : _b.facets) == null ? void 0 : _c.stats) == null ? void 0 : _d.units) != null ? _e : {};
+ }
+ );
  const isOpen = ref(false);
  const paramsStore = useParamsStore();
  const optionStore = useOptionsStore();
@@ -26695,8 +27137,9 @@ and ensure you are accounting for this risk.
  return openBlock(), createBlock(_sfc_main$_, {
  key: filter2.key + "_" + filter2.value,
  filter: filter2,
+ units: units.value,
  onRemove: handleRemove
- }, null, 8, ["filter"]);
+ }, null, 8, ["filter", "units"]);
  }), 128))
  ]),
  createBaseVNode("div", {
@@ -28020,15 +28463,17 @@ and ensure you are accounting for this risk.
  const _hoisted_5$d = { class: "lupa-stats-from" };
  const _hoisted_6$7 = ["max", "min", "pattern", "aria-label"];
  const _hoisted_7$5 = { key: 0 };
- const _hoisted_8$1 = /* @__PURE__ */ createBaseVNode("div", { class: "lupa-stats-separator" }, null, -1);
- const _hoisted_9$1 = {
+ const _hoisted_8$1 = { key: 1 };
+ const _hoisted_9$1 = /* @__PURE__ */ createBaseVNode("div", { class: "lupa-stats-separator" }, null, -1);
+ const _hoisted_10 = {
  key: 0,
  class: "lupa-stats-range-label"
  };
- const _hoisted_10 = { class: "lupa-stats-to" };
- const _hoisted_11 = ["max", "min", "pattern", "aria-label"];
- const _hoisted_12 = { key: 0 };
- const _hoisted_13 = {
+ const _hoisted_11 = { class: "lupa-stats-to" };
+ const _hoisted_12 = ["max", "min", "pattern", "aria-label"];
+ const _hoisted_13 = { key: 0 };
+ const _hoisted_14 = { key: 1 };
+ const _hoisted_15 = {
  key: 2,
  class: "lupa-stats-slider-wrapper"
  };
@@ -28095,7 +28540,7 @@ and ensure you are accounting for this risk.
  if (!value || value > facetMax.value) {
  return;
  }
- innerSliderRange.value = [value, sliderRange.value[1]];
+ innerSliderRange.value = [sliderRange.value[1], value];
  handleInputChange();
  }
  });
@@ -28151,7 +28596,18 @@ and ensure you are accounting for this risk.
  });
  const statsSummary = computed(() => {
  const [min, max] = sliderRange.value;
- return isPrice.value ? formatPriceSummary([min, max], currency.value, separator.value, currencyTemplate.value) : formatRange({ gte: min, lte: max });
+ if (isPrice.value) {
+ return formatPriceSummary(
+ [min, max],
+ currency.value,
+ separator.value,
+ currencyTemplate.value
+ );
+ }
+ if (unit.value) {
+ return `${min} ${unit.value} - ${max} ${unit.value}`;
+ }
+ return formatRange({ gte: min, lte: max });
  });
  const separator = computed(() => {
  var _a, _b, _c;
@@ -28212,6 +28668,12 @@ and ensure you are accounting for this risk.
  const handleDragging = (value) => {
  innerSliderRange.value = value;
  };
+ const unit = computed(
+ () => {
+ var _a, _b, _c, _d, _e;
+ return (_e = (_d = (_a = props.options.stats) == null ? void 0 : _a.units) == null ? void 0 : _d[(_c = (_b = props.facet) == null ? void 0 : _b.key) != null ? _c : ""]) != null ? _e : "";
+ }
+ );
  return (_ctx, _cache) => {
  return openBlock(), createElementBlock("div", _hoisted_1$P, [
  !isInputVisible.value ? (openBlock(), createElementBlock("div", _hoisted_2$B, toDisplayString(statsSummary.value), 1)) : (openBlock(), createElementBlock("div", _hoisted_3$s, [
@@ -28234,13 +28696,14 @@ and ensure you are accounting for this risk.
  { lazy: true }
  ]
  ]),
- isPrice.value ? (openBlock(), createElementBlock("span", _hoisted_7$5, toDisplayString(currency.value), 1)) : createCommentVNode("", true)
+ isPrice.value ? (openBlock(), createElementBlock("span", _hoisted_7$5, toDisplayString(currency.value), 1)) : createCommentVNode("", true),
+ unit.value ? (openBlock(), createElementBlock("span", _hoisted_8$1, toDisplayString(unit.value), 1)) : createCommentVNode("", true)
  ])
  ]),
- _hoisted_8$1,
+ _hoisted_9$1,
  createBaseVNode("div", null, [
- rangeLabelTo.value ? (openBlock(), createElementBlock("div", _hoisted_9$1, toDisplayString(rangeLabelTo.value), 1)) : createCommentVNode("", true),
- createBaseVNode("div", _hoisted_10, [
+ rangeLabelTo.value ? (openBlock(), createElementBlock("div", _hoisted_10, toDisplayString(rangeLabelTo.value), 1)) : createCommentVNode("", true),
+ createBaseVNode("div", _hoisted_11, [
  withDirectives(createBaseVNode("input", {
  "onUpdate:modelValue": _cache[1] || (_cache[1] = ($event) => toValue.value = $event),
  type: "text",
@@ -28249,7 +28712,7 @@ and ensure you are accounting for this risk.
  min: facetMin.value,
  pattern: sliderInputFormat.value,
  "aria-label": ariaLabelTo.value
- }, null, 8, _hoisted_11), [
+ }, null, 8, _hoisted_12), [
  [
  vModelText,
  toValue.value,
@@ -28257,11 +28720,12 @@ and ensure you are accounting for this risk.
  { lazy: true }
  ]
  ]),
- isPrice.value ? (openBlock(), createElementBlock("span", _hoisted_12, toDisplayString(currency.value), 1)) : createCommentVNode("", true)
+ isPrice.value ? (openBlock(), createElementBlock("span", _hoisted_13, toDisplayString(currency.value), 1)) : createCommentVNode("", true),
+ unit.value ? (openBlock(), createElementBlock("span", _hoisted_14, toDisplayString(unit.value), 1)) : createCommentVNode("", true)
  ])
  ])
  ])),
- isSliderVisible.value ? (openBlock(), createElementBlock("div", _hoisted_13, [
+ isSliderVisible.value ? (openBlock(), createElementBlock("div", _hoisted_15, [
  createVNode(unref(m), {
  class: "slider",
  tooltips: false,
@@ -33164,21 +33628,6 @@ and ensure you are accounting for this risk.
  };
  }
  });
- const Env = {
- production: "https://api.lupasearch.com/v1/",
- staging: "https://api.staging.lupasearch.com/v1/"
- };
- const DEFAULT_REQUEST_CONFIG = {
- method: "POST",
- headers: { "Content-Type": "application/json" }
- };
- const DEFAULT_HEADERS = DEFAULT_REQUEST_CONFIG.headers;
- const getApiUrl = (environment, customBaseUrl) => {
- if (customBaseUrl) {
- return customBaseUrl;
- }
- return Env[environment] || Env["production"];
- };
  const suggestSearchChatPhrases = (options, request, chatSettings) => __async2(void 0, null, function* () {
  var _a, _b, _c;
  const { environment, customBaseUrl } = options;
@@ -33685,7 +34134,7 @@ and ensure you are accounting for this risk.
  key: 0,
  class: "lupasearch-chat-content"
  };
- const _sfc_main$1y = /* @__PURE__ */ defineComponent({
+ const _sfc_main$1A = /* @__PURE__ */ defineComponent({
  __name: "ChatContainer",
  props: {
  options: {}
@@ -40339,7 +40788,7 @@ and ensure you are accounting for this risk.
  const instance = createVue(
  options.displayOptions.containerSelector,
  mountOptions == null ? void 0 : mountOptions.mountingBehavior,
- _sfc_main$1y,
+ _sfc_main$1A,
  {
  options
  }