@getlupa/client 1.18.3 → 1.18.5

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
@@ -12619,6 +12619,11 @@ var __async = (__this, __arguments, generator) => {
  }
  return result2;
  };
+ const getSocketClientId = () => {
+ const timestamp = Date.now().toString(36);
+ const randomString = getRandomString(8);
+ return `${timestamp}-${randomString}`;
+ };
  const toFixedIfNecessary = (value, precision = 2) => {
  return (+parseFloat(value).toFixed(precision)).toString();
  };
@@ -12733,6 +12738,10 @@ var __async = (__this, __arguments, generator) => {
  links: {
  searchResults: "/search"
  },
+ voiceSearch: {
+ enabled: false,
+ queryKey: ""
+ },
  panels: [
  {
  type: "suggestion",
@@ -14314,6 +14323,350 @@ var __async = (__this, __arguments, generator) => {
  resetHighlightIndex
  };
  });
+ const Env = {
+ production: "https://api.lupasearch.com/v1/",
+ staging: "https://api.staging.lupasearch.com/v1/"
+ };
+ const VoiceServiceEnv = {
+ production: "ws://voice.lupasearch.com:3001",
+ staging: "ws://voice.lupasearch.dev:3001"
+ };
+ const DEFAULT_REQUEST_CONFIG = {
+ method: "POST",
+ headers: { "Content-Type": "application/json" }
+ };
+ const DEFAULT_HEADERS = DEFAULT_REQUEST_CONFIG.headers;
+ const getVoiceServiceApiUrl = (environment, customVoiceServiceUrl) => {
+ if (customVoiceServiceUrl) {
+ return customVoiceServiceUrl;
+ }
+ return VoiceServiceEnv[environment] || VoiceServiceEnv["production"];
+ };
+ const getApiUrl = (environment, customBaseUrl) => {
+ if (customBaseUrl) {
+ return customBaseUrl;
+ }
+ return Env[environment] || Env["production"];
+ };
+ const _sfc_main$1z = /* @__PURE__ */ defineComponent({
+ __name: "VoiceSearchProgressCircle",
+ props: {
+ isRecording: { type: Boolean },
+ timesliceLimit: {},
+ timeSliceLength: {}
+ },
+ setup(__props, { expose: __expose }) {
+ const props = __props;
+ const progressBar = ref(null);
+ const getProgressBarColor = (progressBarStyle) => {
+ if (!progressBarStyle.backgroundImage.startsWith("conic-gradient")) {
+ return progressBarStyle.backgroundColor;
+ }
+ const colorStops = progressBarStyle.backgroundImage.replace(/conic-gradient\(|\)$/g, "").split(")");
+ if (colorStops.length > 1) {
+ return `${colorStops[0]})`;
+ } else {
+ return progressBarStyle.backgroundColor;
+ }
+ };
+ const startProgressBar = () => {
+ if (!progressBar.value || !props.isRecording) {
+ return;
+ }
+ const duration = props.timesliceLimit * props.timeSliceLength;
+ const progressBarStyle = window.getComputedStyle(progressBar.value);
+ const progressBarColor = getProgressBarColor(progressBarStyle);
+ progressBar.value.style.background = `conic-gradient(${progressBarColor} 0%, transparent 0%)`;
+ let startTime = null;
+ function updateProgress(timestamp) {
+ if (!progressBar.value || !props.isRecording) {
+ return;
+ }
+ if (!startTime)
+ startTime = timestamp;
+ const elapsed = timestamp - startTime;
+ const progress = Math.min(elapsed / duration, 1) * 100;
+ progressBar.value.style.background = `conic-gradient(${progressBarColor} ${progress}%, transparent ${progress}%)`;
+ if (elapsed < duration) {
+ requestAnimationFrame(updateProgress);
+ }
+ }
+ requestAnimationFrame(updateProgress);
+ };
+ const stopProgressBar = () => {
+ if (!progressBar.value) {
+ return;
+ }
+ progressBar.value.style.background = "";
+ };
+ __expose({
+ startProgressBar,
+ stopProgressBar
+ });
+ return (_ctx, _cache) => {
+ return openBlock(), createElementBlock("div", {
+ ref_key: "progressBar",
+ ref: progressBar,
+ class: "lupa-progress-circle"
+ }, null, 512);
+ };
+ }
+ });
+ const buildSocketMessageFrameHeader = (event, payloadLength) => {
+ const headerObj = { event, length: payloadLength };
+ const headerJson = JSON.stringify(headerObj);
+ const headerBytes = new TextEncoder().encode(headerJson);
+ const headerLength = new Uint32Array([headerBytes.length]);
+ const headerLengthBytes = new Uint8Array(headerLength.buffer);
+ const result2 = new Uint8Array(4 + headerBytes.length);
+ result2.set(headerLengthBytes, 0);
+ result2.set(headerBytes, 4);
+ return result2;
+ };
+ function useVoiceRecorder(options) {
+ const socket = ref(null);
+ const mediaStream = ref(null);
+ const mediaRecorder = ref(null);
+ const isRecording = ref(false);
+ const errorRef = ref(null);
+ const transcription = ref("");
+ const timeSliceLength = computed(() => {
+ var _a;
+ return (_a = options.timesliceLength) != null ? _a : 1e3;
+ });
+ onBeforeUnmount(() => {
+ closeSocket();
+ stopRecording();
+ });
+ const initSocket = (url, onMessage) => {
+ socket.value = new WebSocket(url);
+ socket.value.onopen = () => __async2(this, null, function* () {
+ var _a;
+ if (((_a = mediaRecorder.value) == null ? void 0 : _a.state) !== "recording") {
+ yield startRecording();
+ }
+ });
+ socket.value.onmessage = (event) => {
+ const msg = JSON.parse(event.data);
+ if (msg.event === "transcription") {
+ transcription.value = msg.transcription;
+ onMessage == null ? void 0 : onMessage(msg.transcription);
+ stopSocketConnection();
+ } else if (msg.event === "error") {
+ errorRef.value = "Server error during transcription";
+ stopRecording();
+ }
+ };
+ socket.value.onclose = () => {
+ stopRecording();
+ };
+ socket.value.onerror = () => {
+ stopRecording();
+ errorRef.value = "Service connection error";
+ };
+ };
+ const onMediaRecorderDataAvailable = (event) => __async2(this, null, function* () {
+ var _a, _b;
+ if (((_a = mediaRecorder.value) == null ? void 0 : _a.state) !== "recording")
+ return;
+ const audioBuffer = yield event.data.arrayBuffer();
+ const header = buildSocketMessageFrameHeader("audio-chunk", audioBuffer.byteLength);
+ const buffer = new Uint8Array(header.length + audioBuffer.byteLength);
+ buffer.set(header, 0);
+ buffer.set(new Uint8Array(audioBuffer), header.length);
+ (_b = socket.value) == null ? void 0 : _b.send(buffer);
+ });
+ const startRecording = () => __async2(this, null, function* () {
+ mediaStream.value = yield navigator.mediaDevices.getUserMedia({
+ video: false,
+ audio: {
+ channelCount: 1,
+ echoCancellation: true,
+ sampleRate: 16e3
+ }
+ });
+ mediaRecorder.value = new MediaRecorder(mediaStream.value, {
+ mimeType: "audio/webm; codecs=opus"
+ });
+ mediaRecorder.value.ondataavailable = onMediaRecorderDataAvailable;
+ mediaRecorder.value.start(timeSliceLength.value);
+ isRecording.value = true;
+ });
+ const stopRecording = () => {
+ var _a, _b;
+ (_a = mediaRecorder.value) == null ? void 0 : _a.stop();
+ (_b = mediaStream.value) == null ? void 0 : _b.getTracks().forEach((track2) => {
+ track2.stop();
+ });
+ isRecording.value = false;
+ };
+ const stopSocketConnection = () => {
+ if (socket.value && socket.value.readyState === WebSocket.OPEN) {
+ const endHeader = buildSocketMessageFrameHeader("audio-chunk-end", 0);
+ socket.value.send(endHeader);
+ setTimeout(() => {
+ closeSocket();
+ }, 1e3);
+ }
+ };
+ const closeSocket = () => {
+ var _a;
+ (_a = socket.value) == null ? void 0 : _a.close();
+ socket.value = null;
+ };
+ const reset = () => {
+ stopRecording();
+ closeSocket();
+ transcription.value = "";
+ errorRef.value = null;
+ isRecording.value = false;
+ };
+ return {
+ isRecording,
+ transcription,
+ errorRef,
+ initSocket,
+ startRecording,
+ stopRecording,
+ stopSocketConnection,
+ reset,
+ closeSocket
+ };
+ }
+ const _hoisted_1$1l = {
+ key: 0,
+ class: "lupa-dialog-overlay"
+ };
+ const _hoisted_2$W = { class: "lupa-dialog-content" };
+ const _hoisted_3$F = { class: "lupa-listening-text" };
+ const _hoisted_4$v = { class: "lupa-mic-button-wrapper" };
+ const _sfc_main$1y = /* @__PURE__ */ defineComponent({
+ __name: "VoiceSearchDialog",
+ props: {
+ isOpen: { type: Boolean },
+ options: {}
+ },
+ emits: [
+ "close",
+ "transcript-update",
+ "stop-recognize"
+ ],
+ setup(__props, { expose: __expose, emit: emit2 }) {
+ const props = __props;
+ const optionsStore = useOptionsStore();
+ const {
+ isRecording,
+ transcription,
+ errorRef,
+ initSocket,
+ stopSocketConnection,
+ reset
+ } = useVoiceRecorder(props.options);
+ const clientId = ref(null);
+ const voiceSearchProgressBar = ref(null);
+ const timesliceLimit = computed(() => {
+ var _a;
+ return (_a = props.options.timesliceLimit) != null ? _a : 4;
+ });
+ const timeSliceLength = computed(() => {
+ var _a;
+ return (_a = props.options.timesliceLength) != null ? _a : 1e3;
+ });
+ const stopDelay = computed(() => {
+ var _a;
+ return (_a = props.options.stopDelay) != null ? _a : 700;
+ });
+ const labels = computed(() => {
+ var _a;
+ return (_a = props.options.labels) != null ? _a : {};
+ });
+ const description = computed(() => {
+ var _a, _b, _c;
+ if (errorRef.value) {
+ return (_a = labels.value.serviceError) != null ? _a : errorRef.value;
+ }
+ if (!isRecording.value) {
+ return (_b = labels.value.microphoneOff) != null ? _b : "Microphone is off. Try again.";
+ }
+ return (_c = labels.value.listening) != null ? _c : "Listening...";
+ });
+ watch(transcription, (newValue) => {
+ emit2("transcript-update", newValue);
+ });
+ const handleRecordingButtonClick = () => {
+ var _a, _b;
+ if (isRecording.value) {
+ setTimeout(() => {
+ stopSocketConnection();
+ handleOnStopEvent();
+ }, stopDelay.value);
+ return;
+ }
+ const voiceServiceUrl = getVoiceServiceApiUrl(
+ optionsStore.envOptions.environment,
+ props.options.customVoiceServiceUrl
+ );
+ const socketUrl = `${voiceServiceUrl}?clientId=${clientId.value}&queryKey=${props.options.queryKey}&languageCode=${(_a = props.options.language) != null ? _a : "en-US"}&connectionType=write-first`;
+ initSocket(socketUrl);
+ (_b = voiceSearchProgressBar.value) == null ? void 0 : _b.startProgressBar();
+ setTimeout(() => {
+ stopSocketConnection();
+ handleOnStopEvent();
+ }, timesliceLimit.value * timeSliceLength.value);
+ };
+ const handleOnStopEvent = () => {
+ var _a;
+ setTimeout(() => {
+ if (errorRef.value)
+ return;
+ emit2("stop-recognize", transcription.value);
+ }, 1500);
+ (_a = voiceSearchProgressBar.value) == null ? void 0 : _a.stopProgressBar();
+ };
+ onMounted(() => {
+ clientId.value = getSocketClientId();
+ });
+ onBeforeUnmount(() => {
+ clientId.value = null;
+ });
+ const dialogReset = () => {
+ var _a;
+ reset();
+ (_a = voiceSearchProgressBar.value) == null ? void 0 : _a.stopProgressBar();
+ };
+ __expose({
+ handleRecordingButtonClick,
+ reset: dialogReset
+ });
+ return (_ctx, _cache) => {
+ return openBlock(), createElementBlock("div", null, [
+ props.isOpen ? (openBlock(), createElementBlock("div", _hoisted_1$1l, [
+ createBaseVNode("button", {
+ class: "lupa-dialog-box-close-button",
+ onClick: _cache[0] || (_cache[0] = () => emit2("close"))
+ }),
+ createBaseVNode("div", _hoisted_2$W, [
+ createBaseVNode("p", _hoisted_3$F, toDisplayString(description.value), 1),
+ createBaseVNode("div", _hoisted_4$v, [
+ createBaseVNode("button", {
+ class: normalizeClass(["lupa-mic-button", { recording: unref(isRecording) }]),
+ onClick: handleRecordingButtonClick
+ }, null, 2),
+ createVNode(_sfc_main$1z, {
+ ref_key: "voiceSearchProgressBar",
+ ref: voiceSearchProgressBar,
+ class: "lupa-progress-circle",
+ isRecording: unref(isRecording),
+ timesliceLimit: timesliceLimit.value,
+ timeSliceLength: timeSliceLength.value
+ }, null, 8, ["isRecording", "timesliceLimit", "timeSliceLength"])
+ ])
+ ])
+ ])) : createCommentVNode("", true)
+ ]);
+ };
+ }
+ });
  const _hoisted_1$1k = { id: "lupa-search-box-input-container" };
  const _hoisted_2$V = { class: "lupa-input-clear" };
  const _hoisted_3$E = { id: "lupa-search-box-input" };
@@ -14327,6 +14680,7 @@ var __async = (__this, __arguments, generator) => {
  key: 0,
  class: "lupa-close-label"
  };
+ const _hoisted_9$3 = { key: 1 };
  const _sfc_main$1x = /* @__PURE__ */ defineComponent({
  __name: "SearchBoxInput",
  props: {
@@ -14342,6 +14696,8 @@ var __async = (__this, __arguments, generator) => {
  const searchBoxStore = useSearchBoxStore();
  const { query } = storeToRefs(paramStore);
  const mainInput = ref(null);
+ const voiceDialogOverlay = ref(null);
+ const isVoiceDialogOpen = ref(false);
  const emitInputOnFocus = computed(() => {
  var _a;
  return (_a = props.emitInputOnFocus) != null ? _a : true;
@@ -14352,6 +14708,10 @@ var __async = (__this, __arguments, generator) => {
  return (_a = props.suggestedValue) != null ? _a : { value: "", override: false, item: { suggestion: "" } };
  }
  );
+ const isVoiceSearchEnabled = computed(() => {
+ var _a, _b;
+ return (_b = (_a = props.options.voiceSearch) == null ? void 0 : _a.enabled) != null ? _b : false;
+ });
  const labels = computed(() => props.options.labels);
  const input2 = ref("");
  const inputValue = computed({
@@ -14375,6 +14735,12 @@ var __async = (__this, __arguments, generator) => {
  var _a;
  return (_a = labels.value.searchInputAriaLabel) != null ? _a : "Search input";
  });
+ onMounted(() => {
+ document.addEventListener("click", handleClickOutsideVoiceDialogOverlay);
+ });
+ onBeforeUnmount(() => {
+ document.removeEventListener("click", handleClickOutsideVoiceDialogOverlay);
+ });
  watch(suggestedValue, () => {
  if (suggestedValue.value.override) {
  input2.value = suggestedValue.value.item.suggestion;
@@ -14409,6 +14775,37 @@ var __async = (__this, __arguments, generator) => {
  }
  (_a = mainInput == null ? void 0 : mainInput.value) == null ? void 0 : _a.focus();
  };
+ const openVoiceSearchDialog = () => {
+ var _a;
+ isVoiceDialogOpen.value = true;
+ (_a = voiceDialogOverlay.value) == null ? void 0 : _a.handleRecordingButtonClick();
+ };
+ const closeDialog = () => {
+ var _a;
+ isVoiceDialogOpen.value = false;
+ (_a = voiceDialogOverlay.value) == null ? void 0 : _a.reset();
+ };
+ const handleVoiceSearchOutput = (transcription) => {
+ inputValue.value = transcription;
+ handleSubmit();
+ };
+ const stopRecognition = (trascription) => {
+ setTimeout(() => {
+ isVoiceDialogOpen.value = false;
+ handleVoiceSearchOutput(trascription);
+ }, 500);
+ };
+ const handleClickOutsideVoiceDialogOverlay = (event) => {
+ if (event.target.classList.contains("lupa-voice-search-button")) {
+ return;
+ }
+ if (voiceDialogOverlay.value && voiceDialogOverlay.value.$el.contains(event.target)) {
+ return;
+ }
+ if (isVoiceDialogOpen.value) {
+ closeDialog();
+ }
+ };
  __expose({ focus });
  return (_ctx, _cache) => {
  return openBlock(), createElementBlock("div", _hoisted_1$1k, [
@@ -14452,7 +14849,23 @@ var __async = (__this, __arguments, generator) => {
  onClick: _cache[1] || (_cache[1] = ($event) => _ctx.$emit("close"))
  }, [
  labels.value.close ? (openBlock(), createElementBlock("span", _hoisted_8$3, toDisplayString(labels.value.close), 1)) : createCommentVNode("", true)
- ])) : createCommentVNode("", true)
+ ])) : createCommentVNode("", true),
+ isVoiceSearchEnabled.value ? (openBlock(), createElementBlock("div", _hoisted_9$3, [
+ createBaseVNode("button", {
+ onClick: openVoiceSearchDialog,
+ class: "lupa-voice-search-button"
+ })
+ ])) : createCommentVNode("", true),
+ isVoiceSearchEnabled.value ? (openBlock(), createBlock(_sfc_main$1y, {
+ key: 2,
+ ref_key: "voiceDialogOverlay",
+ ref: voiceDialogOverlay,
+ isOpen: isVoiceDialogOpen.value,
+ options: props.options.voiceSearch,
+ onClose: closeDialog,
+ onTranscriptUpdate: handleVoiceSearchOutput,
+ onStopRecognize: stopRecognition
+ }, null, 8, ["isOpen", "options"])) : createCommentVNode("", true)
  ]);
  };
  }
@@ -25991,7 +26404,8 @@ and ensure you are accounting for this risk.
  "labels",
  "links",
  "inputAttributes",
- "showSubmitButton"
+ "showSubmitButton",
+ "voiceSearch"
  ])
  );
  const panelOptions = computed(
@@ -26575,22 +26989,42 @@ and ensure you are accounting for this risk.
  emits: ["remove"],
  setup(__props, { emit: emit2 }) {
  const props = __props;
- const facetKeyClass = computed(() => {
- return `lupa-facet-active-filter-${props.filter.key}`;
+ const facetKeyClass = computed(() => `lupa-facet-active-filter-${props.filter.key}`);
+ const { searchResultOptions } = storeToRefs(useOptionsStore());
+ const units = computed(() => {
+ var _a, _b, _c, _d, _e;
+ return (_e = (_d = (_c = (_b = (_a = searchResultOptions.value) == null ? void 0 : _a.filters) == null ? void 0 : _b.facets) == null ? void 0 : _c.stats) == null ? void 0 : _d.units) != null ? _e : {};
  });
- const handleClick = () => {
+ function handleClick() {
  emit2("remove", { filter: props.filter });
- };
+ }
+ function formatFilterValue(filter2) {
+ const unit = units.value[filter2.key] || "";
+ let min, max;
+ if (Array.isArray(filter2.value)) {
+ [min, max] = filter2.value.map(String);
+ } else if (typeof filter2.value === "string" && filter2.value.includes("-")) {
+ const parts = filter2.value.split("-").map((s) => s.trim());
+ if (parts.length === 2)
+ [min, max] = parts;
+ }
+ if (min != null && max != null) {
+ return `${min} ${unit} – ${max} ${unit}`;
+ }
+ return `${filter2.value} ${unit}`.trim();
+ }
  return (_ctx, _cache) => {
  return openBlock(), createElementBlock("div", {
- class: normalizeClass(["lupa-search-result-filter-value", { [facetKeyClass.value]: true }])
+ class: normalizeClass(["lupa-search-result-filter-value", [facetKeyClass.value]]),
+ "data-cy": "lupa-current-filter-item"
  }, [
  createBaseVNode("div", {
  class: "lupa-current-filter-action",
- onClick: handleClick
- }, ""),
+ onClick: handleClick,
+ "aria-label": "Remove filter"
+ }, " ⨉ "),
  createBaseVNode("div", _hoisted_1$U, toDisplayString(_ctx.filter.label) + ": ", 1),
- createBaseVNode("div", _hoisted_2$F, toDisplayString(_ctx.filter.value), 1)
+ createBaseVNode("div", _hoisted_2$F, toDisplayString(formatFilterValue(props.filter)), 1)
  ], 2);
  };
  }
@@ -26612,6 +27046,14 @@ and ensure you are accounting for this risk.
  expandable: { type: Boolean }
  },
  setup(__props) {
+ const optionsStore = useOptionsStore();
+ const { searchResultOptions } = storeToRefs(optionsStore);
+ const units = computed(
+ () => {
+ var _a, _b, _c, _d, _e;
+ return (_e = (_d = (_c = (_b = (_a = searchResultOptions == null ? void 0 : searchResultOptions.value) == null ? void 0 : _a.filters) == null ? void 0 : _b.facets) == null ? void 0 : _c.stats) == null ? void 0 : _d.units) != null ? _e : {};
+ }
+ );
  const isOpen = ref(false);
  const paramsStore = useParamsStore();
  const optionStore = useOptionsStore();
@@ -26693,8 +27135,9 @@ and ensure you are accounting for this risk.
  return openBlock(), createBlock(_sfc_main$_, {
  key: filter2.key + "_" + filter2.value,
  filter: filter2,
+ units: units.value,
  onRemove: handleRemove
- }, null, 8, ["filter"]);
+ }, null, 8, ["filter", "units"]);
  }), 128))
  ]),
  createBaseVNode("div", {
@@ -28018,15 +28461,17 @@ and ensure you are accounting for this risk.
  const _hoisted_5$d = { class: "lupa-stats-from" };
  const _hoisted_6$7 = ["max", "min", "pattern", "aria-label"];
  const _hoisted_7$5 = { key: 0 };
- const _hoisted_8$1 = /* @__PURE__ */ createBaseVNode("div", { class: "lupa-stats-separator" }, null, -1);
- const _hoisted_9$1 = {
+ const _hoisted_8$1 = { key: 1 };
+ const _hoisted_9$1 = /* @__PURE__ */ createBaseVNode("div", { class: "lupa-stats-separator" }, null, -1);
+ const _hoisted_10 = {
  key: 0,
  class: "lupa-stats-range-label"
  };
- const _hoisted_10 = { class: "lupa-stats-to" };
- const _hoisted_11 = ["max", "min", "pattern", "aria-label"];
- const _hoisted_12 = { key: 0 };
- const _hoisted_13 = {
+ const _hoisted_11 = { class: "lupa-stats-to" };
+ const _hoisted_12 = ["max", "min", "pattern", "aria-label"];
+ const _hoisted_13 = { key: 0 };
+ const _hoisted_14 = { key: 1 };
+ const _hoisted_15 = {
  key: 2,
  class: "lupa-stats-slider-wrapper"
  };
@@ -28093,7 +28538,7 @@ and ensure you are accounting for this risk.
  if (!value || value > facetMax.value) {
  return;
  }
- innerSliderRange.value = [value, sliderRange.value[1]];
+ innerSliderRange.value = [sliderRange.value[1], value];
  handleInputChange();
  }
  });
@@ -28149,7 +28594,18 @@ and ensure you are accounting for this risk.
  });
  const statsSummary = computed(() => {
  const [min, max] = sliderRange.value;
- return isPrice.value ? formatPriceSummary([min, max], currency.value, separator.value, currencyTemplate.value) : formatRange({ gte: min, lte: max });
+ if (isPrice.value) {
+ return formatPriceSummary(
+ [min, max],
+ currency.value,
+ separator.value,
+ currencyTemplate.value
+ );
+ }
+ if (unit.value) {
+ return `${min} ${unit.value} - ${max} ${unit.value}`;
+ }
+ return formatRange({ gte: min, lte: max });
  });
  const separator = computed(() => {
  var _a, _b, _c;
@@ -28210,6 +28666,12 @@ and ensure you are accounting for this risk.
  const handleDragging = (value) => {
  innerSliderRange.value = value;
  };
+ const unit = computed(
+ () => {
+ var _a, _b, _c, _d, _e;
+ return (_e = (_d = (_a = props.options.stats) == null ? void 0 : _a.units) == null ? void 0 : _d[(_c = (_b = props.facet) == null ? void 0 : _b.key) != null ? _c : ""]) != null ? _e : "";
+ }
+ );
  return (_ctx, _cache) => {
  return openBlock(), createElementBlock("div", _hoisted_1$P, [
  !isInputVisible.value ? (openBlock(), createElementBlock("div", _hoisted_2$B, toDisplayString(statsSummary.value), 1)) : (openBlock(), createElementBlock("div", _hoisted_3$s, [
@@ -28232,13 +28694,14 @@ and ensure you are accounting for this risk.
  { lazy: true }
  ]
  ]),
- isPrice.value ? (openBlock(), createElementBlock("span", _hoisted_7$5, toDisplayString(currency.value), 1)) : createCommentVNode("", true)
+ isPrice.value ? (openBlock(), createElementBlock("span", _hoisted_7$5, toDisplayString(currency.value), 1)) : createCommentVNode("", true),
+ unit.value ? (openBlock(), createElementBlock("span", _hoisted_8$1, toDisplayString(unit.value), 1)) : createCommentVNode("", true)
  ])
  ]),
- _hoisted_8$1,
+ _hoisted_9$1,
  createBaseVNode("div", null, [
- rangeLabelTo.value ? (openBlock(), createElementBlock("div", _hoisted_9$1, toDisplayString(rangeLabelTo.value), 1)) : createCommentVNode("", true),
- createBaseVNode("div", _hoisted_10, [
+ rangeLabelTo.value ? (openBlock(), createElementBlock("div", _hoisted_10, toDisplayString(rangeLabelTo.value), 1)) : createCommentVNode("", true),
+ createBaseVNode("div", _hoisted_11, [
  withDirectives(createBaseVNode("input", {
  "onUpdate:modelValue": _cache[1] || (_cache[1] = ($event) => toValue.value = $event),
  type: "text",
@@ -28247,7 +28710,7 @@ and ensure you are accounting for this risk.
  min: facetMin.value,
  pattern: sliderInputFormat.value,
  "aria-label": ariaLabelTo.value
- }, null, 8, _hoisted_11), [
+ }, null, 8, _hoisted_12), [
  [
  vModelText,
  toValue.value,
@@ -28255,11 +28718,12 @@ and ensure you are accounting for this risk.
  { lazy: true }
  ]
  ]),
- isPrice.value ? (openBlock(), createElementBlock("span", _hoisted_12, toDisplayString(currency.value), 1)) : createCommentVNode("", true)
+ isPrice.value ? (openBlock(), createElementBlock("span", _hoisted_13, toDisplayString(currency.value), 1)) : createCommentVNode("", true),
+ unit.value ? (openBlock(), createElementBlock("span", _hoisted_14, toDisplayString(unit.value), 1)) : createCommentVNode("", true)
  ])
  ])
  ])),
- isSliderVisible.value ? (openBlock(), createElementBlock("div", _hoisted_13, [
+ isSliderVisible.value ? (openBlock(), createElementBlock("div", _hoisted_15, [
  createVNode(unref(m), {
  class: "slider",
  tooltips: false,
@@ -33162,21 +33626,6 @@ and ensure you are accounting for this risk.
  };
  }
  });
- const Env = {
- production: "https://api.lupasearch.com/v1/",
- staging: "https://api.staging.lupasearch.com/v1/"
- };
- const DEFAULT_REQUEST_CONFIG = {
- method: "POST",
- headers: { "Content-Type": "application/json" }
- };
- const DEFAULT_HEADERS = DEFAULT_REQUEST_CONFIG.headers;
- const getApiUrl = (environment, customBaseUrl) => {
- if (customBaseUrl) {
- return customBaseUrl;
- }
- return Env[environment] || Env["production"];
- };
  const suggestSearchChatPhrases = (options, request, chatSettings) => __async2(void 0, null, function* () {
  var _a, _b, _c;
  const { environment, customBaseUrl } = options;
@@ -33683,7 +34132,7 @@ and ensure you are accounting for this risk.
  key: 0,
  class: "lupasearch-chat-content"
  };
- const _sfc_main$1y = /* @__PURE__ */ defineComponent({
+ const _sfc_main$1A = /* @__PURE__ */ defineComponent({
  __name: "ChatContainer",
  props: {
  options: {}
@@ -40337,7 +40786,7 @@ and ensure you are accounting for this risk.
  const instance = createVue(
  options.displayOptions.containerSelector,
  mountOptions == null ? void 0 : mountOptions.mountingBehavior,
- _sfc_main$1y,
+ _sfc_main$1A,
  {
  options
  }