@humeai/voice-embed-react 0.2.5 → 0.2.7-beta.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +12 -0
- package/dist/index.d.ts +12 -0
- package/dist/index.js +92 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +69 -0
- package/dist/index.mjs.map +1 -0
- package/package.json +15 -15
package/dist/index.d.mts
ADDED
@@ -0,0 +1,12 @@
+import { EmbeddedVoiceConfig, TranscriptMessageHandler, CloseHandler } from '@humeai/voice-embed';
+export { AssistantTranscriptMessage, COLLAPSE_WIDGET_ACTION, ChatMetadataMessage, EXPAND_WIDGET_ACTION, EmotionScores, FrameToClientAction, JSONMessage, LanguageModelOption, MINIMIZE_WIDGET_ACTION, RESIZE_FRAME_ACTION, SocketConfig, TRANSCRIPT_MESSAGE_ACTION, ToolCall, ToolError, ToolResponse, UserTranscriptMessage, WIDGET_IFRAME_IS_READY_ACTION, WindowDimensions, parseClientToFrameAction } from '@humeai/voice-embed';
+
+type EmbeddedVoiceProps = Partial<EmbeddedVoiceConfig> & NonNullable<Pick<EmbeddedVoiceConfig, 'auth'>> & {
+    onMessage?: TranscriptMessageHandler;
+    onClose?: CloseHandler;
+    isEmbedOpen: boolean;
+    openOnMount?: boolean;
+};
+declare const EmbeddedVoice: (props: EmbeddedVoiceProps) => null;
+
+export { EmbeddedVoice };
package/dist/index.d.ts
ADDED
@@ -0,0 +1,12 @@
+import { EmbeddedVoiceConfig, TranscriptMessageHandler, CloseHandler } from '@humeai/voice-embed';
+export { AssistantTranscriptMessage, COLLAPSE_WIDGET_ACTION, ChatMetadataMessage, EXPAND_WIDGET_ACTION, EmotionScores, FrameToClientAction, JSONMessage, LanguageModelOption, MINIMIZE_WIDGET_ACTION, RESIZE_FRAME_ACTION, SocketConfig, TRANSCRIPT_MESSAGE_ACTION, ToolCall, ToolError, ToolResponse, UserTranscriptMessage, WIDGET_IFRAME_IS_READY_ACTION, WindowDimensions, parseClientToFrameAction } from '@humeai/voice-embed';
+
+type EmbeddedVoiceProps = Partial<EmbeddedVoiceConfig> & NonNullable<Pick<EmbeddedVoiceConfig, 'auth'>> & {
+    onMessage?: TranscriptMessageHandler;
+    onClose?: CloseHandler;
+    isEmbedOpen: boolean;
+    openOnMount?: boolean;
+};
+declare const EmbeddedVoice: (props: EmbeddedVoiceProps) => null;
+
+export { EmbeddedVoice };
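The two declaration files above (one per entry point, ESM and CJS) define the component's public surface: EmbeddedVoice takes the embed configuration plus onMessage, onClose, isEmbedOpen, and openOnMount, and renders nothing itself. A minimal usage sketch, assuming a React 18 app; the auth object shape is an assumption based on EmbeddedVoiceConfig from @humeai/voice-embed, and the access-token value is a placeholder:

import { useState } from 'react';
import { EmbeddedVoice } from '@humeai/voice-embed-react';

export function App() {
  const [open, setOpen] = useState(false);
  return (
    <>
      <button onClick={() => setOpen(true)}>Start voice chat</button>
      {/* Mounts the embed widget as a side effect; the component itself renders null */}
      <EmbeddedVoice
        auth={{ type: 'accessToken', value: '<YOUR_ACCESS_TOKEN>' }} // assumed auth shape per EmbeddedVoiceConfig
        isEmbedOpen={open}
        openOnMount={false}
        onMessage={(message) => console.log('transcript message', message)}
        onClose={() => setOpen(false)}
      />
    </>
  );
}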
package/dist/index.js
ADDED
@@ -0,0 +1,92 @@
+'use client';
+"use strict";
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+// src/index.ts
+var index_exports = {};
+__export(index_exports, {
+  COLLAPSE_WIDGET_ACTION: () => import_voice_embed2.COLLAPSE_WIDGET_ACTION,
+  EXPAND_WIDGET_ACTION: () => import_voice_embed2.EXPAND_WIDGET_ACTION,
+  EmbeddedVoice: () => EmbeddedVoice,
+  LanguageModelOption: () => import_voice_embed2.LanguageModelOption,
+  MINIMIZE_WIDGET_ACTION: () => import_voice_embed2.MINIMIZE_WIDGET_ACTION,
+  RESIZE_FRAME_ACTION: () => import_voice_embed2.RESIZE_FRAME_ACTION,
+  TRANSCRIPT_MESSAGE_ACTION: () => import_voice_embed2.TRANSCRIPT_MESSAGE_ACTION,
+  WIDGET_IFRAME_IS_READY_ACTION: () => import_voice_embed2.WIDGET_IFRAME_IS_READY_ACTION,
+  parseClientToFrameAction: () => import_voice_embed2.parseClientToFrameAction
+});
+module.exports = __toCommonJS(index_exports);
+
+// src/lib/EmbeddedVoice.ts
+var import_voice_embed = require("@humeai/voice-embed");
+var import_react = require("react");
+var EmbeddedVoice = (props) => {
+  const {
+    onMessage,
+    isEmbedOpen,
+    onClose,
+    openOnMount = false,
+    ...config
+  } = props;
+  const embeddedVoice = (0, import_react.useRef)(null);
+  const onMessageHandler = (0, import_react.useRef)();
+  onMessageHandler.current = onMessage;
+  const onCloseHandler = (0, import_react.useRef)();
+  onCloseHandler.current = onClose;
+  const stableConfig = (0, import_react.useRef)();
+  stableConfig.current = config;
+  (0, import_react.useEffect)(() => {
+    let unmount;
+    if (!embeddedVoice.current && stableConfig.current) {
+      embeddedVoice.current = import_voice_embed.EmbeddedVoice.create({
+        onMessage: onMessageHandler.current,
+        onClose: onCloseHandler.current,
+        openOnMount,
+        ...stableConfig.current
+      });
+      unmount = embeddedVoice.current.mount();
+    }
+    return () => {
+      unmount?.();
+      embeddedVoice.current = null;
+    };
+  }, [openOnMount]);
+  (0, import_react.useEffect)(() => {
+    if (isEmbedOpen) {
+      embeddedVoice.current?.openEmbed();
+    }
+  }, [isEmbedOpen]);
+  return null;
+};
+
+// src/index.ts
+var import_voice_embed2 = require("@humeai/voice-embed");
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {
+  COLLAPSE_WIDGET_ACTION,
+  EXPAND_WIDGET_ACTION,
+  EmbeddedVoice,
+  LanguageModelOption,
+  MINIMIZE_WIDGET_ACTION,
+  RESIZE_FRAME_ACTION,
+  TRANSCRIPT_MESSAGE_ACTION,
+  WIDGET_IFRAME_IS_READY_ACTION,
+  parseClientToFrameAction
+});
+//# sourceMappingURL=index.js.map
package/dist/index.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../src/index.ts","../src/lib/EmbeddedVoice.ts"],"sourcesContent":["export * from './lib/EmbeddedVoice';\n\nexport {\n COLLAPSE_WIDGET_ACTION,\n EXPAND_WIDGET_ACTION,\n MINIMIZE_WIDGET_ACTION,\n RESIZE_FRAME_ACTION,\n TRANSCRIPT_MESSAGE_ACTION,\n WIDGET_IFRAME_IS_READY_ACTION,\n parseClientToFrameAction,\n LanguageModelOption,\n} from '@humeai/voice-embed';\n\nexport type {\n AssistantTranscriptMessage,\n SocketConfig,\n FrameToClientAction,\n JSONMessage,\n UserTranscriptMessage,\n WindowDimensions,\n EmotionScores,\n ToolCall,\n ToolResponse,\n ToolError,\n ChatMetadataMessage,\n} from '@humeai/voice-embed';\n","import {\n type CloseHandler,\n EmbeddedVoice as EA,\n type EmbeddedVoiceConfig,\n type TranscriptMessageHandler,\n} from '@humeai/voice-embed';\nimport { useEffect, useRef } from 'react';\n\ntype EmbeddedVoiceProps = Partial<EmbeddedVoiceConfig> &\n NonNullable<Pick<EmbeddedVoiceConfig, 'auth'>> & {\n onMessage?: TranscriptMessageHandler;\n onClose?: CloseHandler;\n isEmbedOpen: boolean;\n openOnMount?: boolean;\n };\n\nexport const EmbeddedVoice = (props: EmbeddedVoiceProps) => {\n const {\n onMessage,\n isEmbedOpen,\n onClose,\n openOnMount = false,\n ...config\n } = props;\n const embeddedVoice = useRef<EA | null>(null);\n const onMessageHandler = useRef<TranscriptMessageHandler | undefined>();\n onMessageHandler.current = onMessage;\n\n const onCloseHandler = useRef<CloseHandler | undefined>();\n onCloseHandler.current = onClose;\n\n const stableConfig = useRef<\n Partial<EmbeddedVoiceConfig> &\n NonNullable<Pick<EmbeddedVoiceConfig, 'auth'>>\n >();\n stableConfig.current = config;\n\n useEffect(() => {\n let unmount: () => void;\n if (!embeddedVoice.current && stableConfig.current) {\n embeddedVoice.current = EA.create({\n onMessage: onMessageHandler.current,\n onClose: onCloseHandler.current,\n openOnMount: openOnMount,\n ...stableConfig.current,\n });\n unmount = embeddedVoice.current.mount();\n }\n\n return () => {\n unmount?.();\n embeddedVoice.current = null;\n };\n }, [openOnMount]);\n\n useEffect(() => {\n if (isEmbedOpen) {\n embeddedVoice.current?.openEmbed();\n }\n }, [isEmbedOpen]);\n\n return null;\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,yBAKO;AACP,mBAAkC;AAU3B,IAAM,gBAAgB,CAAC,UAA8B;AAC1D,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA,cAAc;AAAA,IACd,GAAG;AAAA,EACL,IAAI;AACJ,QAAM,oBAAgB,qBAAkB,IAAI;AAC5C,QAAM,uBAAmB,qBAA6C;AACtE,mBAAiB,UAAU;AAE3B,QAAM,qBAAiB,qBAAiC;AACxD,iBAAe,UAAU;AAEzB,QAAM,mBAAe,qBAGnB;AACF,eAAa,UAAU;AAEvB,8BAAU,MAAM;AACd,QAAI;AACJ,QAAI,CAAC,cAAc,WAAW,aAAa,SAAS;AAClD,oBAAc,UAAU,mBAAAA,cAAG,OAAO;AAAA,QAChC,WAAW,iBAAiB;AAAA,QAC5B,SAAS,eAAe;AAAA,QACxB;AAAA,QACA,GAAG,aAAa;AAAA,MAClB,CAAC;AACD,gBAAU,cAAc,QAAQ,MAAM;AAAA,IACxC;AAEA,WAAO,MAAM;AACX,gBAAU;AACV,oBAAc,UAAU;AAAA,IAC1B;AAAA,EACF,GAAG,CAAC,WAAW,CAAC;AAEhB,8BAAU,MAAM;AACd,QAAI,aAAa;AACf,oBAAc,SAAS,UAAU;AAAA,IACnC;AAAA,EACF,GAAG,CAAC,WAAW,CAAC;AAEhB,SAAO;AACT;;;AD5DA,IAAAC,sBASO;","names":["EA","import_voice_embed"]}
package/dist/index.mjs
ADDED
@@ -0,0 +1,69 @@
+'use client';
+
+// src/lib/EmbeddedVoice.ts
+import {
+  EmbeddedVoice as EA
+} from "@humeai/voice-embed";
+import { useEffect, useRef } from "react";
+var EmbeddedVoice = (props) => {
+  const {
+    onMessage,
+    isEmbedOpen,
+    onClose,
+    openOnMount = false,
+    ...config
+  } = props;
+  const embeddedVoice = useRef(null);
+  const onMessageHandler = useRef();
+  onMessageHandler.current = onMessage;
+  const onCloseHandler = useRef();
+  onCloseHandler.current = onClose;
+  const stableConfig = useRef();
+  stableConfig.current = config;
+  useEffect(() => {
+    let unmount;
+    if (!embeddedVoice.current && stableConfig.current) {
+      embeddedVoice.current = EA.create({
+        onMessage: onMessageHandler.current,
+        onClose: onCloseHandler.current,
+        openOnMount,
+        ...stableConfig.current
+      });
+      unmount = embeddedVoice.current.mount();
+    }
+    return () => {
+      unmount?.();
+      embeddedVoice.current = null;
+    };
+  }, [openOnMount]);
+  useEffect(() => {
+    if (isEmbedOpen) {
+      embeddedVoice.current?.openEmbed();
+    }
+  }, [isEmbedOpen]);
+  return null;
+};
+
+// src/index.ts
+import {
+  COLLAPSE_WIDGET_ACTION,
+  EXPAND_WIDGET_ACTION,
+  MINIMIZE_WIDGET_ACTION,
+  RESIZE_FRAME_ACTION,
+  TRANSCRIPT_MESSAGE_ACTION,
+  WIDGET_IFRAME_IS_READY_ACTION,
+  parseClientToFrameAction,
+  LanguageModelOption
+} from "@humeai/voice-embed";
+export {
+  COLLAPSE_WIDGET_ACTION,
+  EXPAND_WIDGET_ACTION,
+  EmbeddedVoice,
+  LanguageModelOption,
+  MINIMIZE_WIDGET_ACTION,
+  RESIZE_FRAME_ACTION,
+  TRANSCRIPT_MESSAGE_ACTION,
+  WIDGET_IFRAME_IS_READY_ACTION,
+  parseClientToFrameAction
+};
+//# sourceMappingURL=index.mjs.map
package/dist/index.mjs.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../src/lib/EmbeddedVoice.ts","../src/index.ts"],"sourcesContent":["import {\n type CloseHandler,\n EmbeddedVoice as EA,\n type EmbeddedVoiceConfig,\n type TranscriptMessageHandler,\n} from '@humeai/voice-embed';\nimport { useEffect, useRef } from 'react';\n\ntype EmbeddedVoiceProps = Partial<EmbeddedVoiceConfig> &\n NonNullable<Pick<EmbeddedVoiceConfig, 'auth'>> & {\n onMessage?: TranscriptMessageHandler;\n onClose?: CloseHandler;\n isEmbedOpen: boolean;\n openOnMount?: boolean;\n };\n\nexport const EmbeddedVoice = (props: EmbeddedVoiceProps) => {\n const {\n onMessage,\n isEmbedOpen,\n onClose,\n openOnMount = false,\n ...config\n } = props;\n const embeddedVoice = useRef<EA | null>(null);\n const onMessageHandler = useRef<TranscriptMessageHandler | undefined>();\n onMessageHandler.current = onMessage;\n\n const onCloseHandler = useRef<CloseHandler | undefined>();\n onCloseHandler.current = onClose;\n\n const stableConfig = useRef<\n Partial<EmbeddedVoiceConfig> &\n NonNullable<Pick<EmbeddedVoiceConfig, 'auth'>>\n >();\n stableConfig.current = config;\n\n useEffect(() => {\n let unmount: () => void;\n if (!embeddedVoice.current && stableConfig.current) {\n embeddedVoice.current = EA.create({\n onMessage: onMessageHandler.current,\n onClose: onCloseHandler.current,\n openOnMount: openOnMount,\n ...stableConfig.current,\n });\n unmount = embeddedVoice.current.mount();\n }\n\n return () => {\n unmount?.();\n embeddedVoice.current = null;\n };\n }, [openOnMount]);\n\n useEffect(() => {\n if (isEmbedOpen) {\n embeddedVoice.current?.openEmbed();\n }\n }, [isEmbedOpen]);\n\n return null;\n};\n","export * from './lib/EmbeddedVoice';\n\nexport {\n COLLAPSE_WIDGET_ACTION,\n EXPAND_WIDGET_ACTION,\n MINIMIZE_WIDGET_ACTION,\n RESIZE_FRAME_ACTION,\n TRANSCRIPT_MESSAGE_ACTION,\n WIDGET_IFRAME_IS_READY_ACTION,\n parseClientToFrameAction,\n LanguageModelOption,\n} from '@humeai/voice-embed';\n\nexport type {\n AssistantTranscriptMessage,\n SocketConfig,\n FrameToClientAction,\n JSONMessage,\n UserTranscriptMessage,\n WindowDimensions,\n EmotionScores,\n ToolCall,\n ToolResponse,\n ToolError,\n ChatMetadataMessage,\n} from '@humeai/voice-embed';\n"],"mappings":";;;AAAA;AAAA,EAEE,iBAAiB;AAAA,OAGZ;AACP,SAAS,WAAW,cAAc;AAU3B,IAAM,gBAAgB,CAAC,UAA8B;AAC1D,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA,cAAc;AAAA,IACd,GAAG;AAAA,EACL,IAAI;AACJ,QAAM,gBAAgB,OAAkB,IAAI;AAC5C,QAAM,mBAAmB,OAA6C;AACtE,mBAAiB,UAAU;AAE3B,QAAM,iBAAiB,OAAiC;AACxD,iBAAe,UAAU;AAEzB,QAAM,eAAe,OAGnB;AACF,eAAa,UAAU;AAEvB,YAAU,MAAM;AACd,QAAI;AACJ,QAAI,CAAC,cAAc,WAAW,aAAa,SAAS;AAClD,oBAAc,UAAU,GAAG,OAAO;AAAA,QAChC,WAAW,iBAAiB;AAAA,QAC5B,SAAS,eAAe;AAAA,QACxB;AAAA,QACA,GAAG,aAAa;AAAA,MAClB,CAAC;AACD,gBAAU,cAAc,QAAQ,MAAM;AAAA,IACxC;AAEA,WAAO,MAAM;AACX,gBAAU;AACV,oBAAc,UAAU;AAAA,IAC1B;AAAA,EACF,GAAG,CAAC,WAAW,CAAC;AAEhB,YAAU,MAAM;AACd,QAAI,aAAa;AACf,oBAAc,SAAS,UAAU;AAAA,IACnC;AAAA,EACF,GAAG,CAAC,WAAW,CAAC;AAEhB,SAAO;AACT;;;AC5DA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OACK;","names":[]}
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@humeai/voice-embed-react",
-  "version": "0.2.5",
+  "version": "0.2.7-beta.1",
   "description": "",
   "main": "./dist/index.js",
   "module": "./dist/index.mjs",
@@ -14,24 +14,16 @@
     "package.json",
     "dist"
   ],
-  "scripts": {
-    "build": "tsup",
-    "dev": "tsup --watch",
-    "lint": "eslint ./src",
-    "pack": "npm pack"
-  },
   "keywords": [],
   "author": "",
   "license": "ISC",
   "dependencies": {
-    "
-    "hume": "^0.13.8",
+    "hume": "^0.14.1",
     "react": "^18.2.0",
-    "react-dom": "^18.2.0"
+    "react-dom": "^18.2.0",
+    "@humeai/voice-embed": "0.2.7-beta.1"
   },
   "devDependencies": {
-    "@humeai/eslint-config": "workspace:*",
-    "@humeai/typescript-config": "workspace:*",
     "@testing-library/react": "^14.2.2",
     "@testing-library/react-hooks": "^8.0.1",
     "@types/eslint": "^8.56.6",
@@ -46,7 +38,9 @@
     "jsdom": "^24.0.0",
     "tsup": "^8.3.5",
     "typescript": "^5.4.3",
-    "vitest": "^3.1.2"
+    "vitest": "^3.1.2",
+    "@humeai/eslint-config": "0.0.1",
+    "@humeai/typescript-config": "0.0.0"
   },
   "peerDependencies": {
     "react": ">=18.2.0",
@@ -54,5 +48,11 @@
   },
   "browserslist": [
     "last 2 Chrome versions, last 2 iOS major versions, Firefox ESR, not dead"
-  ]
-}
+  ],
+  "scripts": {
+    "build": "tsup",
+    "dev": "tsup --watch",
+    "lint": "eslint ./src",
+    "pack": "npm pack"
+  }
+}