hume 0.2.5 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. package/CITATIONS.md +258 -0
  2. package/Client.d.ts +11 -0
  3. package/Client.js +42 -7
  4. package/LICENSE +21 -0
  5. package/api/client/requests/ListJobsRequest.d.ts +9 -0
  6. package/api/types/index.d.ts +11 -11
  7. package/api/types/index.js +11 -11
  8. package/core/fetcher/APIResponse.d.ts +1 -0
  9. package/core/fetcher/Fetcher.d.ts +1 -4
  10. package/core/fetcher/Fetcher.js +52 -31
  11. package/core/fetcher/getHeader.d.ts +1 -0
  12. package/{dist/core/streaming-fetcher → core/fetcher}/getHeader.js +2 -2
  13. package/core/fetcher/index.d.ts +1 -0
  14. package/core/fetcher/index.js +3 -1
  15. package/core/index.d.ts +0 -1
  16. package/core/index.js +0 -1
  17. package/dist/Client.d.ts +11 -0
  18. package/dist/Client.js +42 -7
  19. package/dist/api/client/requests/ListJobsRequest.d.ts +9 -0
  20. package/dist/api/types/index.d.ts +11 -11
  21. package/dist/api/types/index.js +11 -11
  22. package/dist/core/fetcher/APIResponse.d.ts +1 -0
  23. package/dist/core/fetcher/Fetcher.d.ts +1 -4
  24. package/dist/core/fetcher/Fetcher.js +52 -31
  25. package/dist/core/fetcher/getHeader.d.ts +1 -0
  26. package/{core/streaming-fetcher → dist/core/fetcher}/getHeader.js +2 -2
  27. package/dist/core/fetcher/index.d.ts +1 -0
  28. package/dist/core/fetcher/index.js +3 -1
  29. package/dist/core/index.d.ts +0 -1
  30. package/dist/core/index.js +0 -1
  31. package/dist/serialization/types/index.d.ts +11 -11
  32. package/dist/serialization/types/index.js +11 -11
  33. package/package.json +6 -4
  34. package/serialization/types/index.d.ts +11 -11
  35. package/serialization/types/index.js +11 -11
  36. package/core/streaming-fetcher/Stream.d.ts +0 -14
  37. package/core/streaming-fetcher/Stream.js +0 -75
  38. package/core/streaming-fetcher/StreamingFetcher.d.ts +0 -24
  39. package/core/streaming-fetcher/StreamingFetcher.js +0 -58
  40. package/core/streaming-fetcher/getHeader.d.ts +0 -2
  41. package/core/streaming-fetcher/index.d.ts +0 -4
  42. package/core/streaming-fetcher/index.js +0 -9
  43. package/dist/core/streaming-fetcher/Stream.d.ts +0 -14
  44. package/dist/core/streaming-fetcher/Stream.js +0 -75
  45. package/dist/core/streaming-fetcher/StreamingFetcher.d.ts +0 -24
  46. package/dist/core/streaming-fetcher/StreamingFetcher.js +0 -58
  47. package/dist/core/streaming-fetcher/getHeader.d.ts +0 -2
  48. package/dist/core/streaming-fetcher/index.d.ts +0 -4
  49. package/dist/core/streaming-fetcher/index.js +0 -9
package/CITATIONS.md ADDED
@@ -0,0 +1,258 @@
1
+ # Citations
2
+
3
+ To cite Hume's expressive communication platform, please reference one or more of the papers relevant to your application.
4
+
5
+ | Publication | Year | Modality | BibTeX |
6
+ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | :--: | :------: | :---------: |
7
+ | [Self-report captures 27 distinct categories of emotion bridged by continuous gradients](https://doi.org/10.1073/pnas.1702247114) | 2017 | multi | [Cite](#1) |
8
+ | [Mapping the Passions: Toward a High-Dimensional Taxonomy of Emotional Experience and Expression](https://doi.org/10.1177/1529100619850176) | 2019 | multi | [Cite](#2) |
9
+ | [The primacy of categories in the recognition of 12 emotions in speech prosody across two cultures](https://doi.org/10.1038/s41562-019-0533-6) | 2019 | voice | [Cite](#3) |
10
+ | [Mapping 24 emotions conveyed by brief human vocalization](https://doi.org/10.1037/amp0000399) | 2019 | voice | [Cite](#4) |
11
+ | [Emotional expression: Advances in basic emotion theory](https://doi.org/10.1007/s10919-019-00293-3)                                                                                  | 2019 |  multi   | [Cite](#5)  |
12
+ | [What the face displays: Mapping 28 emotions conveyed by naturalistic expression](https://doi.org/10.1037/amp0000488) | 2020 | face | [Cite](#6) |
13
+ | [The neural representation of visually evoked emotion is high-dimensional, categorical, and distributed across transmodal brain regions](https://doi.org/10.1016/j.isci.2020.101060) | 2020 | multi | [Cite](#7) |
14
+ | [What music makes us feel: At least 13 dimensions organize subjective experiences associated with music across different cultures](https://doi.org/10.1073/pnas.1910704117) | 2020 | music | [Cite](#8) |
15
+ | [GoEmotions: A Dataset of Fine-Grained Emotions](https://doi.org/10.18653/v1/2020.acl-main.372) | 2020 | text | [Cite](#9) |
16
+ | [Universal facial expressions uncovered in art of the ancient Americas: A computational approach](https://doi.org/10.1126/sciadv.abb1005) | 2020 | face | [Cite](#10) |
17
+ | [Sixteen facial expressions occur in similar contexts worldwide](https://doi.org/10.1038/s41586-020-3037-7) | 2021 | face | [Cite](#11) |
18
+ | [The MuSe 2022 Multimodal Sentiment Analysis Challenge: Humor, Emotional Reactions, and Stress](https://doi.org/10.48550/arXiv.2207.05691) | 2022 | multi | [Cite](#12) |
19
+ | [The ACII 2022 Affective Vocal Bursts Workshop & Competition: Understanding a critically understudied modality of emotional expression](https://doi.org/10.48550/arXiv.2207.03572) | 2022 | voice | [Cite](#13) |
20
+ | [The ICML 2022 Expressive Vocalizations Workshop and Competition: Recognizing, Generating, and Personalizing Vocal Bursts](https://doi.org/10.48550/arXiv.2205.01780) | 2022 | voice | [Cite](#14) |
21
+ | [Intersectionality in emotion signaling and recognition: The influence of gender, ethnicity, and social class](https://doi.org/10.1037/emo0001082) | 2022 | body | [Cite](#15) |
22
+ | [How emotions, relationships, and culture constitute each other: advances in social functionalist theory](https://doi.org/10.1080/02699931.2022.2047009) | 2022 | multi | [Cite](#16) |
23
+ | [State & Trait Measurement from Nonverbal Vocalizations: A Multi-Task Joint Learning Approach](https://doi.org/10.21437/Interspeech.2022-10927) | 2022 | voice | [Cite](#17) |
24
+
25
+ ## BibTeX
26
+
27
+ ### <a id="1"></a>
28
+
29
+ ```bibtex
30
+ @article{cowen2017self,
31
+ title={Self-report captures 27 distinct categories of emotion bridged by continuous gradients},
32
+ author={Cowen, Alan S and Keltner, Dacher},
33
+ journal={Proceedings of the national academy of sciences},
34
+ volume={114},
35
+ number={38},
36
+ pages={E7900--E7909},
37
+ year={2017},
38
+ publisher={National Acad Sciences}
39
+ }
40
+ ```
41
+
42
+ ### <a id="2"></a>
43
+
44
+ ```bibtex
45
+ @article{cowen2019mapping,
46
+ title={Mapping the passions: Toward a high-dimensional taxonomy of emotional experience and expression},
47
+ author={Cowen, Alan and Sauter, Disa and Tracy, Jessica L and Keltner, Dacher},
48
+ journal={Psychological Science in the Public Interest},
49
+ volume={20},
50
+ number={1},
51
+ pages={69--90},
52
+ year={2019},
53
+ publisher={Sage Publications Sage CA: Los Angeles, CA}
54
+ }
55
+ ```
56
+
57
+ ### <a id="3"></a>
58
+
59
+ ```bibtex
60
+ @article{cowen2019primacy,
61
+ title={The primacy of categories in the recognition of 12 emotions in speech prosody across two cultures},
62
+ author={Cowen, Alan S and Laukka, Petri and Elfenbein, Hillary Anger and Liu, Runjing and Keltner, Dacher},
63
+ journal={Nature human behaviour},
64
+ volume={3},
65
+ number={4},
66
+ pages={369--382},
67
+ year={2019},
68
+ publisher={Nature Publishing Group}
69
+ }
70
+ ```
71
+
72
+ ### <a id="4"></a>
73
+
74
+ ```bibtex
75
+ @article{cowen2019mapping24,
76
+ title={Mapping 24 emotions conveyed by brief human vocalization.},
77
+ author={Cowen, Alan S and Elfenbein, Hillary Anger and Laukka, Petri and Keltner, Dacher},
78
+ journal={American Psychologist},
79
+ volume={74},
80
+ number={6},
81
+ pages={698},
82
+ year={2019},
83
+ publisher={American Psychological Association}
84
+ }
85
+ ```
86
+
87
+ ### <a id="5"></a>
88
+
89
+ ```bibtex
90
+ @article{keltner2019emotional,
91
+ title={Emotional expression: Advances in basic emotion theory},
92
+ author={Keltner, Dacher and Sauter, Disa and Tracy, Jessica and Cowen, Alan},
93
+ journal={Journal of nonverbal behavior},
94
+ volume={43},
95
+ number={2},
96
+ pages={133--160},
97
+ year={2019},
98
+ publisher={Springer}
99
+ }
100
+ ```
101
+
102
+ ### <a id="6"></a>
103
+
104
+ ```bibtex
105
+ @article{cowen2020face,
106
+ title={What the face displays: Mapping 28 emotions conveyed by naturalistic expression.},
107
+ author={Cowen, Alan S and Keltner, Dacher},
108
+ journal={American Psychologist},
109
+ volume={75},
110
+ number={3},
111
+ pages={349},
112
+ year={2020},
113
+ publisher={American Psychological Association}
114
+ }
115
+ ```
116
+
117
+ ### <a id="7"></a>
118
+
119
+ ```bibtex
120
+ @article{horikawa2020neural,
121
+ title={The neural representation of visually evoked emotion is high-dimensional, categorical, and distributed across transmodal brain regions},
122
+ author={Horikawa, Tomoyasu and Cowen, Alan S and Keltner, Dacher and Kamitani, Yukiyasu},
123
+ journal={Iscience},
124
+ volume={23},
125
+ number={5},
126
+ pages={101060},
127
+ year={2020},
128
+ publisher={Elsevier}
129
+ }
130
+ ```
131
+
132
+ ### <a id="8"></a>
133
+
134
+ ```bibtex
135
+ @article{cowen2020music,
136
+ title={What music makes us feel: At least 13 dimensions organize subjective experiences associated with music across different cultures},
137
+ author={Cowen, Alan S and Fang, Xia and Sauter, Disa and Keltner, Dacher},
138
+ journal={Proceedings of the National Academy of Sciences},
139
+ volume={117},
140
+ number={4},
141
+ pages={1924--1934},
142
+ year={2020},
143
+ publisher={National Acad Sciences}
144
+ }
145
+ ```
146
+
147
+ ### <a id="9"></a>
148
+
149
+ ```bibtex
150
+ @article{demszky2020goemotions,
151
+ title={GoEmotions: A dataset of fine-grained emotions},
152
+ author={Demszky, Dorottya and Movshovitz-Attias, Dana and Ko, Jeongwoo and Cowen, Alan and Nemade, Gaurav and Ravi, Sujith},
153
+ journal={arXiv preprint arXiv:2005.00547},
154
+ year={2020}
155
+ }
156
+ ```
157
+
158
+ ### <a id="10"></a>
159
+
160
+ ```bibtex
161
+ @article{cowen2020universal,
162
+ title={Universal facial expressions uncovered in art of the ancient Americas: A computational approach},
163
+ author={Cowen, Alan S and Keltner, Dacher},
164
+ journal={Science advances},
165
+ volume={6},
166
+ number={34},
167
+ pages={eabb1005},
168
+ year={2020},
169
+ publisher={American Association for the Advancement of Science}
170
+ }
171
+ ```
172
+
173
+ ### <a id="11"></a>
174
+
175
+ ```bibtex
176
+ @article{cowen2021sixteen,
177
+ title={Sixteen facial expressions occur in similar contexts worldwide},
178
+ author={Cowen, Alan S and Keltner, Dacher and Schroff, Florian and Jou, Brendan and Adam, Hartwig and Prasad, Gautam},
179
+ journal={Nature},
180
+ volume={589},
181
+ number={7841},
182
+ pages={251--257},
183
+ year={2021},
184
+ publisher={Nature Publishing Group}
185
+ }
186
+ ```
187
+
188
+ ### <a id="12"></a>
189
+
190
+ ```bibtex
191
+ @article{christ2022muse,
192
+ title={The MuSe 2022 Multimodal Sentiment Analysis Challenge: Humor, Emotional Reactions, and Stress},
193
+ author={Christ, Lukas and Amiriparian, Shahin and Baird, Alice and Tzirakis, Panagiotis and Kathan, Alexander and M{\"u}ller, Niklas and Stappen, Lukas and Me{\ss}ner, Eva-Maria and K{\"o}nig, Andreas and Cowen, Alan and others},
194
+ year={2022}
195
+ }
196
+ ```
197
+
198
+ ### <a id="13"></a>
199
+
200
+ ```bibtex
201
+ @article{baird2022acii,
202
+ title={The ACII 2022 Affective Vocal Bursts Workshop \& Competition: Understanding a critically understudied modality of emotional expression},
203
+ author={Baird, Alice and Tzirakis, Panagiotis and Brooks, Jeffrey A and Gregory, Christopher B and Schuller, Bj{\"o}rn and Batliner, Anton and Keltner, Dacher and Cowen, Alan},
204
+ journal={arXiv preprint arXiv:2207.03572},
205
+ year={2022}
206
+ }
207
+ ```
208
+
209
+ ### <a id="14"></a>
210
+
211
+ ```bibtex
212
+ @article{baird2022icml,
213
+ title={The ICML 2022 Expressive Vocalizations Workshop and Competition: Recognizing, Generating, and Personalizing Vocal Bursts},
214
+ author={Baird, Alice and Tzirakis, Panagiotis and Gidel, Gauthier and Jiralerspong, Marco and Muller, Eilif B and Mathewson, Kory and Schuller, Bj{\"o}rn and Cambria, Erik and Keltner, Dacher and Cowen, Alan},
215
+ journal={arXiv preprint arXiv:2205.01780},
216
+ year={2022}
217
+ }
218
+ ```
219
+
220
+ ### <a id="15"></a>
221
+
222
+ ```bibtex
223
+ @article{monroy2022intersectionality,
224
+ title={Intersectionality in emotion signaling and recognition: The influence of gender, ethnicity, and social class.},
225
+ author={Monroy, Maria and Cowen, Alan S and Keltner, Dacher},
226
+ journal={Emotion},
227
+ year={2022},
228
+ publisher={American Psychological Association}
229
+ }
230
+ ```
231
+
232
+ ### <a id="16"></a>
233
+
234
+ ```bibtex
235
+ @article{keltner2022emotions,
236
+ title={How emotions, relationships, and culture constitute each other: advances in social functionalist theory},
237
+ author={Keltner, Dacher and Sauter, Disa and Tracy, Jessica L and Wetchler, Everett and Cowen, Alan S},
238
+ journal={Cognition and Emotion},
239
+ volume={36},
240
+ number={3},
241
+ pages={388--401},
242
+ year={2022},
243
+ publisher={Taylor \& Francis}
244
+ }
245
+ ```
246
+
247
+ ### <a id="17"></a>
248
+
249
+ ```bibtex
250
+ @inproceedings{baird22_interspeech,
251
+ author={Alice Baird and Panagiotis Tzirakis and Jeff Brooks and Lauren Kim and Michael Opara and Chris Gregory and Jacob Metrick and Garrett Boseck and Dacher Keltner and Alan Cowen},
252
+ title={{State \& Trait Measurement from Nonverbal Vocalizations: A Multi-Task Joint Learning Approach}},
253
+ year=2022,
254
+ booktitle={Proc. Interspeech 2022},
255
+ pages={2028--2032},
256
+ doi={10.21437/Interspeech.2022-10927}
257
+ }
258
+ ```
package/Client.d.ts CHANGED
@@ -21,6 +21,14 @@ export declare class HumeClient {
21
21
  constructor(_options: HumeClient.Options);
22
22
  /**
23
23
  * Sort and filter jobs.
24
+ *
25
+ * @example
26
+ * await hume.listJobs({
27
+ * status: Hume.Status.Queued,
28
+ * when: Hume.When.CreatedBefore,
29
+ * sortBy: Hume.SortBy.Created,
30
+ * direction: Hume.Direction.Asc
31
+ * })
24
32
  */
25
33
  listJobs(request?: Hume.ListJobsRequest, requestOptions?: HumeClient.RequestOptions): Promise<Hume.JobRequest[]>;
26
34
  /**
@@ -29,6 +37,9 @@ export declare class HumeClient {
29
37
  submitJob(request?: Hume.BaseRequest, requestOptions?: HumeClient.RequestOptions): Promise<Hume.JobId>;
30
38
  /**
31
39
  * Get the JSON predictions of a completed job.
40
+ *
41
+ * @example
42
+ * await hume.getJobPredictions("id")
32
43
  */
33
44
  getJobPredictions(id: string, requestOptions?: HumeClient.RequestOptions): Promise<Hume.SourceResult[]>;
34
45
  /**
package/Client.js CHANGED
@@ -50,6 +50,14 @@ class HumeClient {
50
50
  }
51
51
  /**
52
52
  * Sort and filter jobs.
53
+ *
54
+ * @example
55
+ * await hume.listJobs({
56
+ * status: Hume.Status.Queued,
57
+ * when: Hume.When.CreatedBefore,
58
+ * sortBy: Hume.SortBy.Created,
59
+ * direction: Hume.Direction.Asc
60
+ * })
53
61
  */
54
62
  listJobs(request = {}, requestOptions) {
55
63
  var _a;
@@ -86,7 +94,7 @@ class HumeClient {
86
94
  "X-Hume-Api-Key": yield core.Supplier.get(this._options.apiKey),
87
95
  "X-Fern-Language": "JavaScript",
88
96
  "X-Fern-SDK-Name": "hume",
89
- "X-Fern-SDK-Version": "0.2.5",
97
+ "X-Fern-SDK-Version": "0.3.0",
90
98
  },
91
99
  contentType: "application/json",
92
100
  queryParameters: _queryParams,
@@ -135,7 +143,7 @@ class HumeClient {
135
143
  "X-Hume-Api-Key": yield core.Supplier.get(this._options.apiKey),
136
144
  "X-Fern-Language": "JavaScript",
137
145
  "X-Fern-SDK-Name": "hume",
138
- "X-Fern-SDK-Version": "0.2.5",
146
+ "X-Fern-SDK-Version": "0.3.0",
139
147
  },
140
148
  contentType: "application/json",
141
149
  body: yield serializers.BaseRequest.jsonOrThrow(request, { unrecognizedObjectKeys: "strip" }),
@@ -173,6 +181,9 @@ class HumeClient {
173
181
  }
174
182
  /**
175
183
  * Get the JSON predictions of a completed job.
184
+ *
185
+ * @example
186
+ * await hume.getJobPredictions("id")
176
187
  */
177
188
  getJobPredictions(id, requestOptions) {
178
189
  var _a;
@@ -184,7 +195,7 @@ class HumeClient {
184
195
  "X-Hume-Api-Key": yield core.Supplier.get(this._options.apiKey),
185
196
  "X-Fern-Language": "JavaScript",
186
197
  "X-Fern-SDK-Name": "hume",
187
- "X-Fern-SDK-Version": "0.2.5",
198
+ "X-Fern-SDK-Version": "0.3.0",
188
199
  },
189
200
  contentType: "application/json",
190
201
  timeoutMs: (requestOptions === null || requestOptions === void 0 ? void 0 : requestOptions.timeoutInSeconds) != null ? requestOptions.timeoutInSeconds * 1000 : 60000,
@@ -225,18 +236,42 @@ class HumeClient {
225
236
  getJobArtifacts(id, requestOptions) {
226
237
  var _a;
227
238
  return __awaiter(this, void 0, void 0, function* () {
228
- const _response = yield core.streamingFetcher({
239
+ const _response = yield core.fetcher({
229
240
  url: (0, url_join_1.default)((_a = (yield core.Supplier.get(this._options.environment))) !== null && _a !== void 0 ? _a : environments.HumeEnvironment.Default, `v0/batch/jobs/${id}/artifacts`),
230
241
  method: "GET",
231
242
  headers: {
232
243
  "X-Hume-Api-Key": yield core.Supplier.get(this._options.apiKey),
233
244
  "X-Fern-Language": "JavaScript",
234
245
  "X-Fern-SDK-Name": "hume",
235
- "X-Fern-SDK-Version": "0.2.5",
246
+ "X-Fern-SDK-Version": "0.3.0",
236
247
  },
248
+ contentType: "application/json",
249
+ responseType: "streaming",
237
250
  timeoutMs: (requestOptions === null || requestOptions === void 0 ? void 0 : requestOptions.timeoutInSeconds) != null ? requestOptions.timeoutInSeconds * 1000 : 60000,
251
+ maxRetries: requestOptions === null || requestOptions === void 0 ? void 0 : requestOptions.maxRetries,
238
252
  });
239
- return _response.data;
253
+ if (_response.ok) {
254
+ return _response.body;
255
+ }
256
+ if (_response.error.reason === "status-code") {
257
+ throw new errors.HumeError({
258
+ statusCode: _response.error.statusCode,
259
+ body: _response.error.body,
260
+ });
261
+ }
262
+ switch (_response.error.reason) {
263
+ case "non-json":
264
+ throw new errors.HumeError({
265
+ statusCode: _response.error.statusCode,
266
+ body: _response.error.rawBody,
267
+ });
268
+ case "timeout":
269
+ throw new errors.HumeTimeoutError();
270
+ case "unknown":
271
+ throw new errors.HumeError({
272
+ message: _response.error.errorMessage,
273
+ });
274
+ }
240
275
  });
241
276
  }
242
277
  /**
@@ -252,7 +287,7 @@ class HumeClient {
252
287
  "X-Hume-Api-Key": yield core.Supplier.get(this._options.apiKey),
253
288
  "X-Fern-Language": "JavaScript",
254
289
  "X-Fern-SDK-Name": "hume",
255
- "X-Fern-SDK-Version": "0.2.5",
290
+ "X-Fern-SDK-Version": "0.3.0",
256
291
  },
257
292
  contentType: "application/json",
258
293
  timeoutMs: (requestOptions === null || requestOptions === void 0 ? void 0 : requestOptions.timeoutInSeconds) != null ? requestOptions.timeoutInSeconds * 1000 : 60000,
package/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2022 Hume AI, Inc.
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
package/api/client/requests/ListJobsRequest.d.ts CHANGED
@@ -2,6 +2,15 @@
2
2
  * This file was auto-generated by Fern from our API Definition.
3
3
  */
4
4
  import * as Hume from "../..";
5
+ /**
6
+ * @example
7
+ * {
8
+ * status: Hume.Status.Queued,
9
+ * when: Hume.When.CreatedBefore,
10
+ * sortBy: Hume.SortBy.Created,
11
+ * direction: Hume.Direction.Asc
12
+ * }
13
+ */
5
14
  export interface ListJobsRequest {
6
15
  /**
7
16
  * The maximum number of jobs to include in the response.
package/api/types/index.d.ts CHANGED
@@ -62,28 +62,28 @@ export * from "./TranscriptionMetadata";
62
62
  export * from "./Url";
63
63
  export * from "./When";
64
64
  export * from "./Window";
65
- export * from "./ModelsInput";
66
- export * from "./ModelConfig";
67
65
  export * from "./FaceModelConfig";
68
66
  export * from "./LanguageConfig";
69
- export * from "./ModelResponse";
70
- export * from "./BurstResponse";
67
+ export * from "./ModelConfig";
68
+ export * from "./ModelsInput";
71
69
  export * from "./ModelsSuccessBurstPredictionsItem";
72
- export * from "./FaceResponse";
70
+ export * from "./BurstResponse";
73
71
  export * from "./ModelsSuccessFacePredictionsItem";
74
- export * from "./FacemeshResponse";
72
+ export * from "./FaceResponse";
75
73
  export * from "./ModelsSuccessFacemeshPredictionsItem";
76
- export * from "./LanguageResponse";
74
+ export * from "./FacemeshResponse";
77
75
  export * from "./ModelsSuccessLanguagePredictionsItem";
78
- export * from "./ProsodyResponse";
76
+ export * from "./LanguageResponse";
79
77
  export * from "./ModelsSuccessProsodyPredictionsItem";
78
+ export * from "./ProsodyResponse";
79
+ export * from "./ModelResponse";
80
80
  export * from "./ModelsError";
81
81
  export * from "./ModelsWarning";
82
- export * from "./EmotionEmbedding";
83
82
  export * from "./EmotionEmbeddingItem";
83
+ export * from "./EmotionEmbedding";
84
84
  export * from "./TimeRange";
85
85
  export * from "./TextPosition";
86
- export * from "./Sentiment";
87
86
  export * from "./SentimentItem";
88
- export * from "./Toxicity";
87
+ export * from "./Sentiment";
89
88
  export * from "./ToxicityItem";
89
+ export * from "./Toxicity";
package/api/types/index.js CHANGED
@@ -78,28 +78,28 @@ __exportStar(require("./TranscriptionMetadata"), exports);
78
78
  __exportStar(require("./Url"), exports);
79
79
  __exportStar(require("./When"), exports);
80
80
  __exportStar(require("./Window"), exports);
81
- __exportStar(require("./ModelsInput"), exports);
82
- __exportStar(require("./ModelConfig"), exports);
83
81
  __exportStar(require("./FaceModelConfig"), exports);
84
82
  __exportStar(require("./LanguageConfig"), exports);
85
- __exportStar(require("./ModelResponse"), exports);
86
- __exportStar(require("./BurstResponse"), exports);
83
+ __exportStar(require("./ModelConfig"), exports);
84
+ __exportStar(require("./ModelsInput"), exports);
87
85
  __exportStar(require("./ModelsSuccessBurstPredictionsItem"), exports);
88
- __exportStar(require("./FaceResponse"), exports);
86
+ __exportStar(require("./BurstResponse"), exports);
89
87
  __exportStar(require("./ModelsSuccessFacePredictionsItem"), exports);
90
- __exportStar(require("./FacemeshResponse"), exports);
88
+ __exportStar(require("./FaceResponse"), exports);
91
89
  __exportStar(require("./ModelsSuccessFacemeshPredictionsItem"), exports);
92
- __exportStar(require("./LanguageResponse"), exports);
90
+ __exportStar(require("./FacemeshResponse"), exports);
93
91
  __exportStar(require("./ModelsSuccessLanguagePredictionsItem"), exports);
94
- __exportStar(require("./ProsodyResponse"), exports);
92
+ __exportStar(require("./LanguageResponse"), exports);
95
93
  __exportStar(require("./ModelsSuccessProsodyPredictionsItem"), exports);
94
+ __exportStar(require("./ProsodyResponse"), exports);
95
+ __exportStar(require("./ModelResponse"), exports);
96
96
  __exportStar(require("./ModelsError"), exports);
97
97
  __exportStar(require("./ModelsWarning"), exports);
98
- __exportStar(require("./EmotionEmbedding"), exports);
99
98
  __exportStar(require("./EmotionEmbeddingItem"), exports);
99
+ __exportStar(require("./EmotionEmbedding"), exports);
100
100
  __exportStar(require("./TimeRange"), exports);
101
101
  __exportStar(require("./TextPosition"), exports);
102
- __exportStar(require("./Sentiment"), exports);
103
102
  __exportStar(require("./SentimentItem"), exports);
104
- __exportStar(require("./Toxicity"), exports);
103
+ __exportStar(require("./Sentiment"), exports);
105
104
  __exportStar(require("./ToxicityItem"), exports);
105
+ __exportStar(require("./Toxicity"), exports);
package/core/fetcher/APIResponse.d.ts CHANGED
@@ -2,6 +2,7 @@ export declare type APIResponse<Success, Failure> = SuccessfulResponse<Success>
2
2
  export interface SuccessfulResponse<T> {
3
3
  ok: true;
4
4
  body: T;
5
+ headers?: Record<string, any>;
5
6
  }
6
7
  export interface FailedResponse<T> {
7
8
  ok: false;
package/core/fetcher/Fetcher.d.ts CHANGED
@@ -1,4 +1,3 @@
1
- import { AxiosAdapter } from "axios";
2
1
  import { APIResponse } from "./APIResponse";
3
2
  export declare type FetchFunction = <R = unknown>(args: Fetcher.Args) => Promise<APIResponse<R, Fetcher.Error>>;
4
3
  export declare namespace Fetcher {
@@ -12,9 +11,7 @@ export declare namespace Fetcher {
12
11
  timeoutMs?: number;
13
12
  maxRetries?: number;
14
13
  withCredentials?: boolean;
15
- responseType?: "json" | "blob";
16
- adapter?: AxiosAdapter;
17
- onUploadProgress?: (event: ProgressEvent) => void;
14
+ responseType?: "json" | "blob" | "streaming";
18
15
  }
19
16
  type Error = FailedStatusCodeError | NonJsonError | TimeoutError | UnknownError;
20
17
  interface FailedStatusCodeError {