@huggingface/tasks 0.15.1 → 0.15.3

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
@@ -208,6 +208,10 @@ export declare const SKUS: {
  tflops: number;
  memory: number[];
  };
+ "GTX 1060": {
+ tflops: number;
+ memory: number[];
+ };
  "RTX Titan": {
  tflops: number;
  memory: number[];
@@ -1 +1 @@
- {"version":3,"file":"hardware.d.ts","sourceRoot":"","sources":["../../src/hardware.ts"],"names":[],"mappings":"AAAA;;;GAGG;AACH,eAAO,MAAM,iDAAiD,QAAW,CAAC;AAC1E,eAAO,MAAM,yDAAyD,QAAW,CAAC;AAClF,eAAO,MAAM,oCAAoC,QAAU,CAAC;AAE5D;;;GAGG;AACH,eAAO,MAAM,+CAA+C,QAAW,CAAC;AAExE,MAAM,WAAW,YAAY;IAC5B;;;;;;;;;OASG;IACH,MAAM,EAAE,MAAM,CAAC;IACf;;;OAGG;IACH,MAAM,CAAC,EAAE,MAAM,EAAE,CAAC;CAClB;AAED,eAAO,MAAM,sBAAsB,UAAqD,CAAC;AAEzF,eAAO,MAAM,IAAI;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAqeuD,CAAC;AAEzE,MAAM,MAAM,OAAO,GAAG,MAAM,OAAO,IAAI,CAAC"}
+ {"version":3,"file":"hardware.d.ts","sourceRoot":"","sources":["../../src/hardware.ts"],"names":[],"mappings":"AAAA;;;GAGG;AACH,eAAO,MAAM,iDAAiD,QAAW,CAAC;AAC1E,eAAO,MAAM,yDAAyD,QAAW,CAAC;AAClF,eAAO,MAAM,oCAAoC,QAAU,CAAC;AAE5D;;;GAGG;AACH,eAAO,MAAM,+CAA+C,QAAW,CAAC;AAExE,MAAM,WAAW,YAAY;IAC5B;;;;;;;;;OASG;IACH,MAAM,EAAE,MAAM,CAAC;IACf;;;OAGG;IACH,MAAM,CAAC,EAAE,MAAM,EAAE,CAAC;CAClB;AAED,eAAO,MAAM,sBAAsB,UAAqD,CAAC;AAEzF,eAAO,MAAM,IAAI;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAyeuD,CAAC;AAEzE,MAAM,MAAM,OAAO,GAAG,MAAM,OAAO,IAAI,CAAC"}
@@ -193,6 +193,10 @@ exports.SKUS = {
  tflops: 8.2, // float32 (GPU does not support native float16)
  memory: [8],
  },
+ "GTX 1060": {
+ tflops: 3.9, // float32 (GPU does not support native float16)
+ memory: [3, 6],
+ },
  "RTX Titan": {
  tflops: 32.62,
  memory: [24],
@@ -181,7 +181,6 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
  repoName: string;
  docsUrl: string;
  repoUrl: string;
- countDownloads: string;
  };
  "depth-anything-v2": {
  prettyLabel: string;
@@ -1 +1 @@
- {"version":3,"file":"model-libraries.d.ts","sourceRoot":"","sources":["../../src/model-libraries.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAC;AACjD,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,gCAAgC,CAAC;AAEzE;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAChC;;;;OAIG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,QAAQ,CAAC,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,MAAM,EAAE,CAAC;IAC1C;;;;;OAKG;IACH,cAAc,CAAC,EAAE,kBAAkB,CAAC;IACpC;;;OAGG;IACH,MAAM,CAAC,EAAE,OAAO,CAAC;CACjB;AAED;;;;;;;;;;;;;GAaG;AAEH,eAAO,MAAM,2BAA2B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAs0BI,CAAC;AAE7C,MAAM,MAAM,eAAe,GAAG,MAAM,OAAO,2BAA2B,CAAC;AAEvE,eAAO,MAAM,sBAAsB,EAA+C,eAAe,EAAE,CAAC;AAEpG,eAAO,MAAM,8BAA8B,0gDAQ1B,CAAC"}
+ {"version":3,"file":"model-libraries.d.ts","sourceRoot":"","sources":["../../src/model-libraries.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAC;AACjD,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,gCAAgC,CAAC;AAEzE;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAChC;;;;OAIG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,QAAQ,CAAC,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,MAAM,EAAE,CAAC;IAC1C;;;;;OAKG;IACH,cAAc,CAAC,EAAE,kBAAkB,CAAC;IACpC;;;OAGG;IACH,MAAM,CAAC,EAAE,OAAO,CAAC;CACjB;AAED;;;;;;;;;;;;;GAaG;AAEH,eAAO,MAAM,2BAA2B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAq0BI,CAAC;AAE7C,MAAM,MAAM,eAAe,GAAG,MAAM,OAAO,2BAA2B,CAAC;AAEvE,eAAO,MAAM,sBAAsB,EAA+C,eAAe,EAAE,CAAC;AAEpG,eAAO,MAAM,8BAA8B,0gDAQ1B,CAAC"}
@@ -167,7 +167,6 @@ exports.MODEL_LIBRARIES_UI_ELEMENTS = {
  repoName: "deepforest",
  docsUrl: "https://deepforest.readthedocs.io/en/latest/",
  repoUrl: "https://github.com/weecology/DeepForest",
- countDownloads: `path_extension:"pt" OR path_extension:"pl"`,
  },
  "depth-anything-v2": {
  prettyLabel: "DepthAnythingV2",
@@ -1 +1 @@
- {"version":3,"file":"python.d.ts","sourceRoot":"","sources":["../../../src/snippets/python.ts"],"names":[],"mappings":"AAAA,OAAO,EAAkD,KAAK,iBAAiB,EAAE,MAAM,2BAA2B,CAAC;AACnH,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;AACpD,OAAO,KAAK,EAAE,0BAA0B,EAAE,oBAAoB,EAAE,MAAM,mBAAmB,CAAC;AAG1F,OAAO,KAAK,EAAE,gBAAgB,EAAE,gBAAgB,EAAE,MAAM,YAAY,CAAC;AAWrE,eAAO,MAAM,qBAAqB,UAC1B,gBAAgB,eACV,MAAM,YACT,iBAAiB,SACpB;IACN,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB,QAAQ,CAAC,EAAE,0BAA0B,EAAE,CAAC;IACxC,WAAW,CAAC,EAAE,oBAAoB,CAAC,aAAa,CAAC,CAAC;IAClD,UAAU,CAAC,EAAE,oBAAoB,CAAC,YAAY,CAAC,CAAC;IAChD,KAAK,CAAC,EAAE,oBAAoB,CAAC,OAAO,CAAC,CAAC;CACtC,KACC,gBAAgB,EA+FlB,CAAC;AAEF,eAAO,MAAM,6BAA6B,UAAW,gBAAgB,KAAG,gBAAgB,EAcvF,CAAC;AAEF,eAAO,MAAM,kCAAkC,UAAW,gBAAgB,KAAG,gBAAgB,EAoB5F,CAAC;AAEF,eAAO,MAAM,YAAY,UAAW,gBAAgB,KAAG,gBAAgB,EAatE,CAAC;AAEF,eAAO,MAAM,WAAW,UAAW,gBAAgB,KAAG,gBAAgB,EAarE,CAAC;AAEF,eAAO,MAAM,kBAAkB,UACvB,gBAAgB,eACV,MAAM,YACT,iBAAiB,KACzB,gBAAgB,EAqDlB,CAAC;AAEF,eAAO,MAAM,cAAc,UAAW,gBAAgB,KAAG,gBAAgB,EAYxE,CAAC;AAEF,eAAO,MAAM,kBAAkB,UAAW,gBAAgB,KAAG,gBAAgB,EAqC5E,CAAC;AAEF,eAAO,MAAM,gCAAgC,UAAW,gBAAgB,KAAG,gBAAgB,EAgB1F,CAAC;AAEF,eAAO,MAAM,cAAc,EAAE,OAAO,CACnC,MAAM,CACL,YAAY,EACZ,CACC,KAAK,EAAE,gBAAgB,EACvB,WAAW,EAAE,MAAM,EACnB,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,KAC1B,gBAAgB,EAAE,CACvB,CA8BD,CAAC;AAEF,wBAAgB,yBAAyB,CACxC,KAAK,EAAE,gBAAgB,EACvB,WAAW,EAAE,MAAM,EACnB,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAC5B,gBAAgB,EAAE,CA+BpB"}
+ {"version":3,"file":"python.d.ts","sourceRoot":"","sources":["../../../src/snippets/python.ts"],"names":[],"mappings":"AAAA,OAAO,EAAkD,KAAK,iBAAiB,EAAE,MAAM,2BAA2B,CAAC;AACnH,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;AACpD,OAAO,KAAK,EAAE,0BAA0B,EAAE,oBAAoB,EAAE,MAAM,mBAAmB,CAAC;AAG1F,OAAO,KAAK,EAAE,gBAAgB,EAAE,gBAAgB,EAAE,MAAM,YAAY,CAAC;AAWrE,eAAO,MAAM,qBAAqB,UAC1B,gBAAgB,eACV,MAAM,YACT,iBAAiB,SACpB;IACN,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB,QAAQ,CAAC,EAAE,0BAA0B,EAAE,CAAC;IACxC,WAAW,CAAC,EAAE,oBAAoB,CAAC,aAAa,CAAC,CAAC;IAClD,UAAU,CAAC,EAAE,oBAAoB,CAAC,YAAY,CAAC,CAAC;IAChD,KAAK,CAAC,EAAE,oBAAoB,CAAC,OAAO,CAAC,CAAC;CACtC,KACC,gBAAgB,EAiGlB,CAAC;AAEF,eAAO,MAAM,6BAA6B,UAAW,gBAAgB,KAAG,gBAAgB,EAevF,CAAC;AAEF,eAAO,MAAM,kCAAkC,UAAW,gBAAgB,KAAG,gBAAgB,EAqB5F,CAAC;AAEF,eAAO,MAAM,YAAY,UAAW,gBAAgB,KAAG,gBAAgB,EActE,CAAC;AAEF,eAAO,MAAM,WAAW,UAAW,gBAAgB,KAAG,gBAAgB,EAcrE,CAAC;AAEF,eAAO,MAAM,kBAAkB,UACvB,gBAAgB,eACV,MAAM,YACT,iBAAiB,KACzB,gBAAgB,EAqDlB,CAAC;AAEF,eAAO,MAAM,cAAc,UAAW,gBAAgB,KAAG,gBAAgB,EAcxE,CAAC;AAEF,eAAO,MAAM,kBAAkB,UAAW,gBAAgB,KAAG,gBAAgB,EAuC5E,CAAC;AAEF,eAAO,MAAM,gCAAgC,UAAW,gBAAgB,KAAG,gBAAgB,EAiB1F,CAAC;AAEF,eAAO,MAAM,cAAc,EAAE,OAAO,CACnC,MAAM,CACL,YAAY,EACZ,CACC,KAAK,EAAE,gBAAgB,EACvB,WAAW,EAAE,MAAM,EACnB,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,KAC1B,gBAAgB,EAAE,CACvB,CA8BD,CAAC;AAEF,wBAAgB,yBAAyB,CACxC,KAAK,EAAE,gBAAgB,EACvB,WAAW,EAAE,MAAM,EACnB,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAC5B,gBAAgB,EAAE,CA+BpB"}
@@ -36,7 +36,7 @@ ${snippetImportInferenceClient(accessToken, provider)}
  messages = ${messagesStr}

  stream = client.chat.completions.create(
- model="${model.id}",
+ model="${model.id}",
  messages=messages,
  ${configStr},
  stream=True
@@ -47,7 +47,8 @@ for chunk in stream:
  },
  {
  client: "openai",
- content: `from openai import OpenAI
+ content: `\
+ from openai import OpenAI

  client = OpenAI(
  base_url="${(0, inference_providers_js_1.openAIbaseUrl)(provider)}",
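The recurring change that adds a lone backslash right after the opening backtick of `content` is a template-literal line continuation: the backslash escapes the newline, so the generated Python snippet no longer begins with a blank line and the first code line can sit on its own source line. Several of the +/- pairs in these hunks look identical because they appear to differ only in the leading whitespace of the generated snippet. A minimal sketch of the effect (the variable names are illustrative, not from the package):

```ts
// Backslash-newline inside a template literal is a line continuation,
// so the emitted string starts directly with the first code line.
const withoutContinuation = `
from openai import OpenAI`;
const withContinuation = `\
from openai import OpenAI`;

console.log(JSON.stringify(withoutContinuation)); // "\nfrom openai import OpenAI"
console.log(JSON.stringify(withContinuation));    // "from openai import OpenAI"
```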
@@ -64,7 +65,7 @@ stream = client.chat.completions.create(
  )

  for chunk in stream:
- print(chunk.choices[0].delta.content, end="")`,
+ print(chunk.choices[0].delta.content, end="")`,
  },
  ];
  }
@@ -87,7 +88,8 @@ print(completion.choices[0].message)`,
  },
  {
  client: "openai",
- content: `from openai import OpenAI
+ content: `\
+ from openai import OpenAI

  client = OpenAI(
  base_url="${(0, inference_providers_js_1.openAIbaseUrl)(provider)}",
@@ -97,7 +99,7 @@ client = OpenAI(
  messages = ${messagesStr}

  completion = client.chat.completions.create(
- model="${model.id}",
+ model="${model.id}",
  messages=messages,
  ${configStr}
  )
@@ -112,7 +114,8 @@ const snippetZeroShotClassification = (model) => {
  return [
  {
  client: "requests",
- content: `def query(payload):
+ content: `\
+ def query(payload):
  response = requests.post(API_URL, headers=headers, json=payload)
  return response.json()

@@ -128,7 +131,8 @@ const snippetZeroShotImageClassification = (model) => {
  return [
  {
  client: "requests",
- content: `def query(data):
+ content: `\
+ def query(data):
  with open(data["image_path"], "rb") as f:
  img = f.read()
  payload={
@@ -139,8 +143,8 @@ const snippetZeroShotImageClassification = (model) => {
  return response.json()

  output = query({
- "image_path": ${(0, inputs_js_1.getModelInputSnippet)(model)},
- "parameters": {"candidate_labels": ["cat", "dog", "llama"]},
+ "image_path": ${(0, inputs_js_1.getModelInputSnippet)(model)},
+ "parameters": {"candidate_labels": ["cat", "dog", "llama"]},
  })`,
  },
  ];
@@ -150,7 +154,8 @@ const snippetBasic = (model) => {
  return [
  {
  client: "requests",
- content: `def query(payload):
+ content: `\
+ def query(payload):
  response = requests.post(API_URL, headers=headers, json=payload)
  return response.json()

@@ -165,13 +170,14 @@ const snippetFile = (model) => {
  return [
  {
  client: "requests",
- content: `def query(filename):
- with open(filename, "rb") as f:
- data = f.read()
- response = requests.post(API_URL, headers=headers, data=data)
- return response.json()
-
- output = query(${(0, inputs_js_1.getModelInputSnippet)(model)})`,
+ content: `\
+ def query(filename):
+ with open(filename, "rb") as f:
+ data = f.read()
+ response = requests.post(API_URL, headers=headers, data=data)
+ return response.json()
+
+ output = query(${(0, inputs_js_1.getModelInputSnippet)(model)})`,
  },
  ];
  };
@@ -235,12 +241,14 @@ const snippetTabular = (model) => {
  return [
  {
  client: "requests",
- content: `def query(payload):
- response = requests.post(API_URL, headers=headers, json=payload)
- return response.content
- response = query({
- "inputs": {"data": ${(0, inputs_js_1.getModelInputSnippet)(model)}},
- })`,
+ content: `\
+ def query(payload):
+ response = requests.post(API_URL, headers=headers, json=payload)
+ return response.content
+
+ response = query({
+ "inputs": {"data": ${(0, inputs_js_1.getModelInputSnippet)(model)}},
+ })`,
  },
  ];
  };
@@ -253,7 +261,8 @@ const snippetTextToAudio = (model) => {
  return [
  {
  client: "requests",
- content: `def query(payload):
+ content: `\
+ def query(payload):
  response = requests.post(API_URL, headers=headers, json=payload)
  return response.content

@@ -270,7 +279,8 @@ Audio(audio_bytes)`,
  return [
  {
  client: "requests",
- content: `def query(payload):
+ content: `\
+ def query(payload):
  response = requests.post(API_URL, headers=headers, json=payload)
  return response.json()

@@ -289,9 +299,10 @@ const snippetDocumentQuestionAnswering = (model) => {
  return [
  {
  client: "requests",
- content: `def query(payload):
- with open(payload["image"], "rb") as f:
- img = f.read()
+ content: `\
+ def query(payload):
+ with open(payload["image"], "rb") as f:
+ img = f.read()
  payload["image"] = base64.b64encode(img).decode("utf-8")
  response = requests.post(API_URL, headers=headers, json=payload)
  return response.json()
@@ -22,12 +22,12 @@ export interface TextToVideoInput {
  */
  export interface TextToVideoParameters {
  /**
- * A higher guidance scale value encourages the model to generate images closely linked to
+ * A higher guidance scale value encourages the model to generate videos closely linked to
  * the text prompt, but values too high may cause saturation and other artifacts.
  */
  guidance_scale?: number;
  /**
- * One or several prompt to guide what NOT to include in image generation.
+ * One or several prompt to guide what NOT to include in video generation.
  */
  negative_prompt?: string[];
  /**
@@ -36,7 +36,7 @@ export interface TextToVideoParameters {
  num_frames?: number;
  /**
  * The number of denoising steps. More denoising steps usually lead to a higher quality
- * image at the expense of slower inference.
+ * video at the expense of slower inference.
  */
  num_inference_steps?: number;
  /**
@@ -208,6 +208,10 @@ export declare const SKUS: {
  tflops: number;
  memory: number[];
  };
+ "GTX 1060": {
+ tflops: number;
+ memory: number[];
+ };
  "RTX Titan": {
  tflops: number;
  memory: number[];
@@ -1 +1 @@
- {"version":3,"file":"hardware.d.ts","sourceRoot":"","sources":["../../src/hardware.ts"],"names":[],"mappings":"AAAA;;;GAGG;AACH,eAAO,MAAM,iDAAiD,QAAW,CAAC;AAC1E,eAAO,MAAM,yDAAyD,QAAW,CAAC;AAClF,eAAO,MAAM,oCAAoC,QAAU,CAAC;AAE5D;;;GAGG;AACH,eAAO,MAAM,+CAA+C,QAAW,CAAC;AAExE,MAAM,WAAW,YAAY;IAC5B;;;;;;;;;OASG;IACH,MAAM,EAAE,MAAM,CAAC;IACf;;;OAGG;IACH,MAAM,CAAC,EAAE,MAAM,EAAE,CAAC;CAClB;AAED,eAAO,MAAM,sBAAsB,UAAqD,CAAC;AAEzF,eAAO,MAAM,IAAI;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAqeuD,CAAC;AAEzE,MAAM,MAAM,OAAO,GAAG,MAAM,OAAO,IAAI,CAAC"}
+ {"version":3,"file":"hardware.d.ts","sourceRoot":"","sources":["../../src/hardware.ts"],"names":[],"mappings":"AAAA;;;GAGG;AACH,eAAO,MAAM,iDAAiD,QAAW,CAAC;AAC1E,eAAO,MAAM,yDAAyD,QAAW,CAAC;AAClF,eAAO,MAAM,oCAAoC,QAAU,CAAC;AAE5D;;;GAGG;AACH,eAAO,MAAM,+CAA+C,QAAW,CAAC;AAExE,MAAM,WAAW,YAAY;IAC5B;;;;;;;;;OASG;IACH,MAAM,EAAE,MAAM,CAAC;IACf;;;OAGG;IACH,MAAM,CAAC,EAAE,MAAM,EAAE,CAAC;CAClB;AAED,eAAO,MAAM,sBAAsB,UAAqD,CAAC;AAEzF,eAAO,MAAM,IAAI;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAyeuD,CAAC;AAEzE,MAAM,MAAM,OAAO,GAAG,MAAM,OAAO,IAAI,CAAC"}
@@ -190,6 +190,10 @@ export const SKUS = {
  tflops: 8.2, // float32 (GPU does not support native float16)
  memory: [8],
  },
+ "GTX 1060": {
+ tflops: 3.9, // float32 (GPU does not support native float16)
+ memory: [3, 6],
+ },
  "RTX Titan": {
  tflops: 32.62,
  memory: [24],
@@ -181,7 +181,6 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
  repoName: string;
  docsUrl: string;
  repoUrl: string;
- countDownloads: string;
  };
  "depth-anything-v2": {
  prettyLabel: string;
@@ -1 +1 @@
- {"version":3,"file":"model-libraries.d.ts","sourceRoot":"","sources":["../../src/model-libraries.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAC;AACjD,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,gCAAgC,CAAC;AAEzE;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAChC;;;;OAIG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,QAAQ,CAAC,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,MAAM,EAAE,CAAC;IAC1C;;;;;OAKG;IACH,cAAc,CAAC,EAAE,kBAAkB,CAAC;IACpC;;;OAGG;IACH,MAAM,CAAC,EAAE,OAAO,CAAC;CACjB;AAED;;;;;;;;;;;;;GAaG;AAEH,eAAO,MAAM,2BAA2B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAs0BI,CAAC;AAE7C,MAAM,MAAM,eAAe,GAAG,MAAM,OAAO,2BAA2B,CAAC;AAEvE,eAAO,MAAM,sBAAsB,EAA+C,eAAe,EAAE,CAAC;AAEpG,eAAO,MAAM,8BAA8B,0gDAQ1B,CAAC"}
+ {"version":3,"file":"model-libraries.d.ts","sourceRoot":"","sources":["../../src/model-libraries.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAC;AACjD,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,gCAAgC,CAAC;AAEzE;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAChC;;;;OAIG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,QAAQ,CAAC,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,MAAM,EAAE,CAAC;IAC1C;;;;;OAKG;IACH,cAAc,CAAC,EAAE,kBAAkB,CAAC;IACpC;;;OAGG;IACH,MAAM,CAAC,EAAE,OAAO,CAAC;CACjB;AAED;;;;;;;;;;;;;GAaG;AAEH,eAAO,MAAM,2BAA2B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAq0BI,CAAC;AAE7C,MAAM,MAAM,eAAe,GAAG,MAAM,OAAO,2BAA2B,CAAC;AAEvE,eAAO,MAAM,sBAAsB,EAA+C,eAAe,EAAE,CAAC;AAEpG,eAAO,MAAM,8BAA8B,0gDAQ1B,CAAC"}
@@ -141,7 +141,6 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
  repoName: "deepforest",
  docsUrl: "https://deepforest.readthedocs.io/en/latest/",
  repoUrl: "https://github.com/weecology/DeepForest",
- countDownloads: `path_extension:"pt" OR path_extension:"pl"`,
  },
  "depth-anything-v2": {
  prettyLabel: "DepthAnythingV2",
@@ -1 +1 @@
- {"version":3,"file":"python.d.ts","sourceRoot":"","sources":["../../../src/snippets/python.ts"],"names":[],"mappings":"AAAA,OAAO,EAAkD,KAAK,iBAAiB,EAAE,MAAM,2BAA2B,CAAC;AACnH,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;AACpD,OAAO,KAAK,EAAE,0BAA0B,EAAE,oBAAoB,EAAE,MAAM,mBAAmB,CAAC;AAG1F,OAAO,KAAK,EAAE,gBAAgB,EAAE,gBAAgB,EAAE,MAAM,YAAY,CAAC;AAWrE,eAAO,MAAM,qBAAqB,UAC1B,gBAAgB,eACV,MAAM,YACT,iBAAiB,SACpB;IACN,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB,QAAQ,CAAC,EAAE,0BAA0B,EAAE,CAAC;IACxC,WAAW,CAAC,EAAE,oBAAoB,CAAC,aAAa,CAAC,CAAC;IAClD,UAAU,CAAC,EAAE,oBAAoB,CAAC,YAAY,CAAC,CAAC;IAChD,KAAK,CAAC,EAAE,oBAAoB,CAAC,OAAO,CAAC,CAAC;CACtC,KACC,gBAAgB,EA+FlB,CAAC;AAEF,eAAO,MAAM,6BAA6B,UAAW,gBAAgB,KAAG,gBAAgB,EAcvF,CAAC;AAEF,eAAO,MAAM,kCAAkC,UAAW,gBAAgB,KAAG,gBAAgB,EAoB5F,CAAC;AAEF,eAAO,MAAM,YAAY,UAAW,gBAAgB,KAAG,gBAAgB,EAatE,CAAC;AAEF,eAAO,MAAM,WAAW,UAAW,gBAAgB,KAAG,gBAAgB,EAarE,CAAC;AAEF,eAAO,MAAM,kBAAkB,UACvB,gBAAgB,eACV,MAAM,YACT,iBAAiB,KACzB,gBAAgB,EAqDlB,CAAC;AAEF,eAAO,MAAM,cAAc,UAAW,gBAAgB,KAAG,gBAAgB,EAYxE,CAAC;AAEF,eAAO,MAAM,kBAAkB,UAAW,gBAAgB,KAAG,gBAAgB,EAqC5E,CAAC;AAEF,eAAO,MAAM,gCAAgC,UAAW,gBAAgB,KAAG,gBAAgB,EAgB1F,CAAC;AAEF,eAAO,MAAM,cAAc,EAAE,OAAO,CACnC,MAAM,CACL,YAAY,EACZ,CACC,KAAK,EAAE,gBAAgB,EACvB,WAAW,EAAE,MAAM,EACnB,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,KAC1B,gBAAgB,EAAE,CACvB,CA8BD,CAAC;AAEF,wBAAgB,yBAAyB,CACxC,KAAK,EAAE,gBAAgB,EACvB,WAAW,EAAE,MAAM,EACnB,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAC5B,gBAAgB,EAAE,CA+BpB"}
+ {"version":3,"file":"python.d.ts","sourceRoot":"","sources":["../../../src/snippets/python.ts"],"names":[],"mappings":"AAAA,OAAO,EAAkD,KAAK,iBAAiB,EAAE,MAAM,2BAA2B,CAAC;AACnH,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;AACpD,OAAO,KAAK,EAAE,0BAA0B,EAAE,oBAAoB,EAAE,MAAM,mBAAmB,CAAC;AAG1F,OAAO,KAAK,EAAE,gBAAgB,EAAE,gBAAgB,EAAE,MAAM,YAAY,CAAC;AAWrE,eAAO,MAAM,qBAAqB,UAC1B,gBAAgB,eACV,MAAM,YACT,iBAAiB,SACpB;IACN,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB,QAAQ,CAAC,EAAE,0BAA0B,EAAE,CAAC;IACxC,WAAW,CAAC,EAAE,oBAAoB,CAAC,aAAa,CAAC,CAAC;IAClD,UAAU,CAAC,EAAE,oBAAoB,CAAC,YAAY,CAAC,CAAC;IAChD,KAAK,CAAC,EAAE,oBAAoB,CAAC,OAAO,CAAC,CAAC;CACtC,KACC,gBAAgB,EAiGlB,CAAC;AAEF,eAAO,MAAM,6BAA6B,UAAW,gBAAgB,KAAG,gBAAgB,EAevF,CAAC;AAEF,eAAO,MAAM,kCAAkC,UAAW,gBAAgB,KAAG,gBAAgB,EAqB5F,CAAC;AAEF,eAAO,MAAM,YAAY,UAAW,gBAAgB,KAAG,gBAAgB,EActE,CAAC;AAEF,eAAO,MAAM,WAAW,UAAW,gBAAgB,KAAG,gBAAgB,EAcrE,CAAC;AAEF,eAAO,MAAM,kBAAkB,UACvB,gBAAgB,eACV,MAAM,YACT,iBAAiB,KACzB,gBAAgB,EAqDlB,CAAC;AAEF,eAAO,MAAM,cAAc,UAAW,gBAAgB,KAAG,gBAAgB,EAcxE,CAAC;AAEF,eAAO,MAAM,kBAAkB,UAAW,gBAAgB,KAAG,gBAAgB,EAuC5E,CAAC;AAEF,eAAO,MAAM,gCAAgC,UAAW,gBAAgB,KAAG,gBAAgB,EAiB1F,CAAC;AAEF,eAAO,MAAM,cAAc,EAAE,OAAO,CACnC,MAAM,CACL,YAAY,EACZ,CACC,KAAK,EAAE,gBAAgB,EACvB,WAAW,EAAE,MAAM,EACnB,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,KAC1B,gBAAgB,EAAE,CACvB,CA8BD,CAAC;AAEF,wBAAgB,yBAAyB,CACxC,KAAK,EAAE,gBAAgB,EACvB,WAAW,EAAE,MAAM,EACnB,QAAQ,EAAE,iBAAiB,EAC3B,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAC5B,gBAAgB,EAAE,CA+BpB"}
@@ -32,7 +32,7 @@ ${snippetImportInferenceClient(accessToken, provider)}
  messages = ${messagesStr}

  stream = client.chat.completions.create(
- model="${model.id}",
+ model="${model.id}",
  messages=messages,
  ${configStr},
  stream=True
@@ -43,7 +43,8 @@ for chunk in stream:
  },
  {
  client: "openai",
- content: `from openai import OpenAI
+ content: `\
+ from openai import OpenAI

  client = OpenAI(
  base_url="${openAIbaseUrl(provider)}",
@@ -60,7 +61,7 @@ stream = client.chat.completions.create(
  )

  for chunk in stream:
- print(chunk.choices[0].delta.content, end="")`,
+ print(chunk.choices[0].delta.content, end="")`,
  },
  ];
  }
@@ -83,7 +84,8 @@ print(completion.choices[0].message)`,
  },
  {
  client: "openai",
- content: `from openai import OpenAI
+ content: `\
+ from openai import OpenAI

  client = OpenAI(
  base_url="${openAIbaseUrl(provider)}",
@@ -93,7 +95,7 @@ client = OpenAI(
  messages = ${messagesStr}

  completion = client.chat.completions.create(
- model="${model.id}",
+ model="${model.id}",
  messages=messages,
  ${configStr}
  )
@@ -107,7 +109,8 @@ export const snippetZeroShotClassification = (model) => {
  return [
  {
  client: "requests",
- content: `def query(payload):
+ content: `\
+ def query(payload):
  response = requests.post(API_URL, headers=headers, json=payload)
  return response.json()

@@ -122,7 +125,8 @@ export const snippetZeroShotImageClassification = (model) => {
  return [
  {
  client: "requests",
- content: `def query(data):
+ content: `\
+ def query(data):
  with open(data["image_path"], "rb") as f:
  img = f.read()
  payload={
@@ -133,8 +137,8 @@ export const snippetZeroShotImageClassification = (model) => {
  return response.json()

  output = query({
- "image_path": ${getModelInputSnippet(model)},
- "parameters": {"candidate_labels": ["cat", "dog", "llama"]},
+ "image_path": ${getModelInputSnippet(model)},
+ "parameters": {"candidate_labels": ["cat", "dog", "llama"]},
  })`,
  },
  ];
@@ -143,7 +147,8 @@ export const snippetBasic = (model) => {
  return [
  {
  client: "requests",
- content: `def query(payload):
+ content: `\
+ def query(payload):
  response = requests.post(API_URL, headers=headers, json=payload)
  return response.json()

@@ -157,13 +162,14 @@ export const snippetFile = (model) => {
  return [
  {
  client: "requests",
- content: `def query(filename):
- with open(filename, "rb") as f:
- data = f.read()
- response = requests.post(API_URL, headers=headers, data=data)
- return response.json()
-
- output = query(${getModelInputSnippet(model)})`,
+ content: `\
+ def query(filename):
+ with open(filename, "rb") as f:
+ data = f.read()
+ response = requests.post(API_URL, headers=headers, data=data)
+ return response.json()
+
+ output = query(${getModelInputSnippet(model)})`,
  },
  ];
  };
@@ -225,12 +231,14 @@ export const snippetTabular = (model) => {
  return [
  {
  client: "requests",
- content: `def query(payload):
- response = requests.post(API_URL, headers=headers, json=payload)
- return response.content
- response = query({
- "inputs": {"data": ${getModelInputSnippet(model)}},
- })`,
+ content: `\
+ def query(payload):
+ response = requests.post(API_URL, headers=headers, json=payload)
+ return response.content
+
+ response = query({
+ "inputs": {"data": ${getModelInputSnippet(model)}},
+ })`,
  },
  ];
  };
@@ -242,7 +250,8 @@ export const snippetTextToAudio = (model) => {
  return [
  {
  client: "requests",
- content: `def query(payload):
+ content: `\
+ def query(payload):
  response = requests.post(API_URL, headers=headers, json=payload)
  return response.content

@@ -259,7 +268,8 @@ Audio(audio_bytes)`,
  return [
  {
  client: "requests",
- content: `def query(payload):
+ content: `\
+ def query(payload):
  response = requests.post(API_URL, headers=headers, json=payload)
  return response.json()

@@ -277,9 +287,10 @@ export const snippetDocumentQuestionAnswering = (model) => {
  return [
  {
  client: "requests",
- content: `def query(payload):
- with open(payload["image"], "rb") as f:
- img = f.read()
+ content: `\
+ def query(payload):
+ with open(payload["image"], "rb") as f:
+ img = f.read()
  payload["image"] = base64.b64encode(img).decode("utf-8")
  response = requests.post(API_URL, headers=headers, json=payload)
  return response.json()
@@ -22,12 +22,12 @@ export interface TextToVideoInput {
  */
  export interface TextToVideoParameters {
  /**
- * A higher guidance scale value encourages the model to generate images closely linked to
+ * A higher guidance scale value encourages the model to generate videos closely linked to
  * the text prompt, but values too high may cause saturation and other artifacts.
  */
  guidance_scale?: number;
  /**
- * One or several prompt to guide what NOT to include in image generation.
+ * One or several prompt to guide what NOT to include in video generation.
  */
  negative_prompt?: string[];
  /**
@@ -36,7 +36,7 @@ export interface TextToVideoParameters {
  num_frames?: number;
  /**
  * The number of denoising steps. More denoising steps usually lead to a higher quality
- * image at the expense of slower inference.
+ * video at the expense of slower inference.
  */
  num_inference_steps?: number;
  /**
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "@huggingface/tasks",
  "packageManager": "pnpm@8.10.5",
- "version": "0.15.1",
+ "version": "0.15.3",
  "description": "List of ML tasks for huggingface.co/tasks",
  "repository": "https://github.com/huggingface/huggingface.js.git",
  "publishConfig": {
package/src/hardware.ts CHANGED
@@ -212,6 +212,10 @@ export const SKUS = {
  tflops: 8.2, // float32 (GPU does not support native float16)
  memory: [8],
  },
+ "GTX 1060": {
+ tflops: 3.9, // float32 (GPU does not support native float16)
+ memory: [3, 6],
+ },
  "RTX Titan": {
  tflops: 32.62,
  memory: [24],
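The new SKU entry records peak float32 throughput in TFLOPS (the GTX 1060 has no native float16 support, hence the float32 figure) and the shipped memory configurations in GB. A minimal sketch of how such an entry might be consumed; the values come from this diff, but the helper below is illustrative and not an API of @huggingface/tasks:

```ts
// Values added in this release; the helper is a hypothetical consumer of a SKUS entry.
const gtx1060 = {
    tflops: 3.9, // peak float32 throughput (no native float16 on this GPU)
    memory: [3, 6], // shipped memory configurations, in GB
};

function fitsInMemory(sku: { memory?: number[] }, modelSizeGB: number): boolean {
    // A model fits if at least one memory configuration can hold its weights.
    return (sku.memory ?? []).some((gb) => gb >= modelSizeGB);
}

console.log(fitsInMemory(gtx1060, 4)); // true: only the 6 GB variant qualifies
```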
@@ -185,7 +185,6 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
  repoName: "deepforest",
  docsUrl: "https://deepforest.readthedocs.io/en/latest/",
  repoUrl: "https://github.com/weecology/DeepForest",
- countDownloads: `path_extension:"pt" OR path_extension:"pl"`,
  },
  "depth-anything-v2": {
  prettyLabel: "DepthAnythingV2",
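As I read it, `countDownloads` is the filter that tells the Hub which uploaded files should count as downloads for a library; with it removed, deepforest downloads are no longer restricted to `.pt`/`.pl` files and fall back to the default counting rule. A hedged sketch of the entry shape after this change, using only fields visible in this diff (the `prettyLabel` value is assumed):

```ts
// Sketch of the deepforest entry in MODEL_LIBRARIES_UI_ELEMENTS after 0.15.3.
const deepforest = {
    prettyLabel: "DeepForest", // assumed, not shown in this hunk
    repoName: "deepforest",
    docsUrl: "https://deepforest.readthedocs.io/en/latest/",
    repoUrl: "https://github.com/weecology/DeepForest",
    // removed in 0.15.3:
    // countDownloads: `path_extension:"pt" OR path_extension:"pl"`,
};
```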
@@ -51,7 +51,7 @@ ${snippetImportInferenceClient(accessToken, provider)}
  messages = ${messagesStr}

  stream = client.chat.completions.create(
- model="${model.id}",
+ model="${model.id}",
  messages=messages,
  ${configStr},
  stream=True
@@ -62,7 +62,8 @@ for chunk in stream:
  },
  {
  client: "openai",
- content: `from openai import OpenAI
+ content: `\
+ from openai import OpenAI

  client = OpenAI(
  base_url="${openAIbaseUrl(provider)}",
@@ -79,7 +80,7 @@ stream = client.chat.completions.create(
  )

  for chunk in stream:
- print(chunk.choices[0].delta.content, end="")`,
+ print(chunk.choices[0].delta.content, end="")`,
  },
  ];
  } else {
@@ -101,7 +102,8 @@ print(completion.choices[0].message)`,
  },
  {
  client: "openai",
- content: `from openai import OpenAI
+ content: `\
+ from openai import OpenAI

  client = OpenAI(
  base_url="${openAIbaseUrl(provider)}",
@@ -111,7 +113,7 @@ client = OpenAI(
  messages = ${messagesStr}

  completion = client.chat.completions.create(
- model="${model.id}",
+ model="${model.id}",
  messages=messages,
  ${configStr}
  )
@@ -126,7 +128,8 @@ export const snippetZeroShotClassification = (model: ModelDataMinimal): Inferenc
  return [
  {
  client: "requests",
- content: `def query(payload):
+ content: `\
+ def query(payload):
  response = requests.post(API_URL, headers=headers, json=payload)
  return response.json()

@@ -142,7 +145,8 @@ export const snippetZeroShotImageClassification = (model: ModelDataMinimal): Inf
  return [
  {
  client: "requests",
- content: `def query(data):
+ content: `\
+ def query(data):
  with open(data["image_path"], "rb") as f:
  img = f.read()
  payload={
@@ -153,8 +157,8 @@ export const snippetZeroShotImageClassification = (model: ModelDataMinimal): Inf
  return response.json()

  output = query({
- "image_path": ${getModelInputSnippet(model)},
- "parameters": {"candidate_labels": ["cat", "dog", "llama"]},
+ "image_path": ${getModelInputSnippet(model)},
+ "parameters": {"candidate_labels": ["cat", "dog", "llama"]},
  })`,
  },
  ];
@@ -164,7 +168,8 @@ export const snippetBasic = (model: ModelDataMinimal): InferenceSnippet[] => {
  return [
  {
  client: "requests",
- content: `def query(payload):
+ content: `\
+ def query(payload):
  response = requests.post(API_URL, headers=headers, json=payload)
  return response.json()

@@ -179,13 +184,14 @@ export const snippetFile = (model: ModelDataMinimal): InferenceSnippet[] => {
  return [
  {
  client: "requests",
- content: `def query(filename):
- with open(filename, "rb") as f:
- data = f.read()
- response = requests.post(API_URL, headers=headers, data=data)
- return response.json()
-
- output = query(${getModelInputSnippet(model)})`,
+ content: `\
+ def query(filename):
+ with open(filename, "rb") as f:
+ data = f.read()
+ response = requests.post(API_URL, headers=headers, data=data)
+ return response.json()
+
+ output = query(${getModelInputSnippet(model)})`,
  },
  ];
  };
@@ -253,12 +259,14 @@ export const snippetTabular = (model: ModelDataMinimal): InferenceSnippet[] => {
  return [
  {
  client: "requests",
- content: `def query(payload):
- response = requests.post(API_URL, headers=headers, json=payload)
- return response.content
- response = query({
- "inputs": {"data": ${getModelInputSnippet(model)}},
- })`,
+ content: `\
+ def query(payload):
+ response = requests.post(API_URL, headers=headers, json=payload)
+ return response.content
+
+ response = query({
+ "inputs": {"data": ${getModelInputSnippet(model)}},
+ })`,
  },
  ];
  };
@@ -271,7 +279,8 @@ export const snippetTextToAudio = (model: ModelDataMinimal): InferenceSnippet[]
  return [
  {
  client: "requests",
- content: `def query(payload):
+ content: `\
+ def query(payload):
  response = requests.post(API_URL, headers=headers, json=payload)
  return response.content

@@ -287,7 +296,8 @@ Audio(audio_bytes)`,
  return [
  {
  client: "requests",
- content: `def query(payload):
+ content: `\
+ def query(payload):
  response = requests.post(API_URL, headers=headers, json=payload)
  return response.json()

@@ -306,9 +316,10 @@ export const snippetDocumentQuestionAnswering = (model: ModelDataMinimal): Infer
  return [
  {
  client: "requests",
- content: `def query(payload):
- with open(payload["image"], "rb") as f:
- img = f.read()
+ content: `\
+ def query(payload):
+ with open(payload["image"], "rb") as f:
+ img = f.read()
  payload["image"] = base64.b64encode(img).decode("utf-8")
  response = requests.post(API_URL, headers=headers, json=payload)
  return response.json()
@@ -22,12 +22,12 @@ export interface TextToVideoInput {
  */
  export interface TextToVideoParameters {
  /**
- * A higher guidance scale value encourages the model to generate images closely linked to
+ * A higher guidance scale value encourages the model to generate videos closely linked to
  * the text prompt, but values too high may cause saturation and other artifacts.
  */
  guidance_scale?: number;
  /**
- * One or several prompt to guide what NOT to include in image generation.
+ * One or several prompt to guide what NOT to include in video generation.
  */
  negative_prompt?: string[];
  /**
@@ -36,7 +36,7 @@ export interface TextToVideoParameters {
  num_frames?: number;
  /**
  * The number of denoising steps. More denoising steps usually lead to a higher quality
- * image at the expense of slower inference.
+ * video at the expense of slower inference.
  */
  num_inference_steps?: number;
  /**
@@ -25,18 +25,18 @@
  },
  "guidance_scale": {
  "type": "number",
- "description": "A higher guidance scale value encourages the model to generate images closely linked to the text prompt, but values too high may cause saturation and other artifacts."
+ "description": "A higher guidance scale value encourages the model to generate videos closely linked to the text prompt, but values too high may cause saturation and other artifacts."
  },
  "negative_prompt": {
  "type": "array",
  "items": {
  "type": "string"
  },
- "description": "One or several prompt to guide what NOT to include in image generation."
+ "description": "One or several prompt to guide what NOT to include in video generation."
  },
  "num_inference_steps": {
  "type": "integer",
- "description": "The number of denoising steps. More denoising steps usually lead to a higher quality image at the expense of slower inference."
+ "description": "The number of denoising steps. More denoising steps usually lead to a higher quality video at the expense of slower inference."
  },
  "seed": {
  "type": "integer",