@huggingface/tasks 0.13.4 → 0.13.6
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries; it is provided for informational purposes only.
- package/dist/commonjs/hardware.d.ts +12 -0
- package/dist/commonjs/hardware.d.ts.map +1 -1
- package/dist/commonjs/hardware.js +15 -3
- package/dist/commonjs/local-apps.d.ts.map +1 -1
- package/dist/commonjs/local-apps.js +40 -21
- package/dist/commonjs/local-apps.spec.d.ts +2 -0
- package/dist/commonjs/local-apps.spec.d.ts.map +1 -0
- package/dist/commonjs/local-apps.spec.js +114 -0
- package/dist/commonjs/model-libraries-snippets.d.ts.map +1 -1
- package/dist/commonjs/model-libraries-snippets.js +23 -12
- package/dist/commonjs/model-libraries-snippets.spec.d.ts +2 -0
- package/dist/commonjs/model-libraries-snippets.spec.d.ts.map +1 -0
- package/dist/commonjs/model-libraries-snippets.spec.js +51 -0
- package/dist/commonjs/snippets/curl.js +4 -4
- package/dist/commonjs/snippets/js.js +5 -5
- package/dist/commonjs/tasks/audio-classification/inference.d.ts +4 -3
- package/dist/commonjs/tasks/audio-classification/inference.d.ts.map +1 -1
- package/dist/commonjs/tasks/automatic-speech-recognition/inference.d.ts +1 -5
- package/dist/commonjs/tasks/automatic-speech-recognition/inference.d.ts.map +1 -1
- package/dist/commonjs/tasks/chat-completion/inference.d.ts +18 -6
- package/dist/commonjs/tasks/chat-completion/inference.d.ts.map +1 -1
- package/dist/commonjs/tasks/depth-estimation/inference.d.ts +1 -1
- package/dist/commonjs/tasks/document-question-answering/inference.d.ts +1 -3
- package/dist/commonjs/tasks/document-question-answering/inference.d.ts.map +1 -1
- package/dist/commonjs/tasks/feature-extraction/inference.d.ts +1 -1
- package/dist/commonjs/tasks/fill-mask/inference.d.ts +1 -3
- package/dist/commonjs/tasks/fill-mask/inference.d.ts.map +1 -1
- package/dist/commonjs/tasks/image-classification/inference.d.ts +4 -3
- package/dist/commonjs/tasks/image-classification/inference.d.ts.map +1 -1
- package/dist/commonjs/tasks/image-segmentation/inference.d.ts +1 -3
- package/dist/commonjs/tasks/image-segmentation/inference.d.ts.map +1 -1
- package/dist/commonjs/tasks/image-to-image/inference.d.ts +1 -3
- package/dist/commonjs/tasks/image-to-image/inference.d.ts.map +1 -1
- package/dist/commonjs/tasks/image-to-text/inference.d.ts +1 -5
- package/dist/commonjs/tasks/image-to-text/inference.d.ts.map +1 -1
- package/dist/commonjs/tasks/object-detection/inference.d.ts +1 -3
- package/dist/commonjs/tasks/object-detection/inference.d.ts.map +1 -1
- package/dist/commonjs/tasks/question-answering/inference.d.ts +1 -3
- package/dist/commonjs/tasks/question-answering/inference.d.ts.map +1 -1
- package/dist/commonjs/tasks/sentence-similarity/inference.d.ts +1 -1
- package/dist/commonjs/tasks/summarization/inference.d.ts +1 -3
- package/dist/commonjs/tasks/summarization/inference.d.ts.map +1 -1
- package/dist/commonjs/tasks/table-question-answering/inference.d.ts +1 -1
- package/dist/commonjs/tasks/text-classification/inference.d.ts +4 -3
- package/dist/commonjs/tasks/text-classification/inference.d.ts.map +1 -1
- package/dist/commonjs/tasks/text-to-audio/inference.d.ts +1 -5
- package/dist/commonjs/tasks/text-to-audio/inference.d.ts.map +1 -1
- package/dist/commonjs/tasks/text-to-image/inference.d.ts +1 -3
- package/dist/commonjs/tasks/text-to-image/inference.d.ts.map +1 -1
- package/dist/commonjs/tasks/text-to-speech/inference.d.ts +1 -5
- package/dist/commonjs/tasks/text-to-speech/inference.d.ts.map +1 -1
- package/dist/commonjs/tasks/text2text-generation/inference.d.ts +1 -3
- package/dist/commonjs/tasks/text2text-generation/inference.d.ts.map +1 -1
- package/dist/commonjs/tasks/token-classification/inference.d.ts +1 -3
- package/dist/commonjs/tasks/token-classification/inference.d.ts.map +1 -1
- package/dist/commonjs/tasks/translation/inference.d.ts +1 -3
- package/dist/commonjs/tasks/translation/inference.d.ts.map +1 -1
- package/dist/commonjs/tasks/video-classification/inference.d.ts +4 -3
- package/dist/commonjs/tasks/video-classification/inference.d.ts.map +1 -1
- package/dist/commonjs/tasks/visual-question-answering/inference.d.ts +1 -3
- package/dist/commonjs/tasks/visual-question-answering/inference.d.ts.map +1 -1
- package/dist/commonjs/tasks/zero-shot-classification/inference.d.ts +1 -3
- package/dist/commonjs/tasks/zero-shot-classification/inference.d.ts.map +1 -1
- package/dist/commonjs/tasks/zero-shot-image-classification/inference.d.ts +1 -3
- package/dist/commonjs/tasks/zero-shot-image-classification/inference.d.ts.map +1 -1
- package/dist/commonjs/tasks/zero-shot-object-detection/inference.d.ts +1 -3
- package/dist/commonjs/tasks/zero-shot-object-detection/inference.d.ts.map +1 -1
- package/dist/esm/hardware.d.ts +12 -0
- package/dist/esm/hardware.d.ts.map +1 -1
- package/dist/esm/hardware.js +15 -3
- package/dist/esm/local-apps.d.ts.map +1 -1
- package/dist/esm/local-apps.js +40 -21
- package/dist/esm/local-apps.spec.d.ts +2 -0
- package/dist/esm/local-apps.spec.d.ts.map +1 -0
- package/dist/esm/local-apps.spec.js +112 -0
- package/dist/esm/model-libraries-snippets.d.ts.map +1 -1
- package/dist/esm/model-libraries-snippets.js +23 -12
- package/dist/esm/model-libraries-snippets.spec.d.ts +2 -0
- package/dist/esm/model-libraries-snippets.spec.d.ts.map +1 -0
- package/dist/esm/model-libraries-snippets.spec.js +49 -0
- package/dist/esm/snippets/curl.js +4 -4
- package/dist/esm/snippets/js.js +5 -5
- package/dist/esm/tasks/audio-classification/inference.d.ts +4 -3
- package/dist/esm/tasks/audio-classification/inference.d.ts.map +1 -1
- package/dist/esm/tasks/automatic-speech-recognition/inference.d.ts +1 -5
- package/dist/esm/tasks/automatic-speech-recognition/inference.d.ts.map +1 -1
- package/dist/esm/tasks/chat-completion/inference.d.ts +18 -6
- package/dist/esm/tasks/chat-completion/inference.d.ts.map +1 -1
- package/dist/esm/tasks/depth-estimation/inference.d.ts +1 -1
- package/dist/esm/tasks/document-question-answering/inference.d.ts +1 -3
- package/dist/esm/tasks/document-question-answering/inference.d.ts.map +1 -1
- package/dist/esm/tasks/feature-extraction/inference.d.ts +1 -1
- package/dist/esm/tasks/fill-mask/inference.d.ts +1 -3
- package/dist/esm/tasks/fill-mask/inference.d.ts.map +1 -1
- package/dist/esm/tasks/image-classification/inference.d.ts +4 -3
- package/dist/esm/tasks/image-classification/inference.d.ts.map +1 -1
- package/dist/esm/tasks/image-segmentation/inference.d.ts +1 -3
- package/dist/esm/tasks/image-segmentation/inference.d.ts.map +1 -1
- package/dist/esm/tasks/image-to-image/inference.d.ts +1 -3
- package/dist/esm/tasks/image-to-image/inference.d.ts.map +1 -1
- package/dist/esm/tasks/image-to-text/inference.d.ts +1 -5
- package/dist/esm/tasks/image-to-text/inference.d.ts.map +1 -1
- package/dist/esm/tasks/object-detection/inference.d.ts +1 -3
- package/dist/esm/tasks/object-detection/inference.d.ts.map +1 -1
- package/dist/esm/tasks/question-answering/inference.d.ts +1 -3
- package/dist/esm/tasks/question-answering/inference.d.ts.map +1 -1
- package/dist/esm/tasks/sentence-similarity/inference.d.ts +1 -1
- package/dist/esm/tasks/summarization/inference.d.ts +1 -3
- package/dist/esm/tasks/summarization/inference.d.ts.map +1 -1
- package/dist/esm/tasks/table-question-answering/inference.d.ts +1 -1
- package/dist/esm/tasks/text-classification/inference.d.ts +4 -3
- package/dist/esm/tasks/text-classification/inference.d.ts.map +1 -1
- package/dist/esm/tasks/text-to-audio/inference.d.ts +1 -5
- package/dist/esm/tasks/text-to-audio/inference.d.ts.map +1 -1
- package/dist/esm/tasks/text-to-image/inference.d.ts +1 -3
- package/dist/esm/tasks/text-to-image/inference.d.ts.map +1 -1
- package/dist/esm/tasks/text-to-speech/inference.d.ts +1 -5
- package/dist/esm/tasks/text-to-speech/inference.d.ts.map +1 -1
- package/dist/esm/tasks/text2text-generation/inference.d.ts +1 -3
- package/dist/esm/tasks/text2text-generation/inference.d.ts.map +1 -1
- package/dist/esm/tasks/token-classification/inference.d.ts +1 -3
- package/dist/esm/tasks/token-classification/inference.d.ts.map +1 -1
- package/dist/esm/tasks/translation/inference.d.ts +1 -3
- package/dist/esm/tasks/translation/inference.d.ts.map +1 -1
- package/dist/esm/tasks/video-classification/inference.d.ts +4 -3
- package/dist/esm/tasks/video-classification/inference.d.ts.map +1 -1
- package/dist/esm/tasks/visual-question-answering/inference.d.ts +1 -3
- package/dist/esm/tasks/visual-question-answering/inference.d.ts.map +1 -1
- package/dist/esm/tasks/zero-shot-classification/inference.d.ts +1 -3
- package/dist/esm/tasks/zero-shot-classification/inference.d.ts.map +1 -1
- package/dist/esm/tasks/zero-shot-image-classification/inference.d.ts +1 -3
- package/dist/esm/tasks/zero-shot-image-classification/inference.d.ts.map +1 -1
- package/dist/esm/tasks/zero-shot-object-detection/inference.d.ts +1 -3
- package/dist/esm/tasks/zero-shot-object-detection/inference.d.ts.map +1 -1
- package/package.json +1 -1
- package/src/hardware.ts +15 -3
- package/src/local-apps.spec.ts +123 -0
- package/src/local-apps.ts +37 -18
- package/src/model-libraries-snippets.spec.ts +54 -0
- package/src/model-libraries-snippets.ts +24 -11
- package/src/snippets/curl.ts +4 -4
- package/src/snippets/js.ts +5 -5
- package/src/tasks/audio-classification/inference.ts +4 -3
- package/src/tasks/audio-classification/spec/input.json +3 -3
- package/src/tasks/automatic-speech-recognition/inference.ts +1 -5
- package/src/tasks/automatic-speech-recognition/spec/input.json +1 -2
- package/src/tasks/chat-completion/inference.ts +19 -6
- package/src/tasks/chat-completion/spec/input.json +14 -19
- package/src/tasks/common-definitions.json +0 -1
- package/src/tasks/depth-estimation/inference.ts +1 -1
- package/src/tasks/depth-estimation/spec/input.json +1 -2
- package/src/tasks/document-question-answering/inference.ts +1 -3
- package/src/tasks/document-question-answering/spec/input.json +1 -2
- package/src/tasks/feature-extraction/inference.ts +1 -1
- package/src/tasks/feature-extraction/spec/input.json +1 -1
- package/src/tasks/fill-mask/inference.ts +1 -3
- package/src/tasks/fill-mask/spec/input.json +1 -2
- package/src/tasks/image-classification/inference.ts +4 -3
- package/src/tasks/image-classification/spec/input.json +3 -3
- package/src/tasks/image-segmentation/inference.ts +1 -3
- package/src/tasks/image-segmentation/spec/input.json +1 -2
- package/src/tasks/image-to-image/inference.ts +1 -3
- package/src/tasks/image-to-image/spec/input.json +1 -2
- package/src/tasks/image-to-text/inference.ts +1 -5
- package/src/tasks/image-to-text/spec/input.json +1 -2
- package/src/tasks/object-detection/inference.ts +1 -3
- package/src/tasks/object-detection/spec/input.json +1 -2
- package/src/tasks/placeholder/spec/input.json +1 -2
- package/src/tasks/question-answering/inference.ts +1 -3
- package/src/tasks/question-answering/spec/input.json +1 -2
- package/src/tasks/sentence-similarity/inference.ts +1 -1
- package/src/tasks/sentence-similarity/spec/input.json +1 -2
- package/src/tasks/summarization/inference.ts +1 -3
- package/src/tasks/summarization/spec/input.json +1 -2
- package/src/tasks/table-question-answering/inference.ts +1 -1
- package/src/tasks/table-question-answering/spec/input.json +1 -2
- package/src/tasks/text-classification/inference.ts +4 -3
- package/src/tasks/text-classification/spec/input.json +3 -3
- package/src/tasks/text-to-audio/inference.ts +1 -5
- package/src/tasks/text-to-audio/spec/input.json +1 -2
- package/src/tasks/text-to-image/inference.ts +1 -3
- package/src/tasks/text-to-image/spec/input.json +1 -2
- package/src/tasks/text-to-speech/inference.ts +1 -5
- package/src/tasks/text-to-speech/spec/input.json +1 -2
- package/src/tasks/text2text-generation/inference.ts +1 -3
- package/src/tasks/text2text-generation/spec/input.json +1 -2
- package/src/tasks/token-classification/inference.ts +1 -3
- package/src/tasks/token-classification/spec/input.json +1 -2
- package/src/tasks/translation/inference.ts +1 -3
- package/src/tasks/translation/spec/input.json +1 -2
- package/src/tasks/video-classification/inference.ts +4 -3
- package/src/tasks/video-classification/spec/input.json +3 -3
- package/src/tasks/visual-question-answering/inference.ts +1 -3
- package/src/tasks/visual-question-answering/spec/input.json +1 -2
- package/src/tasks/zero-shot-classification/inference.ts +1 -3
- package/src/tasks/zero-shot-classification/spec/input.json +1 -2
- package/src/tasks/zero-shot-image-classification/inference.ts +1 -3
- package/src/tasks/zero-shot-image-classification/spec/input.json +1 -2
- package/src/tasks/zero-shot-object-detection/inference.ts +1 -3
- package/src/tasks/zero-shot-object-detection/spec/input.json +1 -2
- package/dist/commonjs/snippets/curl.spec.d.ts +0 -2
- package/dist/commonjs/snippets/curl.spec.d.ts.map +0 -1
- package/dist/commonjs/snippets/curl.spec.js +0 -89
- package/dist/commonjs/snippets/js.spec.d.ts +0 -2
- package/dist/commonjs/snippets/js.spec.d.ts.map +0 -1
- package/dist/commonjs/snippets/js.spec.js +0 -141
- package/dist/commonjs/snippets/python.spec.d.ts +0 -2
- package/dist/commonjs/snippets/python.spec.d.ts.map +0 -1
- package/dist/commonjs/snippets/python.spec.js +0 -135
- package/dist/esm/snippets/curl.spec.d.ts +0 -2
- package/dist/esm/snippets/curl.spec.d.ts.map +0 -1
- package/dist/esm/snippets/curl.spec.js +0 -87
- package/dist/esm/snippets/js.spec.d.ts +0 -2
- package/dist/esm/snippets/js.spec.d.ts.map +0 -1
- package/dist/esm/snippets/js.spec.js +0 -139
- package/dist/esm/snippets/python.spec.d.ts +0 -2
- package/dist/esm/snippets/python.spec.d.ts.map +0 -1
- package/dist/esm/snippets/python.spec.js +0 -133
- package/src/snippets/curl.spec.ts +0 -94
- package/src/snippets/js.spec.ts +0 -148
- package/src/snippets/python.spec.ts +0 -144

package/dist/commonjs/hardware.d.ts
@@ -471,6 +471,18 @@ export declare const SKUS: {
                 tflops: number;
                 memory: number[];
             };
+            "Apple M4": {
+                tflops: number;
+                memory: number[];
+            };
+            "Apple M4 Pro": {
+                tflops: number;
+                memory: number[];
+            };
+            "Apple M4 Max": {
+                tflops: number;
+                memory: number[];
+            };
         };
     };
 };

package/dist/commonjs/hardware.d.ts.map
@@ -1 +1 @@
-
{"version":3,"file":"hardware.d.ts","sourceRoot":"","sources":["../../src/hardware.ts"],"names":[],"mappings":"AAAA;;;GAGG;AACH,eAAO,MAAM,iDAAiD,QAAW,CAAC;AAC1E,eAAO,MAAM,yDAAyD,QAAW,CAAC;AAClF,eAAO,MAAM,oCAAoC,QAAU,CAAC;AAE5D;;;GAGG;AACH,eAAO,MAAM,+CAA+C,QAAW,CAAC;AAExE,MAAM,WAAW,YAAY;IAC5B;;;;;;;;;OASG;IACH,MAAM,EAAE,MAAM,CAAC;IACf;;;OAGG;IACH,MAAM,CAAC,EAAE,MAAM,EAAE,CAAC;CAClB;AAED,eAAO,MAAM,sBAAsB,UAAqD,CAAC;AAEzF,eAAO,MAAM,IAAI
+
{"version":3,"file":"hardware.d.ts","sourceRoot":"","sources":["../../src/hardware.ts"],"names":[],"mappings":"AAAA;;;GAGG;AACH,eAAO,MAAM,iDAAiD,QAAW,CAAC;AAC1E,eAAO,MAAM,yDAAyD,QAAW,CAAC;AAClF,eAAO,MAAM,oCAAoC,QAAU,CAAC;AAE5D;;;GAGG;AACH,eAAO,MAAM,+CAA+C,QAAW,CAAC;AAExE,MAAM,WAAW,YAAY;IAC5B;;;;;;;;;OASG;IACH,MAAM,EAAE,MAAM,CAAC;IACf;;;OAGG;IACH,MAAM,CAAC,EAAE,MAAM,EAAE,CAAC;CAClB;AAED,eAAO,MAAM,sBAAsB,UAAqD,CAAC;AAEzF,eAAO,MAAM,IAAI;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAwcuD,CAAC;AAEzE,MAAM,MAAM,OAAO,GAAG,MAAM,OAAO,IAAI,CAAC"}

package/dist/commonjs/hardware.js
@@ -433,7 +433,7 @@ exports.SKUS = {
                 memory: [8, 16, 24],
             },
             "Apple M2 Pro": {
-                tflops:
+                tflops: 6.8,
                 memory: [16, 24, 32],
             },
             "Apple M2 Max": {
@@ -445,17 +445,29 @@
                 memory: [64, 96, 128, 192],
             },
             "Apple M3": {
-                tflops:
+                tflops: 4.1,
                 memory: [8, 16, 24],
             },
             "Apple M3 Pro": {
-                tflops:
+                tflops: 7.4,
                 memory: [18, 36],
             },
             "Apple M3 Max": {
                 tflops: 14.2,
                 memory: [36, 48, 64, 96, 128],
             },
+            "Apple M4": {
+                tflops: 4.6,
+                memory: [16, 24, 32],
+            },
+            "Apple M4 Pro": {
+                tflops: 9.2,
+                memory: [24, 48],
+            },
+            "Apple M4 Max": {
+                tflops: 18.4,
+                memory: [36, 48, 64, 128],
+            },
         },
     },
 };

package/dist/commonjs/local-apps.d.ts.map
@@ -1 +1 @@
-
{"version":3,"file":"local-apps.d.ts","sourceRoot":"","sources":["../../src/local-apps.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAC;AACjD,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,gBAAgB,CAAC;
+
{"version":3,"file":"local-apps.d.ts","sourceRoot":"","sources":["../../src/local-apps.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAC;AACjD,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,gBAAgB,CAAC;AAKnD,MAAM,WAAW,eAAe;IAC/B;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IACd;;OAEG;IACH,KAAK,CAAC,EAAE,MAAM,CAAC;IACf;;OAEG;IACH,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,CAAC;CAC3B;AAED;;GAEG;AACH,MAAM,MAAM,QAAQ,GAAG;IACtB;;OAEG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,QAAQ,EAAE,YAAY,CAAC;IACvB;;OAEG;IACH,SAAS,CAAC,EAAE,OAAO,CAAC;IAEpB,UAAU,CAAC,EAAE,OAAO,CAAC;IACrB;;OAEG;IACH,kBAAkB,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,OAAO,CAAC;CAClD,GAAG,CACD;IACA;;OAEG;IACH,QAAQ,EAAE,CAAC,KAAK,EAAE,SAAS,EAAE,QAAQ,CAAC,EAAE,MAAM,KAAK,GAAG,CAAC;CACtD,GACD;IACA;;;;OAIG;IACH,OAAO,EAAE,CAAC,KAAK,EAAE,SAAS,EAAE,QAAQ,CAAC,EAAE,MAAM,KAAK,MAAM,GAAG,MAAM,EAAE,GAAG,eAAe,GAAG,eAAe,EAAE,CAAC;CACzG,CACH,CAAC;AAqBF,iBAAS,UAAU,CAAC,KAAK,EAAE,SAAS,GAAG,OAAO,CAE7C;AAED,iBAAS,mBAAmB,CAAC,KAAK,EAAE,SAAS,WAE5C;AA+KD;;;;;;;;;;GAUG;AACH,eAAO,MAAM,UAAU;;;;;;yBApLS,SAAS,aAAa,MAAM,KAAG,eAAe,EAAE;;;;;;;yBA0CzC,SAAS,aAAa,MAAM,KAAG,eAAe,EAAE;;;;;;oCA6JzD,SAAS;yBA5GX,SAAS,KAAG,eAAe,EAAE;;;;;;;yBAiD9B,SAAS,KAAG,eAAe,EAAE;;;;;;;;;;;;;;yBAxEzB,SAAS,aAAa,MAAM,KAAG,eAAe,EAAE;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;yBATjD,SAAS,aAAa,MAAM,KAAG,MAAM;;CAiR/B,CAAC;AAErC,MAAM,MAAM,WAAW,GAAG,MAAM,OAAO,UAAU,CAAC"}

package/dist/commonjs/local-apps.js
@@ -2,6 +2,8 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.LOCAL_APPS = void 0;
 const gguf_js_1 = require("./gguf.js");
+const common_js_1 = require("./snippets/common.js");
+const inputs_js_1 = require("./snippets/inputs.js");
 function isAwqModel(model) {
     return model.config?.quantization_config?.quant_method === "awq";
 }
@@ -27,14 +29,20 @@ function isMlxModel(model) {
     return model.tags.includes("mlx");
 }
 const snippetLlamacpp = (model, filepath) => {
-    const command = (binary) =>
-
-
-
-
-
-
-
+    const command = (binary) => {
+        const snippet = [
+            "# Load and run the model:",
+            `${binary} \\`,
+            ` --hf-repo "${model.id}" \\`,
+            ` --hf-file ${filepath ?? "{{GGUF_FILE}}"} \\`,
+            ` -p "${model.tags.includes("conversational") ? "You are a helpful assistant" : "Once upon a time,"}"`,
+        ];
+        if (model.tags.includes("conversational")) {
+            snippet[snippet.length - 1] += " \\";
+            snippet.push(" --conversation");
+        }
+        return snippet.join("\n");
+    };
     return [
         {
             title: "Install from brew",
@@ -105,22 +113,33 @@ const snippetLocalAI = (model, filepath) => {
     ];
 };
 const snippetVllm = (model) => {
-    const
-
-
-
-
-
-
-
-
-
-
+    const messages = (0, inputs_js_1.getModelInputSnippet)(model);
+    const runCommandInstruct = `# Call the server using curl:
+curl -X POST "http://localhost:8000/v1/chat/completions" \\
+-H "Content-Type: application/json" \\
+--data '{
+"model": "${model.id}",
+"messages": ${(0, common_js_1.stringifyMessages)(messages, {
+        indent: "\t\t",
+        attributeKeyQuotes: true,
+        customContentEscaper: (str) => str.replace(/'/g, "'\\''"),
+    })}
+}'`;
+    const runCommandNonInstruct = `# Call the server using curl:
+curl -X POST "http://localhost:8000/v1/completions" \\
+-H "Content-Type: application/json" \\
+--data '{
+"model": "${model.id}",
+"prompt": "Once upon a time,",
+"max_tokens": 512,
+"temperature": 0.5
+}'`;
+    const runCommand = model.tags.includes("conversational") ? runCommandInstruct : runCommandNonInstruct;
     return [
         {
             title: "Install from pip",
             setup: ["# Install vLLM from pip:", "pip install vllm"].join("\n"),
-            content: [`# Load and run the model:\nvllm serve "${model.id}"`, runCommand
+            content: [`# Load and run the model:\nvllm serve "${model.id}"`, runCommand],
         },
         {
             title: "Use Docker images",
@@ -137,7 +156,7 @@ const snippetVllm = (model) => {
             ].join("\n"),
             content: [
                 `# Load and run the model:\ndocker exec -it my_vllm_container bash -c "vllm serve ${model.id}"`,
-                runCommand
+                runCommand,
             ],
         },
     ];

package/dist/commonjs/local-apps.spec.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"local-apps.spec.d.ts","sourceRoot":"","sources":["../../src/local-apps.spec.ts"],"names":[],"mappings":""}

package/dist/commonjs/local-apps.spec.js
@@ -0,0 +1,114 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const vitest_1 = require("vitest");
+const local_apps_js_1 = require("./local-apps.js");
+(0, vitest_1.describe)("local-apps", () => {
+    (0, vitest_1.it)("llama.cpp conversational", async () => {
+        const { snippet: snippetFunc } = local_apps_js_1.LOCAL_APPS["llama.cpp"];
+        const model = {
+            id: "bartowski/Llama-3.2-3B-Instruct-GGUF",
+            tags: ["conversational"],
+            inference: "",
+        };
+        const snippet = snippetFunc(model);
+        (0, vitest_1.expect)(snippet[0].content).toEqual(`# Load and run the model:
+llama-cli \\
+ --hf-repo "bartowski/Llama-3.2-3B-Instruct-GGUF" \\
+ --hf-file {{GGUF_FILE}} \\
+ -p "You are a helpful assistant" \\
+ --conversation`);
+    });
+    (0, vitest_1.it)("llama.cpp non-conversational", async () => {
+        const { snippet: snippetFunc } = local_apps_js_1.LOCAL_APPS["llama.cpp"];
+        const model = {
+            id: "mlabonne/gemma-2b-GGUF",
+            tags: [],
+            inference: "",
+        };
+        const snippet = snippetFunc(model);
+        (0, vitest_1.expect)(snippet[0].content).toEqual(`# Load and run the model:
+llama-cli \\
+ --hf-repo "mlabonne/gemma-2b-GGUF" \\
+ --hf-file {{GGUF_FILE}} \\
+ -p "Once upon a time,"`);
+    });
+    (0, vitest_1.it)("vLLM conversational llm", async () => {
+        const { snippet: snippetFunc } = local_apps_js_1.LOCAL_APPS["vllm"];
+        const model = {
+            id: "meta-llama/Llama-3.2-3B-Instruct",
+            pipeline_tag: "text-generation",
+            tags: ["conversational"],
+            inference: "",
+        };
+        const snippet = snippetFunc(model);
+        (0, vitest_1.expect)(snippet[0].content.join("\n")).toEqual(`# Load and run the model:
+vllm serve "meta-llama/Llama-3.2-3B-Instruct"
+# Call the server using curl:
+curl -X POST "http://localhost:8000/v1/chat/completions" \\
+-H "Content-Type: application/json" \\
+--data '{
+"model": "meta-llama/Llama-3.2-3B-Instruct",
+"messages": [
+{
+"role": "user",
+"content": "What is the capital of France?"
+}
+]
+}'`);
+    });
+    (0, vitest_1.it)("vLLM non-conversational llm", async () => {
+        const { snippet: snippetFunc } = local_apps_js_1.LOCAL_APPS["vllm"];
+        const model = {
+            id: "meta-llama/Llama-3.2-3B",
+            tags: [""],
+            inference: "",
+        };
+        const snippet = snippetFunc(model);
+        (0, vitest_1.expect)(snippet[0].content.join("\n")).toEqual(`# Load and run the model:
+vllm serve "meta-llama/Llama-3.2-3B"
+# Call the server using curl:
+curl -X POST "http://localhost:8000/v1/completions" \\
+-H "Content-Type: application/json" \\
+--data '{
+"model": "meta-llama/Llama-3.2-3B",
+"prompt": "Once upon a time,",
+"max_tokens": 512,
+"temperature": 0.5
+}'`);
+    });
+    (0, vitest_1.it)("vLLM conversational vlm", async () => {
+        const { snippet: snippetFunc } = local_apps_js_1.LOCAL_APPS["vllm"];
+        const model = {
+            id: "meta-llama/Llama-3.2-11B-Vision-Instruct",
+            pipeline_tag: "image-text-to-text",
+            tags: ["conversational"],
+            inference: "",
+        };
+        const snippet = snippetFunc(model);
+        (0, vitest_1.expect)(snippet[0].content.join("\n")).toEqual(`# Load and run the model:
+vllm serve "meta-llama/Llama-3.2-11B-Vision-Instruct"
+# Call the server using curl:
+curl -X POST "http://localhost:8000/v1/chat/completions" \\
+-H "Content-Type: application/json" \\
+--data '{
+"model": "meta-llama/Llama-3.2-11B-Vision-Instruct",
+"messages": [
+{
+"role": "user",
+"content": [
+{
+"type": "text",
+"text": "Describe this image in one sentence."
+},
+{
+"type": "image_url",
+"image_url": {
+"url": "https://cdn.britannica.com/61/93061-050-99147DCE/Statue-of-Liberty-Island-New-York-Bay.jpg"
+}
+}
+]
+}
+]
+}'`);
+    });
+});

package/dist/commonjs/model-libraries-snippets.d.ts.map
@@ -1 +1 @@
-
{"version":3,"file":"model-libraries-snippets.d.ts","sourceRoot":"","sources":["../../src/model-libraries-snippets.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAC;
+
{"version":3,"file":"model-libraries-snippets.d.ts","sourceRoot":"","sources":["../../src/model-libraries-snippets.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAC;AAkBjD,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAkBF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAkBlD,CAAC;AAaF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,iBAAiB,UAAW,SAAS,KAAG,MAAM,EA6C1D,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAwBlD,CAAC;AAuCF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAUlD,CAAC;AAEF,eAAO,MAAM,YAAY,UAAW,SAAS,KAAG,MAAM,EAwCrD,CAAC;AAEF,eAAO,MAAM,gBAAgB,UAAW,SAAS,KAAG,MAAM,EAgBzD,CAAC;AAEF,eAAO,MAAM,YAAY,UAAW,SAAS,KAAG,MAAM,EAmBrD,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAgB/C,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAMlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EASlD,CAAC;AAIF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAO/C,CAAC;AAEF,eAAO,MAAM,OAAO,UAAW,SAAS,KAAG,MAAM,EAMhD,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,OAAO,UAAW,SAAS,KAAG,MAAM,EAehD,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAS9C,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAUlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAalD,CAAC;AAEF,eAAO,MAAM,gBAAgB,UAAW,SAAS,KAAG,MAAM,EA0BzD,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAOjD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAIlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAKlD,CAAC;AAEF,eAAO,MAAM,aAAa,QAAO,MAAM,EAQtC,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAKlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAsBlD,CAAC;AAEF,eAAO,MAAM,uBAAuB,UAAW,SAAS,KAAG,MAAM,EAehE,CAAC;AAiBF,eAAO,MAAM,cAAc,UAAW,SAAS,KAAG,MAAM,EAKvD,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAyBF,eAAO,MAAM,aAAa,UAAW,SAAS,KAAG,MAAM,EAOtD,CAAC;AAEF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EAI7C,CAAC;AAEF,eAAO,MAAM,OAAO,QAA6B,MAAM,EAQtD,CAAC;AAEF,eAAO,MAAM,UAAU,QAAO,MAAM,EAanC,CAAC;AAsCF,eAAO,MAAM,OAAO,UAAW,SAAS,KAAG,MAAM,EAehD,CAAC;AAEF,eAAO,MAAM,kBAAkB,UAAW,SAAS,KAAG,MAAM,EAmC3D,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EA2B7C,CAAC;AAEF,eAAO,MAAM,aAAa,UAAW,SAAS,KAAG,MAAM,EAEtD,CAAC;AASF,eAAO,MAAM,oBAAoB,UAAW,SAAS,KAAG,MAAM,EAoB7D,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAU9C,CAAC;AAEF,eAAO,MAAM,WAAW,UAAW,SAAS,KAAG,MAAM,EAIpD,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAK/C,CAAC;AAkBF,eAAO,MAAM,WAAW,UAAW,SAAS,KAAG,MAAM,EAkBpD,CAAC;AAEF,eAAO,MAAM,YAAY,UAAW,SAAS,KAAG,MAAM,EA4CrD,CAAC;AAEF,eAAO,MAAM,cAAc,UAAW,SAAS,KAAG,MAAM,EAcvD,CAAC;AAiBF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EAkB7C,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAEF,eAAO,MAAM,gBAAgB,UAAW,SAAS,KAAG,MAAM,EAMzD,CAAC;AAgBF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAEjD,CAAC;AAEF,eAAO,MAAM,MAAM,QAA6B,MAAM,EAMrD,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,UAAU,UAAW,SAAS,KAAG,MAAM,EAInD,CAAC;AAEF,eAAO,MAAM,OAAO,QAAO,MAAM,EAYhC,CAAC;AAEF,eAAO,MAAM,OAAO,UAAW,SAAS,KAAG,MAAM,EAOhD,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAYjD,CAAC;AAEF,eAAO,MAAM,GAAG,UAAW,SAAS,KAAG,MAAM,EAK5C,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAIlD,CAAC;AAEF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EAQ7C,CAAC;AAEF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EAI7C,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AA6BF,eA
AO,MAAM,UAAU,UAAW,SAAS,KAAG,MAAM,EAUnD,CAAC;AAEF,eAAO,MAAM,UAAU,QAAO,MAAM,EAYnC,CAAC;AAEF,eAAO,MAAM,cAAc,UAAW,SAAS,KAAG,MAAM,EAKvD,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC"}

package/dist/commonjs/model-libraries-snippets.js
@@ -3,6 +3,8 @@ Object.defineProperty(exports, "__esModule", { value: true });
exports.transformersJS = exports.transformers = exports.speechbrain = exports.stanza = exports.span_marker = exports.spacy = exports.setfit = exports.sentenceTransformers = exports.sampleFactory = exports.sam2 = exports.fastai = exports.stable_audio_tools = exports.sklearn = exports.seed_story = exports.saelens = exports.timm = exports.tensorflowtts = exports.relik = exports.pyannote_audio = exports.pyannote_audio_pipeline = exports.paddlenlp = exports.open_clip = exports.mesh_anything = exports.mars5_tts = exports.mamba_ssm = exports.tf_keras = exports.llama_cpp_python = exports.keras_hub = exports.keras_nlp = exports.keras = exports.htrflow = exports.gliner = exports.flair = exports.fairseq = exports.espnet = exports.espnetASR = exports.espnetTTS = exports.edsnlp = exports.cartesia_mlx = exports.cartesia_pytorch = exports.diffusionkit = exports.diffusers = exports.depth_pro = exports.depth_anything_v2 = exports.bm25s = exports.bertopic = exports.audioseal = exports.asteroid = exports.allennlp = exports.adapters = void 0;
exports.hezar = exports.threedtopia_xl = exports.whisperkit = exports.audiocraft = exports.pythae = exports.pxia = exports.nemo = exports.model2vec = exports.mlxim = exports.mlx = exports.birefnet = exports.yolov10 = exports.chattts = exports.voicecraft = exports.vfimamba = exports.sentis = exports.mlAgents = exports.stableBaselines3 = exports.fasttext = exports.peft = void 0;
 const library_to_tasks_js_1 = require("./library-to-tasks.js");
+const inputs_js_1 = require("./snippets/inputs.js");
+const common_js_1 = require("./snippets/common.js");
 const TAG_CUSTOM_CODE = "custom_code";
 function nameWithoutNamespace(modelId) {
     const splitted = modelId.split("/");
@@ -401,23 +403,32 @@ model = keras_hub.models.CausalLM.from_preset("hf://${model.id}", dtype="bfloat1
 `,
 ];
 exports.keras_hub = keras_hub;
-const llama_cpp_python = (model) =>
-
+const llama_cpp_python = (model) => {
+    const snippets = [
+        `from llama_cpp import Llama

 llm = Llama.from_pretrained(
 repo_id="${model.id}",
 filename="{{GGUF_FILE}}",
 )
-
-
-
-
-
-
-
-
-
-
+`,
+    ];
+    if (model.tags.includes("conversational")) {
+        const messages = (0, inputs_js_1.getModelInputSnippet)(model);
+        snippets.push(`llm.create_chat_completion(
+messages = ${(0, common_js_1.stringifyMessages)(messages, { attributeKeyQuotes: true, indent: "\t" })}
+)`);
+    }
+    else {
+        snippets.push(`output = llm(
+"Once upon a time,",
+max_tokens=512,
+echo=True
+)
+print(output)`);
+    }
+    return snippets;
+};
 exports.llama_cpp_python = llama_cpp_python;
 const tf_keras = (model) => [
 `# Note: 'keras<3.x' or 'tf_keras' must be installed (legacy)

package/dist/commonjs/model-libraries-snippets.spec.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"model-libraries-snippets.spec.d.ts","sourceRoot":"","sources":["../../src/model-libraries-snippets.spec.ts"],"names":[],"mappings":""}

package/dist/commonjs/model-libraries-snippets.spec.js
@@ -0,0 +1,51 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const vitest_1 = require("vitest");
+const model_libraries_snippets_js_1 = require("./model-libraries-snippets.js");
+(0, vitest_1.describe)("model-libraries-snippets", () => {
+    (0, vitest_1.it)("llama_cpp_python conversational", async () => {
+        const model = {
+            id: "bartowski/Llama-3.2-3B-Instruct-GGUF",
+            pipeline_tag: "text-generation",
+            tags: ["conversational"],
+            inference: "",
+        };
+        const snippet = (0, model_libraries_snippets_js_1.llama_cpp_python)(model);
+        (0, vitest_1.expect)(snippet.join("\n")).toEqual(`from llama_cpp import Llama
+
+llm = Llama.from_pretrained(
+repo_id="bartowski/Llama-3.2-3B-Instruct-GGUF",
+filename="{{GGUF_FILE}}",
+)
+
+llm.create_chat_completion(
+messages = [
+{
+"role": "user",
+"content": "What is the capital of France?"
+}
+]
+)`);
+    });
+    (0, vitest_1.it)("llama_cpp_python non-conversational", async () => {
+        const model = {
+            id: "mlabonne/gemma-2b-GGUF",
+            tags: [""],
+            inference: "",
+        };
+        const snippet = (0, model_libraries_snippets_js_1.llama_cpp_python)(model);
+        (0, vitest_1.expect)(snippet.join("\n")).toEqual(`from llama_cpp import Llama
+
+llm = Llama.from_pretrained(
+repo_id="mlabonne/gemma-2b-GGUF",
+filename="{{GGUF_FILE}}",
+)
+
+output = llm(
+"Once upon a time,",
+max_tokens=512,
+echo=True
+)
+print(output)`);
+    });
+});

package/dist/commonjs/snippets/curl.js
@@ -10,7 +10,7 @@ const snippetBasic = (model, accessToken) => ({
 -X POST \\
 -d '{"inputs": ${(0, inputs_js_1.getModelInputSnippet)(model, true)}}' \\
 -H 'Content-Type: application/json' \\
--H
+-H 'Authorization: Bearer ${accessToken || `{API_TOKEN}`}'`,
 });
 exports.snippetBasic = snippetBasic;
 const snippetTextGeneration = (model, accessToken, opts) => {
@@ -26,7 +26,7 @@ const snippetTextGeneration = (model, accessToken, opts) => {
 };
 return {
 content: `curl 'https://api-inference.huggingface.co/models/${model.id}/v1/chat/completions' \\
--H
+-H 'Authorization: Bearer ${accessToken || `{API_TOKEN}`}' \\
 -H 'Content-Type: application/json' \\
 --data '{
 "model": "${model.id}",
@@ -54,14 +54,14 @@ const snippetZeroShotClassification = (model, accessToken) => ({
 -X POST \\
 -d '{"inputs": ${(0, inputs_js_1.getModelInputSnippet)(model, true)}, "parameters": {"candidate_labels": ["refund", "legal", "faq"]}}' \\
 -H 'Content-Type: application/json' \\
--H
+-H 'Authorization: Bearer ${accessToken || `{API_TOKEN}`}'`,
 });
 exports.snippetZeroShotClassification = snippetZeroShotClassification;
 const snippetFile = (model, accessToken) => ({
 content: `curl https://api-inference.huggingface.co/models/${model.id} \\
 -X POST \\
 --data-binary '@${(0, inputs_js_1.getModelInputSnippet)(model, true, true)}' \\
--H
+-H 'Authorization: Bearer ${accessToken || `{API_TOKEN}`}'`,
 });
 exports.snippetFile = snippetFile;
 exports.curlSnippets = {

package/dist/commonjs/snippets/js.js
@@ -11,7 +11,7 @@ const snippetBasic = (model, accessToken) => ({
 "https://api-inference.huggingface.co/models/${model.id}",
 {
 headers: {
-Authorization: "Bearer ${accessToken || `{API_TOKEN}`}"
+Authorization: "Bearer ${accessToken || `{API_TOKEN}`}",
 "Content-Type": "application/json",
 },
 method: "POST",
@@ -142,7 +142,7 @@ const snippetZeroShotClassification = (model, accessToken) => ({
 "https://api-inference.huggingface.co/models/${model.id}",
 {
 headers: {
-Authorization: "Bearer ${accessToken || `{API_TOKEN}`}"
+Authorization: "Bearer ${accessToken || `{API_TOKEN}`}",
 "Content-Type": "application/json",
 },
 method: "POST",
@@ -164,7 +164,7 @@ const snippetTextToImage = (model, accessToken) => ({
 "https://api-inference.huggingface.co/models/${model.id}",
 {
 headers: {
-Authorization: "Bearer ${accessToken || `{API_TOKEN}`}"
+Authorization: "Bearer ${accessToken || `{API_TOKEN}`}",
 "Content-Type": "application/json",
 },
 method: "POST",
@@ -185,7 +185,7 @@ const snippetTextToAudio = (model, accessToken) => {
 "https://api-inference.huggingface.co/models/${model.id}",
 {
 headers: {
-Authorization: "Bearer ${accessToken || `{API_TOKEN}`}"
+Authorization: "Bearer ${accessToken || `{API_TOKEN}`}",
 "Content-Type": "application/json",
 },
 method: "POST",
@@ -226,7 +226,7 @@ const snippetFile = (model, accessToken) => ({
 "https://api-inference.huggingface.co/models/${model.id}",
 {
 headers: {
-Authorization: "Bearer ${accessToken || `{API_TOKEN}`}"
+Authorization: "Bearer ${accessToken || `{API_TOKEN}`}",
 "Content-Type": "application/json",
 },
 method: "POST",

package/dist/commonjs/tasks/audio-classification/inference.d.ts
@@ -13,17 +13,18 @@ export interface AudioClassificationInput {
      */
     inputs: string;
     /**
-     * Additional inference parameters
+     * Additional inference parameters for Audio Classification
      */
     parameters?: AudioClassificationParameters;
     [property: string]: unknown;
 }
 /**
- * Additional inference parameters
- *
  * Additional inference parameters for Audio Classification
  */
 export interface AudioClassificationParameters {
+    /**
+     * The function to apply to the model outputs in order to retrieve the scores.
+     */
     function_to_apply?: ClassificationOutputTransform;
     /**
      * When specified, limits the output to the top K most probable classes.

package/dist/commonjs/tasks/audio-classification/inference.d.ts.map
@@ -1 +1 @@
-
{"version":3,"file":"inference.d.ts","sourceRoot":"","sources":["../../../../src/tasks/audio-classification/inference.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AACH;;GAEG;AACH,MAAM,WAAW,wBAAwB;IACxC;;;OAGG;IACH,MAAM,EAAE,MAAM,CAAC;IACf;;OAEG;IACH,UAAU,CAAC,EAAE,6BAA6B,CAAC;IAC3C,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B;AACD
+
{"version":3,"file":"inference.d.ts","sourceRoot":"","sources":["../../../../src/tasks/audio-classification/inference.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AACH;;GAEG;AACH,MAAM,WAAW,wBAAwB;IACxC;;;OAGG;IACH,MAAM,EAAE,MAAM,CAAC;IACf;;OAEG;IACH,UAAU,CAAC,EAAE,6BAA6B,CAAC;IAC3C,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B;AACD;;GAEG;AACH,MAAM,WAAW,6BAA6B;IAC7C;;OAEG;IACH,iBAAiB,CAAC,EAAE,6BAA6B,CAAC;IAClD;;OAEG;IACH,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B;AACD;;GAEG;AACH,MAAM,MAAM,6BAA6B,GAAG,SAAS,GAAG,SAAS,GAAG,MAAM,CAAC;AAC3E,MAAM,MAAM,yBAAyB,GAAG,gCAAgC,EAAE,CAAC;AAC3E;;GAEG;AACH,MAAM,WAAW,gCAAgC;IAChD;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IACd;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IACd,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B"}

package/dist/commonjs/tasks/automatic-speech-recognition/inference.d.ts
@@ -13,14 +13,12 @@ export interface AutomaticSpeechRecognitionInput {
      */
     inputs: string;
     /**
-     * Additional inference parameters
+     * Additional inference parameters for Automatic Speech Recognition
      */
     parameters?: AutomaticSpeechRecognitionParameters;
     [property: string]: unknown;
 }
 /**
- * Additional inference parameters
- *
  * Additional inference parameters for Automatic Speech Recognition
  */
 export interface AutomaticSpeechRecognitionParameters {
@@ -36,8 +34,6 @@ export interface AutomaticSpeechRecognitionParameters {
 }
 /**
  * Parametrization of the text generation process
- *
- * Ad-hoc parametrization of the text generation process
  */
 export interface GenerationParameters {
     /**

package/dist/commonjs/tasks/automatic-speech-recognition/inference.d.ts.map
@@ -1 +1 @@
-
{"version":3,"file":"inference.d.ts","sourceRoot":"","sources":["../../../../src/tasks/automatic-speech-recognition/inference.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH;;GAEG;AACH,MAAM,WAAW,+BAA+B;IAC/C;;;OAGG;IACH,MAAM,EAAE,MAAM,CAAC;IACf;;OAEG;IACH,UAAU,CAAC,EAAE,oCAAoC,CAAC;IAClD,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B;AAED
+
{"version":3,"file":"inference.d.ts","sourceRoot":"","sources":["../../../../src/tasks/automatic-speech-recognition/inference.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH;;GAEG;AACH,MAAM,WAAW,+BAA+B;IAC/C;;;OAGG;IACH,MAAM,EAAE,MAAM,CAAC;IACf;;OAEG;IACH,UAAU,CAAC,EAAE,oCAAoC,CAAC;IAClD,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B;AAED;;GAEG;AACH,MAAM,WAAW,oCAAoC;IACpD;;OAEG;IACH,qBAAqB,CAAC,EAAE,oBAAoB,CAAC;IAC7C;;OAEG;IACH,iBAAiB,CAAC,EAAE,OAAO,CAAC;IAC5B,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B;AAED;;GAEG;AACH,MAAM,WAAW,oBAAoB;IACpC;;OAEG;IACH,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB;;OAEG;IACH,cAAc,CAAC,EAAE,kBAAkB,CAAC;IACpC;;;;;OAKG;IACH,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB;;;;;;;;OAQG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB;;;OAGG;IACH,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB;;OAEG;IACH,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB;;;OAGG;IACH,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB;;OAEG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB;;OAEG;IACH,KAAK,CAAC,EAAE,MAAM,CAAC;IACf;;;OAGG;IACH,KAAK,CAAC,EAAE,MAAM,CAAC;IACf;;;;;;OAMG;IACH,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB;;OAEG;IACH,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B;AAED;;GAEG;AACH,MAAM,MAAM,kBAAkB,GAAG,OAAO,GAAG,OAAO,CAAC;AAEnD;;GAEG;AACH,MAAM,WAAW,gCAAgC;IAChD;;;OAGG;IACH,MAAM,CAAC,EAAE,qCAAqC,EAAE,CAAC;IACjD;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IACb,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B;AAED,MAAM,WAAW,qCAAqC;IACrD;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IACb;;OAEG;IACH,UAAU,EAAE,MAAM,EAAE,CAAC;IACrB,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B"}

package/dist/commonjs/tasks/chat-completion/inference.d.ts
@@ -78,7 +78,7 @@ export interface ChatCompletionInput {
      * We generally recommend altering this or `top_p` but not both.
      */
     temperature?: number;
-    tool_choice?:
+    tool_choice?: ChatCompletionInputToolChoice;
     /**
      * A prompt to be appended before the tools
      */
@@ -88,7 +88,7 @@ export interface ChatCompletionInput {
      * Use this to provide a list of
      * functions the model may generate JSON inputs for.
      */
-    tools?:
+    tools?: ChatCompletionInputTool[];
     /**
      * An integer between 0 and 5 specifying the number of most likely tokens to return at each
      * token position, each with
@@ -144,16 +144,28 @@ export interface ChatCompletionInputStreamOptions {
     include_usage: boolean;
     [property: string]: unknown;
 }
-
-
-
+/**
+ *
+ * <https://platform.openai.com/docs/guides/function-calling/configuring-function-calling-behavior-using-the-tool_choice-parameter>
+ */
+export type ChatCompletionInputToolChoice = ChatCompletionInputToolChoiceEnum | ChatCompletionInputToolChoiceObject;
+/**
+ * Means the model can pick between generating a message or calling one or more tools.
+ *
+ * Means the model will not call any tool and instead generates a message.
+ *
+ * Means the model must call one or more tools.
+ */
+export type ChatCompletionInputToolChoiceEnum = "auto" | "none" | "required";
+export interface ChatCompletionInputToolChoiceObject {
+    function: ChatCompletionInputFunctionName;
     [property: string]: unknown;
 }
 export interface ChatCompletionInputFunctionName {
     name: string;
     [property: string]: unknown;
 }
-export interface
+export interface ChatCompletionInputTool {
     function: ChatCompletionInputFunctionDefinition;
     type: string;
     [property: string]: unknown;