@huggingface/tasks 0.10.18 → 0.10.20

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -4800,7 +4800,7 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
4800
4800
  repoName: "audioseal",
4801
4801
  repoUrl: "https://github.com/facebookresearch/audioseal",
4802
4802
  filter: false,
4803
- countDownloads: "path_extension:pt",
4803
+ countDownloads: `path_extension:"pth"`,
4804
4804
  snippets: audioseal
4805
4805
  },
4806
4806
  bertopic: {
@@ -4918,6 +4918,13 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
4918
4918
  filter: false,
4919
4919
  countDownloads: `path:"gliner_config.json"`
4920
4920
  },
4921
+ "glyph-byt5": {
4922
+ prettyLabel: "Glyph-ByT5",
4923
+ repoName: "Glyph-ByT5",
4924
+ repoUrl: "https://github.com/AIGText/Glyph-ByT5",
4925
+ filter: false,
4926
+ countDownloads: `path:"checkpoints/byt5_model.pt"`
4927
+ },
4921
4928
  grok: {
4922
4929
  prettyLabel: "Grok",
4923
4930
  repoName: "Grok",
@@ -6077,7 +6084,7 @@ var SKUS = {
6077
6084
  function isGgufModel(model) {
6078
6085
  return model.tags.includes("gguf");
6079
6086
  }
6080
- var snippetLlamacpp = (model) => {
6087
+ var snippetLlamacpp = (model, filepath) => {
6081
6088
  return [
6082
6089
  `# Option 1: use llama.cpp with brew
6083
6090
  brew install llama.cpp
@@ -6085,7 +6092,7 @@ brew install llama.cpp
6085
6092
  # Load and run the model
6086
6093
  llama \\
6087
6094
  --hf-repo "${model.id}" \\
6088
- --hf-file {{GGUF_FILE}} \\
6095
+ --hf-file ${filepath ?? "{{GGUF_FILE}}"} \\
6089
6096
  -p "I believe the meaning of life is" \\
6090
6097
  -n 128`,
6091
6098
  `# Option 2: build llama.cpp from source with curl support
@@ -6096,7 +6103,7 @@ LLAMA_CURL=1 make
6096
6103
  # Load and run the model
6097
6104
  ./main \\
6098
6105
  --hf-repo "${model.id}" \\
6099
- -m {{GGUF_FILE}} \\
6106
+ -m ${filepath ?? "{{GGUF_FILE}}"} \\
6100
6107
  -p "I believe the meaning of life is" \\
6101
6108
  -n 128`
6102
6109
  ];
@@ -6114,7 +6121,7 @@ var LOCAL_APPS = {
6114
6121
  docsUrl: "https://lmstudio.ai",
6115
6122
  mainTask: "text-generation",
6116
6123
  displayOnModelPage: isGgufModel,
6117
- deeplink: (model) => new URL(`lmstudio://open_from_hf?model=${model.id}`)
6124
+ deeplink: (model, filepath) => new URL(`lmstudio://open_from_hf?model=${model.id}` + (filepath ? `&file=${filepath}` : ""))
6118
6125
  },
6119
6126
  jan: {
6120
6127
  prettyLabel: "Jan",
package/dist/index.js CHANGED
@@ -4761,7 +4761,7 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
4761
4761
  repoName: "audioseal",
4762
4762
  repoUrl: "https://github.com/facebookresearch/audioseal",
4763
4763
  filter: false,
4764
- countDownloads: "path_extension:pt",
4764
+ countDownloads: `path_extension:"pth"`,
4765
4765
  snippets: audioseal
4766
4766
  },
4767
4767
  bertopic: {
@@ -4879,6 +4879,13 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
4879
4879
  filter: false,
4880
4880
  countDownloads: `path:"gliner_config.json"`
4881
4881
  },
4882
+ "glyph-byt5": {
4883
+ prettyLabel: "Glyph-ByT5",
4884
+ repoName: "Glyph-ByT5",
4885
+ repoUrl: "https://github.com/AIGText/Glyph-ByT5",
4886
+ filter: false,
4887
+ countDownloads: `path:"checkpoints/byt5_model.pt"`
4888
+ },
4882
4889
  grok: {
4883
4890
  prettyLabel: "Grok",
4884
4891
  repoName: "Grok",
@@ -6038,7 +6045,7 @@ var SKUS = {
6038
6045
  function isGgufModel(model) {
6039
6046
  return model.tags.includes("gguf");
6040
6047
  }
6041
- var snippetLlamacpp = (model) => {
6048
+ var snippetLlamacpp = (model, filepath) => {
6042
6049
  return [
6043
6050
  `# Option 1: use llama.cpp with brew
6044
6051
  brew install llama.cpp
@@ -6046,7 +6053,7 @@ brew install llama.cpp
6046
6053
  # Load and run the model
6047
6054
  llama \\
6048
6055
  --hf-repo "${model.id}" \\
6049
- --hf-file {{GGUF_FILE}} \\
6056
+ --hf-file ${filepath ?? "{{GGUF_FILE}}"} \\
6050
6057
  -p "I believe the meaning of life is" \\
6051
6058
  -n 128`,
6052
6059
  `# Option 2: build llama.cpp from source with curl support
@@ -6057,7 +6064,7 @@ LLAMA_CURL=1 make
6057
6064
  # Load and run the model
6058
6065
  ./main \\
6059
6066
  --hf-repo "${model.id}" \\
6060
- -m {{GGUF_FILE}} \\
6067
+ -m ${filepath ?? "{{GGUF_FILE}}"} \\
6061
6068
  -p "I believe the meaning of life is" \\
6062
6069
  -n 128`
6063
6070
  ];
@@ -6075,7 +6082,7 @@ var LOCAL_APPS = {
6075
6082
  docsUrl: "https://lmstudio.ai",
6076
6083
  mainTask: "text-generation",
6077
6084
  displayOnModelPage: isGgufModel,
6078
- deeplink: (model) => new URL(`lmstudio://open_from_hf?model=${model.id}`)
6085
+ deeplink: (model, filepath) => new URL(`lmstudio://open_from_hf?model=${model.id}` + (filepath ? `&file=${filepath}` : ""))
6079
6086
  },
6080
6087
  jan: {
6081
6088
  prettyLabel: "Jan",
@@ -29,13 +29,13 @@ export type LocalApp = {
29
29
  /**
30
30
  * If the app supports deeplink, URL to open.
31
31
  */
32
- deeplink: (model: ModelData) => URL;
32
+ deeplink: (model: ModelData, filepath?: string) => URL;
33
33
  } | {
34
34
  /**
35
35
  * And if not (mostly llama.cpp), snippet to copy/paste in your terminal
36
36
  * Support the placeholder {{GGUF_FILE}} that will be replaced by the gguf file path or the list of available files.
37
37
  */
38
- snippet: (model: ModelData) => string | string[];
38
+ snippet: (model: ModelData, filepath?: string) => string | string[];
39
39
  });
40
40
  declare function isGgufModel(model: ModelData): boolean;
41
41
  /**
@@ -55,14 +55,14 @@ export declare const LOCAL_APPS: {
55
55
  docsUrl: string;
56
56
  mainTask: "text-generation";
57
57
  displayOnModelPage: typeof isGgufModel;
58
- snippet: (model: ModelData) => string[];
58
+ snippet: (model: ModelData, filepath?: string) => string[];
59
59
  };
60
60
  lmstudio: {
61
61
  prettyLabel: string;
62
62
  docsUrl: string;
63
63
  mainTask: "text-generation";
64
64
  displayOnModelPage: typeof isGgufModel;
65
- deeplink: (model: ModelData) => URL;
65
+ deeplink: (model: ModelData, filepath: string | undefined) => URL;
66
66
  };
67
67
  jan: {
68
68
  prettyLabel: string;
@@ -1 +1 @@
1
- {"version":3,"file":"local-apps.d.ts","sourceRoot":"","sources":["../../src/local-apps.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAC9C,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,aAAa,CAAC;AAEhD;;GAEG;AACH,MAAM,MAAM,QAAQ,GAAG;IACtB;;OAEG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,QAAQ,EAAE,YAAY,CAAC;IACvB;;OAEG;IACH,SAAS,CAAC,EAAE,OAAO,CAAC;IAEpB,UAAU,CAAC,EAAE,OAAO,CAAC;IACrB;;OAEG;IACH,kBAAkB,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,OAAO,CAAC;CAClD,GAAG,CACD;IACA;;OAEG;IACH,QAAQ,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,GAAG,CAAC;CACnC,GACD;IACA;;;OAGG;IACH,OAAO,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,MAAM,GAAG,MAAM,EAAE,CAAC;CAChD,CACH,CAAC;AAEF,iBAAS,WAAW,CAAC,KAAK,EAAE,SAAS,WAEpC;AA2BD;;;;;;;;;;GAUG;AACH,eAAO,MAAM,UAAU;;;;;;yBApCS,SAAS,KAAG,MAAM,EAAE;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAkIhB,CAAC;AAErC,MAAM,MAAM,WAAW,GAAG,MAAM,OAAO,UAAU,CAAC"}
1
+ {"version":3,"file":"local-apps.d.ts","sourceRoot":"","sources":["../../src/local-apps.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAC9C,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,aAAa,CAAC;AAEhD;;GAEG;AACH,MAAM,MAAM,QAAQ,GAAG;IACtB;;OAEG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,QAAQ,EAAE,YAAY,CAAC;IACvB;;OAEG;IACH,SAAS,CAAC,EAAE,OAAO,CAAC;IAEpB,UAAU,CAAC,EAAE,OAAO,CAAC;IACrB;;OAEG;IACH,kBAAkB,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,OAAO,CAAC;CAClD,GAAG,CACD;IACA;;OAEG;IACH,QAAQ,EAAE,CAAC,KAAK,EAAE,SAAS,EAAE,QAAQ,CAAC,EAAE,MAAM,KAAK,GAAG,CAAC;CACtD,GACD;IACA;;;OAGG;IACH,OAAO,EAAE,CAAC,KAAK,EAAE,SAAS,EAAE,QAAQ,CAAC,EAAE,MAAM,KAAK,MAAM,GAAG,MAAM,EAAE,CAAC;CACnE,CACH,CAAC;AAEF,iBAAS,WAAW,CAAC,KAAK,EAAE,SAAS,WAEpC;AA2BD;;;;;;;;;;GAUG;AACH,eAAO,MAAM,UAAU;;;;;;yBApCS,SAAS,aAAa,MAAM,KAAG,MAAM,EAAE;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAmInC,CAAC;AAErC,MAAM,MAAM,WAAW,GAAG,MAAM,OAAO,UAAU,CAAC"}
@@ -209,6 +209,13 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
209
209
  filter: false;
210
210
  countDownloads: string;
211
211
  };
212
+ "glyph-byt5": {
213
+ prettyLabel: string;
214
+ repoName: string;
215
+ repoUrl: string;
216
+ filter: false;
217
+ countDownloads: string;
218
+ };
212
219
  grok: {
213
220
  prettyLabel: string;
214
221
  repoName: string;
@@ -487,6 +494,6 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
487
494
  };
488
495
  };
489
496
  export type ModelLibraryKey = keyof typeof MODEL_LIBRARIES_UI_ELEMENTS;
490
- export declare const ALL_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "bm25s" | "chat_tts" | "diffusers" | "doctr" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "grok" | "hunyuan-dit" | "keras" | "keras-nlp" | "k2" | "mindspore" | "ml-agents" | "mlx" | "mlx-image" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pyannote-audio" | "pythae" | "recurrentgemma" | "sample-factory" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "stable-audio-tools" | "diffusion-single-file" | "stable-baselines3" | "stanza" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "voicecraft" | "whisperkit")[];
491
- export declare const ALL_DISPLAY_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "bm25s" | "chat_tts" | "diffusers" | "doctr" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "grok" | "hunyuan-dit" | "keras" | "keras-nlp" | "k2" | "mindspore" | "ml-agents" | "mlx" | "mlx-image" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pyannote-audio" | "pythae" | "recurrentgemma" | "sample-factory" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "stable-audio-tools" | "diffusion-single-file" | "stable-baselines3" | "stanza" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "voicecraft" | "whisperkit")[];
497
+ export declare const ALL_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "bm25s" | "chat_tts" | "diffusers" | "doctr" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hunyuan-dit" | "keras" | "keras-nlp" | "k2" | "mindspore" | "ml-agents" | "mlx" | "mlx-image" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pyannote-audio" | "pythae" | "recurrentgemma" | "sample-factory" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "stable-audio-tools" | "diffusion-single-file" | "stable-baselines3" | "stanza" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "voicecraft" | "whisperkit")[];
498
+ export declare const ALL_DISPLAY_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "bm25s" | "chat_tts" | "diffusers" | "doctr" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hunyuan-dit" | "keras" | "keras-nlp" | "k2" | "mindspore" | "ml-agents" | "mlx" | "mlx-image" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pyannote-audio" | "pythae" | "recurrentgemma" | "sample-factory" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "stable-audio-tools" | "diffusion-single-file" | "stable-baselines3" | "stanza" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "voicecraft" | "whisperkit")[];
492
499
  //# sourceMappingURL=model-libraries.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"model-libraries.d.ts","sourceRoot":"","sources":["../../src/model-libraries.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAC9C,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,6BAA6B,CAAC;AAEtE;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAChC;;;;OAIG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,QAAQ,CAAC,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,MAAM,EAAE,CAAC;IAC1C;;;;;OAKG;IACH,cAAc,CAAC,EAAE,kBAAkB,CAAC;IACpC;;;OAGG;IACH,MAAM,CAAC,EAAE,OAAO,CAAC;CACjB;AAED;;;;;;;;;;;;;GAaG;AAEH,eAAO,MAAM,2BAA2B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAibI,CAAC;AAE7C,MAAM,MAAM,eAAe,GAAG,MAAM,OAAO,2BAA2B,CAAC;AAEvE,eAAO,MAAM,sBAAsB,oxBAAgE,CAAC;AAEpG,eAAO,MAAM,8BAA8B,oxBAQ1B,CAAC"}
1
+ {"version":3,"file":"model-libraries.d.ts","sourceRoot":"","sources":["../../src/model-libraries.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAC9C,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,6BAA6B,CAAC;AAEtE;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAChC;;;;OAIG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,QAAQ,CAAC,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,MAAM,EAAE,CAAC;IAC1C;;;;;OAKG;IACH,cAAc,CAAC,EAAE,kBAAkB,CAAC;IACpC;;;OAGG;IACH,MAAM,CAAC,EAAE,OAAO,CAAC;CACjB;AAED;;;;;;;;;;;;;GAaG;AAEH,eAAO,MAAM,2BAA2B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAwbI,CAAC;AAE7C,MAAM,MAAM,eAAe,GAAG,MAAM,OAAO,2BAA2B,CAAC;AAEvE,eAAO,MAAM,sBAAsB,myBAAgE,CAAC;AAEpG,eAAO,MAAM,8BAA8B,myBAQ1B,CAAC"}
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "@huggingface/tasks",
3
3
  "packageManager": "pnpm@8.10.5",
4
- "version": "0.10.18",
4
+ "version": "0.10.20",
5
5
  "description": "List of ML tasks for huggingface.co/tasks",
6
6
  "repository": "https://github.com/huggingface/huggingface.js.git",
7
7
  "publishConfig": {
package/src/local-apps.ts CHANGED
@@ -32,14 +32,14 @@ export type LocalApp = {
32
32
  /**
33
33
  * If the app supports deeplink, URL to open.
34
34
  */
35
- deeplink: (model: ModelData) => URL;
35
+ deeplink: (model: ModelData, filepath?: string) => URL;
36
36
  }
37
37
  | {
38
38
  /**
39
39
  * And if not (mostly llama.cpp), snippet to copy/paste in your terminal
40
40
  * Support the placeholder {{GGUF_FILE}} that will be replaced by the gguf file path or the list of available files.
41
41
  */
42
- snippet: (model: ModelData) => string | string[];
42
+ snippet: (model: ModelData, filepath?: string) => string | string[];
43
43
  }
44
44
  );
45
45
 
@@ -47,7 +47,7 @@ function isGgufModel(model: ModelData) {
47
47
  return model.tags.includes("gguf");
48
48
  }
49
49
 
50
- const snippetLlamacpp = (model: ModelData): string[] => {
50
+ const snippetLlamacpp = (model: ModelData, filepath?: string): string[] => {
51
51
  return [
52
52
  `# Option 1: use llama.cpp with brew
53
53
  brew install llama.cpp
@@ -55,7 +55,7 @@ brew install llama.cpp
55
55
  # Load and run the model
56
56
  llama \\
57
57
  --hf-repo "${model.id}" \\
58
- --hf-file {{GGUF_FILE}} \\
58
+ --hf-file ${filepath ?? "{{GGUF_FILE}}"} \\
59
59
  -p "I believe the meaning of life is" \\
60
60
  -n 128`,
61
61
  `# Option 2: build llama.cpp from source with curl support
@@ -66,7 +66,7 @@ LLAMA_CURL=1 make
66
66
  # Load and run the model
67
67
  ./main \\
68
68
  --hf-repo "${model.id}" \\
69
- -m {{GGUF_FILE}} \\
69
+ -m ${filepath ?? "{{GGUF_FILE}}"} \\
70
70
  -p "I believe the meaning of life is" \\
71
71
  -n 128`,
72
72
  ];
@@ -96,7 +96,8 @@ export const LOCAL_APPS = {
96
96
  docsUrl: "https://lmstudio.ai",
97
97
  mainTask: "text-generation",
98
98
  displayOnModelPage: isGgufModel,
99
- deeplink: (model) => new URL(`lmstudio://open_from_hf?model=${model.id}`),
99
+ deeplink: (model, filepath) =>
100
+ new URL(`lmstudio://open_from_hf?model=${model.id}` + (filepath ? `&file=${filepath}` : "")),
100
101
  },
101
102
  jan: {
102
103
  prettyLabel: "Jan",
@@ -96,7 +96,7 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
96
96
  repoName: "audioseal",
97
97
  repoUrl: "https://github.com/facebookresearch/audioseal",
98
98
  filter: false,
99
- countDownloads: "path_extension:pt",
99
+ countDownloads: `path_extension:"pth"`,
100
100
  snippets: snippets.audioseal,
101
101
  },
102
102
  bertopic: {
@@ -214,6 +214,13 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
214
214
  filter: false,
215
215
  countDownloads: `path:"gliner_config.json"`,
216
216
  },
217
+ "glyph-byt5": {
218
+ prettyLabel: "Glyph-ByT5",
219
+ repoName: "Glyph-ByT5",
220
+ repoUrl: "https://github.com/AIGText/Glyph-ByT5",
221
+ filter: false,
222
+ countDownloads: `path:"checkpoints/byt5_model.pt"`,
223
+ },
217
224
  grok: {
218
225
  prettyLabel: "Grok",
219
226
  repoName: "Grok",