@digipair/skill-ollama 0.8.25 → 0.8.27

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/index.cjs.js CHANGED
@@ -38433,7 +38433,7 @@ var OllamaEmbeddings = /*#__PURE__*/ function(Embeddings) {
38433
38433
  let OllamaService = class OllamaService {
38434
38434
  async model(params, _pinsSettingsList, context) {
38435
38435
  var _context_privates_OLLAMA_SERVER, _ref;
38436
- const { model = 'mistral', temperature = 0, keepAlive = '1440m', baseUrl = (_ref = (_context_privates_OLLAMA_SERVER = context.privates.OLLAMA_SERVER) != null ? _context_privates_OLLAMA_SERVER : process.env['OLLAMA_SERVER']) != null ? _ref : 'http://localhost:11434' } = params;
38436
+ const { model = 'mistral', temperature = 0, keepAlive = '1440m', baseUrl = (_ref = (_context_privates_OLLAMA_SERVER = context.privates.OLLAMA_SERVER) != null ? _context_privates_OLLAMA_SERVER : process.env['OLLAMA_SERVER']) != null ? _ref : 'http://localhost:11434', format } = params;
38437
38437
  const modelInstance = new Ollama({
38438
38438
  model,
38439
38439
  temperature,
package/index.esm.js CHANGED
@@ -38429,7 +38429,7 @@ var OllamaEmbeddings = /*#__PURE__*/ function(Embeddings) {
38429
38429
  let OllamaService = class OllamaService {
38430
38430
  async model(params, _pinsSettingsList, context) {
38431
38431
  var _context_privates_OLLAMA_SERVER, _ref;
38432
- const { model = 'mistral', temperature = 0, keepAlive = '1440m', baseUrl = (_ref = (_context_privates_OLLAMA_SERVER = context.privates.OLLAMA_SERVER) != null ? _context_privates_OLLAMA_SERVER : process.env['OLLAMA_SERVER']) != null ? _ref : 'http://localhost:11434' } = params;
38432
+ const { model = 'mistral', temperature = 0, keepAlive = '1440m', baseUrl = (_ref = (_context_privates_OLLAMA_SERVER = context.privates.OLLAMA_SERVER) != null ? _context_privates_OLLAMA_SERVER : process.env['OLLAMA_SERVER']) != null ? _ref : 'http://localhost:11434', format } = params;
38433
38433
  const modelInstance = new Ollama({
38434
38434
  model,
38435
38435
  temperature,
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@digipair/skill-ollama",
3
- "version": "0.8.25",
3
+ "version": "0.8.27",
4
4
  "dependencies": {},
5
5
  "main": "./index.cjs.js",
6
6
  "module": "./index.esm.js"
package/schema.json CHANGED
@@ -40,6 +40,15 @@
40
40
  "schema": {
41
41
  "type": "string"
42
42
  }
43
+ },
44
+ {
45
+ "name": "format",
46
+ "summary": "Format de sortie",
47
+ "required": false,
48
+ "description": "Format de sortie des données générées par le modèle",
49
+ "schema": {
50
+ "type": "string"
51
+ }
43
52
  }
44
53
  ],
45
54
  "x-events": []