@infrarix/locopilot 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +239 -0
- package/dist/api/index.js +79 -0
- package/dist/api/index.js.map +1 -0
- package/dist/api/middleware/rateLimiter.js +27 -0
- package/dist/api/middleware/rateLimiter.js.map +1 -0
- package/dist/api/routes/chat.js +75 -0
- package/dist/api/routes/chat.js.map +1 -0
- package/dist/api/routes/completions.js +72 -0
- package/dist/api/routes/completions.js.map +1 -0
- package/dist/api/routes/health.js +52 -0
- package/dist/api/routes/health.js.map +1 -0
- package/dist/api/routes/models.js +50 -0
- package/dist/api/routes/models.js.map +1 -0
- package/dist/api/routes/training.js +10 -0
- package/dist/api/routes/training.js.map +1 -0
- package/dist/api/services/localRouter.js +201 -0
- package/dist/api/services/localRouter.js.map +1 -0
- package/dist/api/services/localStubs.js +28 -0
- package/dist/api/services/localStubs.js.map +1 -0
- package/dist/api/services/ollama.js +22 -0
- package/dist/api/services/ollama.js.map +1 -0
- package/dist/api/types.js +3 -0
- package/dist/api/types.js.map +1 -0
- package/dist/api/utils/sse.js +78 -0
- package/dist/api/utils/sse.js.map +1 -0
- package/dist/cli/commands/doctor.js +230 -0
- package/dist/cli/commands/doctor.js.map +1 -0
- package/dist/cli/commands/expose.js +98 -0
- package/dist/cli/commands/expose.js.map +1 -0
- package/dist/cli/commands/init.js +340 -0
- package/dist/cli/commands/init.js.map +1 -0
- package/dist/cli/commands/login.js +116 -0
- package/dist/cli/commands/login.js.map +1 -0
- package/dist/cli/commands/logout.js +38 -0
- package/dist/cli/commands/logout.js.map +1 -0
- package/dist/cli/commands/logs.js +95 -0
- package/dist/cli/commands/logs.js.map +1 -0
- package/dist/cli/commands/models.js +106 -0
- package/dist/cli/commands/models.js.map +1 -0
- package/dist/cli/commands/start.js +132 -0
- package/dist/cli/commands/start.js.map +1 -0
- package/dist/cli/commands/train.js +211 -0
- package/dist/cli/commands/train.js.map +1 -0
- package/dist/cli/commands/usage.js +43 -0
- package/dist/cli/commands/usage.js.map +1 -0
- package/dist/cli/commands/whoami.js +54 -0
- package/dist/cli/commands/whoami.js.map +1 -0
- package/dist/cli/index.js +49 -0
- package/dist/cli/index.js.map +1 -0
- package/dist/cli/utils/banner.js +177 -0
- package/dist/cli/utils/banner.js.map +1 -0
- package/dist/cli/utils/paths.js +37 -0
- package/dist/cli/utils/paths.js.map +1 -0
- package/dist/cloud/client.js +157 -0
- package/dist/cloud/client.js.map +1 -0
- package/dist/shared/constants.js +39 -0
- package/dist/shared/constants.js.map +1 -0
- package/dist/shared/crypto.js +26 -0
- package/dist/shared/crypto.js.map +1 -0
- package/dist/shared/db/pool.js +83 -0
- package/dist/shared/db/pool.js.map +1 -0
- package/dist/shared/errors.js +59 -0
- package/dist/shared/errors.js.map +1 -0
- package/dist/shared/index.js +24 -0
- package/dist/shared/index.js.map +1 -0
- package/dist/shared/ndjson.js +39 -0
- package/dist/shared/ndjson.js.map +1 -0
- package/dist/shared/runtime/ollama/index.js +55 -0
- package/dist/shared/runtime/ollama/index.js.map +1 -0
- package/dist/shared/types.js +3 -0
- package/dist/shared/types.js.map +1 -0
- package/dist/training/adapters/axolotl.js +83 -0
- package/dist/training/adapters/axolotl.js.map +1 -0
- package/dist/training/adapters/axolotl_runner.py +38 -0
- package/dist/training/adapters/mlx.js +57 -0
- package/dist/training/adapters/mlx.js.map +1 -0
- package/dist/training/adapters/mlx_runner.py +175 -0
- package/dist/training/adapters/unsloth.js +57 -0
- package/dist/training/adapters/unsloth.js.map +1 -0
- package/dist/training/adapters/unsloth_runner.py +116 -0
- package/dist/training/index.js +47 -0
- package/dist/training/index.js.map +1 -0
- package/dist/training/types.js +18 -0
- package/dist/training/types.js.map +1 -0
- package/dist/training/validator.js +67 -0
- package/dist/training/validator.js.map +1 -0
- package/dist/worker/executor.js +98 -0
- package/dist/worker/executor.js.map +1 -0
- package/dist/worker/handlers.js +197 -0
- package/dist/worker/handlers.js.map +1 -0
- package/dist/worker/index.js +45 -0
- package/dist/worker/index.js.map +1 -0
- package/dist/worker/logStore.js +24 -0
- package/dist/worker/logStore.js.map +1 -0
- package/dist/worker/types.js +3 -0
- package/dist/worker/types.js.map +1 -0
- package/dist/worker/worker.js +12 -0
- package/dist/worker/worker.js.map +1 -0
- package/package.json +81 -0
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
'use strict';
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getDatabase = getDatabase;
exports.query = query;
exports.withTransaction = withTransaction;
exports.shutdown = shutdown;
const path_1 = require("path");
const os_1 = require("os");
const sqlite3_1 = __importDefault(require("sqlite3"));
// Default on-disk database location: ~/.locopilot/db.sqlite
const DEFAULT_SQLITE_PATH = (0, path_1.join)((0, os_1.homedir)(), '.locopilot', 'db.sqlite');
/**
 * Promise-based wrapper around the callback-style sqlite3 driver,
 * exposing a pg-like `{ rows, rowCount }` result shape.
 */
class SQLiteDatabase {
    db;
    constructor(filename) {
        this.db = new sqlite3_1.default.Database(filename);
        // FIX: previously logged the literal text "$(unknown)"; interpolate the actual path.
        console.log(`[db] Using SQLite at ${filename}`);
    }
    /**
     * Runs a single SQL statement.
     * SELECT statements resolve with their rows; everything else resolves with
     * `rows: []` and the count of affected rows.
     * NOTE(review): statements beginning with WITH or PRAGMA can also return rows
     * in SQLite but are routed through `run` here and would yield [] — confirm no
     * caller relies on row output from such statements.
     */
    async query(text, params) {
        const isSelect = text.trim().toUpperCase().startsWith('SELECT');
        return new Promise((resolve, reject) => {
            if (isSelect) {
                this.db.all(text, params ?? [], (err, rows) => {
                    if (err)
                        return reject(err);
                    resolve({ rows: rows, rowCount: rows.length });
                });
            }
            else {
                // Plain `function` (not arrow) so sqlite3 can bind `this.changes`.
                this.db.run(text, params ?? [], function (err) {
                    if (err)
                        return reject(err);
                    resolve({ rows: [], rowCount: this.changes });
                });
            }
        });
    }
    /** Runs `fn` inside BEGIN/COMMIT, rolling back on any thrown error. */
    async withTransaction(fn) {
        await this.query('BEGIN');
        try {
            await fn(this);
            await this.query('COMMIT');
        }
        catch (err) {
            await this.query('ROLLBACK');
            throw err;
        }
    }
    /** Closes the underlying sqlite3 handle. */
    async shutdown() {
        return new Promise((resolve, reject) => {
            this.db.close((err) => {
                if (err)
                    return reject(err);
                resolve();
            });
        });
    }
    /** Rejects if the database cannot execute a trivial query. */
    async checkHealth() {
        await this.query('SELECT 1');
    }
}
// Lazily-created process-wide singleton; see getDatabase().
let _instance = null;
/** Returns the singleton database, creating it on first use from $SQLITE_PATH or the default path. */
function getDatabase() {
    if (!_instance) {
        const sqlitePath = process.env.SQLITE_PATH || DEFAULT_SQLITE_PATH;
        _instance = new SQLiteDatabase(sqlitePath);
    }
    return _instance;
}
/** Convenience: run a single statement on the singleton database. */
async function query(text, params) {
    return getDatabase().query(text, params);
}
/** Convenience: run a transaction on the singleton database. */
async function withTransaction(fn) {
    return getDatabase().withTransaction(fn);
}
/** Closes and clears the singleton so a later getDatabase() reopens it. */
async function shutdown() {
    if (_instance) {
        await _instance.shutdown();
        _instance = null;
    }
}
//# sourceMappingURL=pool.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"pool.js","sourceRoot":"","sources":["../../../src/shared/db/pool.ts"],"names":[],"mappings":"AAAA,YAAY,CAAC;;;;;AA6Eb,kCAMC;AAED,sBAEC;AAED,0CAEC;AAED,4BAKC;AAhGD,+BAA4B;AAC5B,2BAA6B;AAC7B,sDAA8B;AAkB9B,MAAM,mBAAmB,GAAG,IAAA,WAAI,EAAC,IAAA,YAAO,GAAE,EAAE,YAAY,EAAE,WAAW,CAAC,CAAC;AAEvE,MAAM,cAAc;IACV,EAAE,CAAmB;IAE7B,YAAY,QAAgB;QAC1B,IAAI,CAAC,EAAE,GAAG,IAAI,iBAAO,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;QACzC,OAAO,CAAC,GAAG,CAAC,wBAAwB,QAAQ,EAAE,CAAC,CAAC;IAClD,CAAC;IAED,KAAK,CAAC,KAAK,CAAI,IAAY,EAAE,MAAkB;QAC7C,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,EAAE,CAAC,WAAW,EAAE,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC;QAEhE,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YACrC,IAAI,QAAQ,EAAE,CAAC;gBACb,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI,EAAE,MAAM,IAAI,EAAE,EAAE,CAAC,GAAiB,EAAE,IAAe,EAAE,EAAE;oBACrE,IAAI,GAAG;wBAAE,OAAO,MAAM,CAAC,GAAG,CAAC,CAAC;oBAC5B,OAAO,CAAC,EAAE,IAAI,EAAE,IAAW,EAAE,QAAQ,EAAE,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC;gBACxD,CAAC,CAAC,CAAC;YACL,CAAC;iBAAM,CAAC;gBACN,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI,EAAE,MAAM,IAAI,EAAE,EAAE,UAAmC,GAAiB;oBAClF,IAAI,GAAG;wBAAE,OAAO,MAAM,CAAC,GAAG,CAAC,CAAC;oBAC5B,OAAO,CAAC,EAAE,IAAI,EAAE,EAAE,EAAE,QAAQ,EAAE,IAAI,CAAC,OAAO,EAAE,CAAC,CAAC;gBAChD,CAAC,CAAC,CAAC;YACL,CAAC;QACH,CAAC,CAAC,CAAC;IACL,CAAC;IAED,KAAK,CAAC,eAAe,CAAC,EAAgD;QACpE,MAAM,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC1B,IAAI,CAAC;YACH,MAAM,EAAE,CAAC,IAAoC,CAAC,CAAC;YAC/C,MAAM,IAAI,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;QAC7B,CAAC;QAAC,OAAO,GAAG,EAAE,CAAC;YACb,MAAM,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC;YAC7B,MAAM,GAAG,CAAC;QACZ,CAAC;IACH,CAAC;IAED,KAAK,CAAC,QAAQ;QACZ,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YACrC,IAAI,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,GAAiB,EAAE,EAAE;gBAClC,IAAI,GAAG;oBAAE,OAAO,MAAM,CAAC,GAAG,CAAC,CAAC;gBAC5B,OAAO,EAAE,CAAC;YACZ,CAAC,CAAC,CAAC;QACL,CAAC,CAAC,CAAC;IACL,CAAC;IAED,KAAK,CAAC,WAAW;QACf,MAAM,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC;IAC/B,CAAC;CACF;AAED,IAAI,SAAS,GAAoB,IAAI,CAAC;AAEtC,SAAgB,WAAW;IACzB,IAAI,CAAC,SAAS,EAAE,CAAC;QACf,MAAM,UAAU,GAAG,OAAO,CAAC,GAAG,CAAC,WAAW,I
AAI,mBAAmB,CAAC;QAClE,SAAS,GAAG,IAAI,cAAc,CAAC,UAAU,CAAC,CAAC;IAC7C,CAAC;IACD,OAAO,SAAS,CAAC;AACnB,CAAC;AAEM,KAAK,UAAU,KAAK,CAAU,IAAY,EAAE,MAAkB;IACnE,OAAO,WAAW,EAAE,CAAC,KAAK,CAAI,IAAI,EAAE,MAAM,CAAC,CAAC;AAC9C,CAAC;AAEM,KAAK,UAAU,eAAe,CAAC,EAAgD;IACpF,OAAO,WAAW,EAAE,CAAC,eAAe,CAAC,EAAE,CAAC,CAAC;AAC3C,CAAC;AAEM,KAAK,UAAU,QAAQ;IAC5B,IAAI,SAAS,EAAE,CAAC;QACd,MAAM,SAAS,CAAC,QAAQ,EAAE,CAAC;QAC3B,SAAS,GAAG,IAAI,CAAC;IACnB,CAAC;AACH,CAAC"}
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.NotFoundError = exports.ProviderError = exports.ValidationError = exports.RateLimitError = exports.AuthError = exports.AppError = void 0;
|
|
4
|
+
class AppError extends Error {
    statusCode;
    code;
    extra;
    /**
     * Base class for HTTP-mapped application errors.
     * @param {string} message - Human-readable error description.
     * @param {number} statusCode - HTTP status to respond with.
     * @param {string} [code] - Stable machine-readable error code; falls back to `message` in JSON output.
     * @param {Record<string, unknown>} [extra] - Extra fields merged into the JSON body.
     */
    constructor(message, statusCode, code, extra) {
        super(message);
        this.name = this.constructor.name;
        Object.assign(this, { statusCode, code, extra });
        // Restore the prototype chain so `instanceof` works after transpilation.
        Object.setPrototypeOf(this, new.target.prototype);
    }
    /** Serializes to the wire format: `{ error: code ?? message, ...extra }`. */
    toJSON() {
        const payload = { error: this.code ?? this.message };
        return this.extra ? { ...payload, ...this.extra } : payload;
    }
}
|
|
23
|
+
/** 401 — Invalid or missing API key. The message doubles as the machine-readable code. */
class AuthError extends AppError {
    constructor(message = 'Invalid API key') {
        super(message, 401, message);
    }
}
/** 429 — Rate limit exceeded; carries `retryAfterSeconds` in the JSON body. */
class RateLimitError extends AppError {
    constructor(retryAfterSeconds) {
        super('Rate limit exceeded', 429, 'rate_limit_exceeded', { retryAfterSeconds });
    }
}
/** 400 — Bad request / validation failure. The message doubles as the code. */
class ValidationError extends AppError {
    constructor(message) {
        super(message, 400, message);
    }
}
/** 503 — All providers unavailable. */
class ProviderError extends AppError {
    constructor() {
        super('All providers unavailable', 503, 'providers_unavailable');
    }
}
/** 404 — Resource not found; the code is derived from the lowercased resource name. */
class NotFoundError extends AppError {
    constructor(resource = 'Resource') {
        super(`${resource} not found`, 404, `${resource.toLowerCase()}_not_found`);
    }
}
exports.AppError = AppError;
exports.AuthError = AuthError;
exports.RateLimitError = RateLimitError;
exports.ValidationError = ValidationError;
exports.ProviderError = ProviderError;
exports.NotFoundError = NotFoundError;
|
|
59
|
+
//# sourceMappingURL=errors.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"errors.js","sourceRoot":"","sources":["../../src/shared/errors.ts"],"names":[],"mappings":"AAAA,YAAY,CAAC;;;AAEb,MAAa,QAAS,SAAQ,KAAK;IACxB,UAAU,CAAS;IACnB,IAAI,CAAqB;IACzB,KAAK,CAAsC;IAEpD,YAAY,OAAe,EAAE,UAAkB,EAAE,IAAa,EAAE,KAA+B;QAC7F,KAAK,CAAC,OAAO,CAAC,CAAC;QACf,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC;QAClC,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;QAC7B,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;QACjB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;QACnB,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,GAAG,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IACpD,CAAC;IAED,MAAM;QACJ,MAAM,IAAI,GAA4B,EAAE,KAAK,EAAE,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,OAAO,EAAE,CAAC;QAC3E,IAAI,IAAI,CAAC,KAAK;YAAE,MAAM,CAAC,MAAM,CAAC,IAAI,EAAE,IAAI,CAAC,KAAK,CAAC,CAAC;QAChD,OAAO,IAAI,CAAC;IACd,CAAC;CACF;AAnBD,4BAmBC;AAED,uCAAuC;AACvC,MAAa,SAAU,SAAQ,QAAQ;IACrC,YAAY,OAAO,GAAG,iBAAiB;QACrC,KAAK,CAAC,OAAO,EAAE,GAAG,EAAE,OAAO,CAAC,CAAC;IAC/B,CAAC;CACF;AAJD,8BAIC;AAED,gCAAgC;AAChC,MAAa,cAAe,SAAQ,QAAQ;IAC1C,YAAY,iBAAyB;QACnC,KAAK,CAAC,qBAAqB,EAAE,GAAG,EAAE,qBAAqB,EAAE,EAAE,iBAAiB,EAAE,CAAC,CAAC;IAClF,CAAC;CACF;AAJD,wCAIC;AAED,6CAA6C;AAC7C,MAAa,eAAgB,SAAQ,QAAQ;IAC3C,YAAY,OAAe;QACzB,KAAK,CAAC,OAAO,EAAE,GAAG,EAAE,OAAO,CAAC,CAAC;IAC/B,CAAC;CACF;AAJD,0CAIC;AAED,sCAAsC;AACtC,MAAa,aAAc,SAAQ,QAAQ;IACzC;QACE,KAAK,CAAC,2BAA2B,EAAE,GAAG,EAAE,uBAAuB,CAAC,CAAC;IACnE,CAAC;CACF;AAJD,sCAIC;AAED,+BAA+B;AAC/B,MAAa,aAAc,SAAQ,QAAQ;IACzC,YAAY,QAAQ,GAAG,UAAU;QAC/B,KAAK,CAAC,GAAG,QAAQ,YAAY,EAAE,GAAG,EAAE,GAAG,QAAQ,CAAC,WAAW,EAAE,YAAY,CAAC,CAAC;IAC7E,CAAC;CACF;AAJD,sCAIC"}
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
"use strict";
// TypeScript-generated interop helper: re-creates module `m`'s export `k`
// on `o` under the name `k2`, as a live getter where property descriptors
// are supported (so re-exports track later mutations of the source module).
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    // Fallback for engines without Object.create: plain (static) copy.
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// Re-exports every named export of `m` (everything except `default`,
// skipping names already present on `exports`).
var __exportStar = (this && this.__exportStar) || function(m, exports) {
    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
Object.defineProperty(exports, "__esModule", { value: true });
// Barrel module: flattens the shared utilities into one import surface.
__exportStar(require("./constants"), exports);
__exportStar(require("./errors"), exports);
__exportStar(require("./crypto"), exports);
__exportStar(require("./ndjson"), exports);
__exportStar(require("./types"), exports);
__exportStar(require("./db/pool"), exports);
__exportStar(require("./runtime/ollama"), exports);
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/shared/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;AAAA,8CAA4B;AAC5B,2CAAyB;AACzB,2CAAyB;AACzB,2CAAyB;AACzB,0CAAwB;AACxB,4CAA0B;AAC1B,mDAAiC"}
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.createNdjsonParser = createNdjsonParser;
|
|
4
|
+
/**
 * Incremental NDJSON (newline-delimited JSON) parser.
 * `feed(chunk)` returns the objects completed by the chunk, buffering any
 * trailing partial line; `flush()` drains that buffer at end of stream.
 * Lines that are blank or not valid JSON (SSE prefixes, comments, …) are
 * silently dropped.
 */
function createNdjsonParser() {
    let pending = '';
    // Parse one raw line: [value] on success, [] for blanks or invalid JSON.
    const tryParse = (raw) => {
        const line = raw.trim();
        if (!line)
            return [];
        try {
            return [JSON.parse(line)];
        }
        catch {
            // skip unparseable lines (SSE prefixes, comments, etc.)
            return [];
        }
    };
    return {
        feed(chunk) {
            const pieces = (pending + chunk).split('\n');
            // The last piece is either '' (chunk ended on \n) or a partial line.
            pending = pieces.pop() ?? '';
            return pieces.flatMap(tryParse);
        },
        flush() {
            const leftover = pending;
            pending = '';
            return tryParse(leftover);
        },
    };
}
|
|
39
|
+
//# sourceMappingURL=ndjson.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"ndjson.js","sourceRoot":"","sources":["../../src/shared/ndjson.ts"],"names":[],"mappings":"AAAA,YAAY,CAAC;;AAOb,gDAkCC;AAlCD,SAAgB,kBAAkB;IAChC,IAAI,MAAM,GAAG,EAAE,CAAC;IAEhB,OAAO;QACL,IAAI,CAAC,KAAa;YAChB,MAAM,IAAI,KAAK,CAAC;YAChB,MAAM,OAAO,GAAc,EAAE,CAAC;YAC9B,MAAM,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;YACjC,MAAM,GAAG,KAAK,CAAC,GAAG,EAAE,IAAI,EAAE,CAAC;YAE3B,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;gBACzB,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,EAAE,CAAC;gBAC5B,IAAI,CAAC,OAAO;oBAAE,SAAS;gBACvB,IAAI,CAAC;oBACH,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC;gBACpC,CAAC;gBAAC,MAAM,CAAC;oBACP,wDAAwD;gBAC1D,CAAC;YACH,CAAC;YAED,OAAO,OAAO,CAAC;QACjB,CAAC;QAED,KAAK;YACH,MAAM,OAAO,GAAG,MAAM,CAAC,IAAI,EAAE,CAAC;YAC9B,MAAM,GAAG,EAAE,CAAC;YACZ,IAAI,CAAC,OAAO;gBAAE,OAAO,EAAE,CAAC;YACxB,IAAI,CAAC;gBACH,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC;YAC/B,CAAC;YAAC,MAAM,CAAC;gBACP,OAAO,EAAE,CAAC;YACZ,CAAC;QACH,CAAC;KACF,CAAC;AACJ,CAAC"}
|
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.getLocalModels = getLocalModels;
|
|
4
|
+
exports.streamChat = streamChat;
|
|
5
|
+
exports.checkOllama = checkOllama;
|
|
6
|
+
exports.pullModel = pullModel;
|
|
7
|
+
const constants_1 = require("../../constants");
|
|
8
|
+
const HEALTH_TIMEOUT_MS = 5_000;
|
|
9
|
+
const LIST_TIMEOUT_MS = 8_000;
|
|
10
|
+
const STREAM_TIMEOUT_MS = 30_000;
|
|
11
|
+
const PULL_TIMEOUT_MS = 300_000; // model pulls can take minutes
|
|
12
|
+
/**
 * Lists the models currently available in the local Ollama instance
 * via GET /api/tags. Resolves with an empty array when the response
 * carries no `models` field.
 */
async function getLocalModels() {
    const response = await fetch(`${constants_1.OLLAMA_HOST}/api/tags`, {
        signal: AbortSignal.timeout(LIST_TIMEOUT_MS),
    });
    if (!response.ok) {
        throw new Error(`Ollama unreachable: ${response.status}`);
    }
    const payload = await response.json();
    return payload.models ?? [];
}
|
|
21
|
+
/**
 * POSTs a streaming chat request to Ollama and returns the raw Response
 * so the caller can consume the NDJSON body incrementally.
 * Aborts after STREAM_TIMEOUT_MS, or earlier if the caller's signal fires.
 */
async function streamChat(body, signal) {
    const timeoutSignal = AbortSignal.timeout(STREAM_TIMEOUT_MS);
    // AbortSignal.any is absent on older Node runtimes; when unavailable,
    // fall back to the timeout signal alone.
    const effectiveSignal = signal && AbortSignal.any
        ? AbortSignal.any([timeoutSignal, signal])
        : timeoutSignal;
    const response = await fetch(`${constants_1.OLLAMA_HOST}/api/chat`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ ...body, stream: true }),
        signal: effectiveSignal,
    });
    if (!response.ok) {
        throw new Error(`Ollama chat error: ${response.status}`);
    }
    return response;
}
|
|
38
|
+
/**
 * Liveness probe: hits the Ollama root path and rejects on any
 * non-2xx status or timeout.
 */
async function checkOllama() {
    const response = await fetch(`${constants_1.OLLAMA_HOST}/`, {
        signal: AbortSignal.timeout(HEALTH_TIMEOUT_MS),
    });
    if (!response.ok) {
        throw new Error(`Ollama health check failed: ${response.status}`);
    }
}
|
|
45
|
+
/**
 * Asks Ollama to download a model via POST /api/pull. Uses the long
 * PULL_TIMEOUT_MS budget since pulls can take minutes on slow links.
 */
async function pullModel(modelName) {
    const response = await fetch(`${constants_1.OLLAMA_HOST}/api/pull`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ name: modelName }),
        signal: AbortSignal.timeout(PULL_TIMEOUT_MS),
    });
    if (!response.ok) {
        throw new Error(`Failed to pull model ${modelName}: ${response.status}`);
    }
}
|
|
55
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../src/shared/runtime/ollama/index.ts"],"names":[],"mappings":"AAAA,YAAY,CAAC;;AAeb,wCAOC;AAED,gCAgBC;AAED,kCAKC;AAED,8BAQC;AAvDD,+CAA8C;AAE9C,MAAM,iBAAiB,GAAG,KAAK,CAAC;AAChC,MAAM,eAAe,GAAG,KAAK,CAAC;AAC9B,MAAM,iBAAiB,GAAG,MAAM,CAAC;AACjC,MAAM,eAAe,GAAG,OAAO,CAAC,CAAC,+BAA+B;AAQzD,KAAK,UAAU,cAAc;IAClC,MAAM,GAAG,GAAG,MAAM,KAAK,CAAC,GAAG,uBAAW,WAAW,EAAE;QACjD,MAAM,EAAE,WAAW,CAAC,OAAO,CAAC,eAAe,CAAC;KAC7C,CAAC,CAAC;IACH,IAAI,CAAC,GAAG,CAAC,EAAE;QAAE,MAAM,IAAI,KAAK,CAAC,uBAAuB,GAAG,CAAC,MAAM,EAAE,CAAC,CAAC;IAClE,MAAM,IAAI,GAAG,CAAC,MAAM,GAAG,CAAC,IAAI,EAAE,CAA+B,CAAC;IAC9D,OAAO,IAAI,CAAC,MAAM,IAAI,EAAE,CAAC;AAC3B,CAAC;AAEM,KAAK,UAAU,UAAU,CAAC,IAA6B,EAAE,MAAoB;IAClF,MAAM,aAAa,GAAG,WAAW,CAAC,OAAO,CAAC,iBAAiB,CAAC,CAAC;IAC7D,MAAM,eAAe,GAAG,MAAM;QAC5B,CAAC,CAAC,WAAW,CAAC,GAAG;YACf,CAAC,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,aAAa,EAAE,MAAM,CAAC,CAAC;YAC1C,CAAC,CAAC,aAAa;QACjB,CAAC,CAAC,aAAa,CAAC;IAElB,MAAM,GAAG,GAAG,MAAM,KAAK,CAAC,GAAG,uBAAW,WAAW,EAAE;QACjD,MAAM,EAAE,MAAM;QACd,OAAO,EAAE,EAAE,cAAc,EAAE,kBAAkB,EAAE;QAC/C,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,EAAE,GAAG,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC;QAC/C,MAAM,EAAE,eAAe;KACxB,CAAC,CAAC;IACH,IAAI,CAAC,GAAG,CAAC,EAAE;QAAE,MAAM,IAAI,KAAK,CAAC,sBAAsB,GAAG,CAAC,MAAM,EAAE,CAAC,CAAC;IACjE,OAAO,GAAG,CAAC;AACb,CAAC;AAEM,KAAK,UAAU,WAAW;IAC/B,MAAM,GAAG,GAAG,MAAM,KAAK,CAAC,GAAG,uBAAW,GAAG,EAAE;QACzC,MAAM,EAAE,WAAW,CAAC,OAAO,CAAC,iBAAiB,CAAC;KAC/C,CAAC,CAAC;IACH,IAAI,CAAC,GAAG,CAAC,EAAE;QAAE,MAAM,IAAI,KAAK,CAAC,+BAA+B,GAAG,CAAC,MAAM,EAAE,CAAC,CAAC;AAC5E,CAAC;AAEM,KAAK,UAAU,SAAS,CAAC,SAAiB;IAC/C,MAAM,GAAG,GAAG,MAAM,KAAK,CAAC,GAAG,uBAAW,WAAW,EAAE;QACjD,MAAM,EAAE,MAAM;QACd,OAAO,EAAE,EAAE,cAAc,EAAE,kBAAkB,EAAE;QAC/C,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,EAAE,IAAI,EAAE,SAAS,EAAE,CAAC;QACzC,MAAM,EAAE,WAAW,CAAC,OAAO,CAAC,eAAe,CAAC;KAC7C,CAAC,CAAC;IACH,IAAI,CAAC,GAAG,CAAC,EAAE;QAAE,MAAM,IAAI,KAAK,CAAC,wBAAwB,SAAS,KAAK,GAAG,CAAC,MAAM,EAAE,CAAC,CAAC;AACnF,CAAC"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"types.js","sourceRoot":"","sources":["../../src/shared/types.ts"],"names":[],"mappings":""}
|
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.run = run;
|
|
7
|
+
const child_process_1 = require("child_process");
|
|
8
|
+
const path_1 = __importDefault(require("path"));
|
|
9
|
+
const fs_1 = __importDefault(require("fs"));
|
|
10
|
+
const js_yaml_1 = __importDefault(require("js-yaml"));
|
|
11
|
+
const types_1 = require("../types");
|
|
12
|
+
const validator_1 = require("../validator");
|
|
13
|
+
const RUNNER = path_1.default.resolve(__dirname, 'axolotl_runner.py');
|
|
14
|
+
/**
 * Fine-tunes `config.baseModel` with Axolotl: writes a YAML config, spawns
 * the bundled Python runner, and streams its output to `logEmitter` as
 * 'log' events. Resolves with `{ outputPath }` on exit code 0; rejects on
 * a nonzero exit or spawn failure. The temporary config is removed either way.
 */
async function run(config, logEmitter) {
    const cfg = (0, types_1.applyDefaults)(config);
    // Sniff the dataset flavour from its first line; the dataset was already
    // validated upstream, so any read/parse failure just keeps the default.
    let datasetType = 'alpaca';
    try {
        const firstLine = fs_1.default.readFileSync(cfg.datasetPath, 'utf8').split('\n')[0].trim();
        if (firstLine && (0, validator_1.isSharegptFormat)(JSON.parse(firstLine))) {
            datasetType = 'sharegpt';
        }
    }
    catch {
        // dataset already validated upstream; default to alpaca
    }
    const axolotlCfg = {
        base_model: cfg.baseModel,
        datasets: [{ path: cfg.datasetPath, type: datasetType }],
        output_dir: cfg.outputDir,
        num_epochs: cfg.epochs,
        micro_batch_size: cfg.batchSize,
        gradient_accumulation_steps: cfg.gradientAccumulation,
        learning_rate: cfg.learningRate,
        lora_r: cfg.loraR,
        lora_alpha: cfg.loraAlpha,
        sequence_len: cfg.maxSeqLength,
        load_in_4bit: false, // minimal: no quantisation on Free tier
        adapter: 'lora',
    };
    // NOTE(review): /tmp is POSIX-only; consider os.tmpdir() for portability.
    const cfgPath = `/tmp/qs-axolotl-${Date.now()}.yml`;
    fs_1.default.writeFileSync(cfgPath, js_yaml_1.default.dump(axolotlCfg));
    // Forward every non-empty line of runner output as a 'log' event
    // (shared by stdout and stderr — previously duplicated inline).
    const forwardLines = (d) => {
        for (const line of d.toString().split('\n')) {
            if (line.trim())
                logEmitter.emit('log', line);
        }
    };
    // Best-effort removal of the temporary YAML config.
    const cleanup = () => {
        try {
            fs_1.default.unlinkSync(cfgPath);
        }
        catch {
            /* best effort cleanup */
        }
    };
    return new Promise((resolve, reject) => {
        const proc = (0, child_process_1.spawn)('python3', [RUNNER, '--config', cfgPath], {
            stdio: ['ignore', 'pipe', 'pipe'],
        });
        proc.stdout?.on('data', forwardLines);
        proc.stderr?.on('data', forwardLines);
        proc.on('close', (code) => {
            cleanup();
            if (code === 0) {
                resolve({ outputPath: cfg.outputDir });
            }
            else {
                reject(new Error(`Axolotl runner exited with code ${code}`));
            }
        });
        proc.on('error', (err) => {
            cleanup();
            reject(err);
        });
    });
}
|
|
83
|
+
//# sourceMappingURL=axolotl.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"axolotl.js","sourceRoot":"","sources":["../../../src/training/adapters/axolotl.ts"],"names":[],"mappings":"AAAA,YAAY,CAAC;;;;;AAYb,kBAqEC;AA/ED,iDAAsC;AACtC,gDAAwB;AACxB,4CAAoB;AACpB,sDAA2B;AAE3B,oCAAgE;AAChE,4CAAgD;AAEhD,MAAM,MAAM,GAAG,cAAI,CAAC,OAAO,CAAC,SAAS,EAAE,mBAAmB,CAAC,CAAC;AAErD,KAAK,UAAU,GAAG,CAAC,MAA6B,EAAE,UAAwB;IAC/E,MAAM,GAAG,GAAG,IAAA,qBAAa,EAAC,MAAM,CAAC,CAAC;IAElC,IAAI,WAAW,GAA0B,QAAQ,CAAC;IAClD,IAAI,CAAC;QACH,MAAM,SAAS,GAAG,YAAE,CAAC,YAAY,CAAC,GAAG,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC;QACjF,IAAI,SAAS,IAAI,IAAA,4BAAgB,EAAC,IAAI,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC;YACzD,WAAW,GAAG,UAAU,CAAC;QAC3B,CAAC;IACH,CAAC;IAAC,MAAM,CAAC;QACP,wDAAwD;IAC1D,CAAC;IAED,MAAM,UAAU,GAAG;QACjB,UAAU,EAAE,GAAG,CAAC,SAAS;QACzB,QAAQ,EAAE,CAAC,EAAE,IAAI,EAAE,GAAG,CAAC,WAAW,EAAE,IAAI,EAAE,WAAW,EAAE,CAAC;QACxD,UAAU,EAAE,GAAG,CAAC,SAAS;QACzB,UAAU,EAAE,GAAG,CAAC,MAAM;QACtB,gBAAgB,EAAE,GAAG,CAAC,SAAS;QAC/B,2BAA2B,EAAE,GAAG,CAAC,oBAAoB;QACrD,aAAa,EAAE,GAAG,CAAC,YAAY;QAC/B,MAAM,EAAE,GAAG,CAAC,KAAK;QACjB,UAAU,EAAE,GAAG,CAAC,SAAS;QACzB,YAAY,EAAE,GAAG,CAAC,YAAY;QAC9B,YAAY,EAAE,KAAK,EAAE,wCAAwC;QAC7D,OAAO,EAAE,MAAM;KAChB,CAAC;IAEF,MAAM,OAAO,GAAG,mBAAmB,IAAI,CAAC,GAAG,EAAE,MAAM,CAAC;IACpD,YAAE,CAAC,aAAa,CAAC,OAAO,EAAE,iBAAI,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC;IAEjD,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QACrC,MAAM,IAAI,GAAG,IAAA,qBAAK,EAAC,SAAS,EAAE,CAAC,MAAM,EAAE,UAAU,EAAE,OAAO,CAAC,EAAE;YAC3D,KAAK,EAAE,CAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,CAAC;SAClC,CAAC,CAAC;QAEH,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC,MAAM,EAAE,CAAC,CAAS,EAAE,EAAE;YACpC,KAAK,MAAM,IAAI,IAAI,CAAC,CAAC,QAAQ,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC;gBAC5C,IAAI,IAAI,CAAC,IAAI,EAAE;oBAAE,UAAU,CAAC,IAAI,CAAC,KAAK,EAAE,IAAI,CAAC,CAAC;YAChD,CAAC;QACH,CAAC,CAAC,CAAC;QACH,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC,MAAM,EAAE,CAAC,CAAS,EAAE,EAAE;YACpC,KAAK,MAAM,IAAI,IAAI,CAAC,CAAC,QAAQ,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC;gBAC5C,IAAI,IAAI,CAAC,IAAI,EAAE;oBAAE,UAAU,CAAC,IAAI,
CAAC,KAAK,EAAE,IAAI,CAAC,CAAC;YAChD,CAAC;QACH,CAAC,CAAC,CAAC;QAEH,IAAI,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,IAAmB,EAAE,EAAE;YACvC,IAAI,CAAC;gBACH,YAAE,CAAC,UAAU,CAAC,OAAO,CAAC,CAAC;YACzB,CAAC;YAAC,MAAM,CAAC;gBACP,yBAAyB;YAC3B,CAAC;YACD,IAAI,IAAI,KAAK,CAAC,EAAE,CAAC;gBACf,OAAO,CAAC,EAAE,UAAU,EAAE,GAAG,CAAC,SAAS,EAAE,CAAC,CAAC;YACzC,CAAC;iBAAM,CAAC;gBACN,MAAM,CAAC,IAAI,KAAK,CAAC,mCAAmC,IAAI,EAAE,CAAC,CAAC,CAAC;YAC/D,CAAC;QACH,CAAC,CAAC,CAAC;QAEH,IAAI,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAG,EAAE,EAAE;YACvB,IAAI,CAAC;gBACH,YAAE,CAAC,UAAU,CAAC,OAAO,CAAC,CAAC;YACzB,CAAC;YAAC,MAAM,CAAC;gBACP,yBAAyB;YAC3B,CAAC;YACD,MAAM,CAAC,GAAG,CAAC,CAAC;QACd,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;AACL,CAAC"}
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
#!/usr/bin/env python3
"""
Minimal Axolotl fine-tuning runner (Free Tier).
No DeepSpeed, no multi-GPU — designed for single-GPU / CPU.

Requirements:
    pip install axolotl
"""

import argparse
import sys
import os

# The Node-side adapter invokes this script as: python3 axolotl_runner.py --config <path>
parser = argparse.ArgumentParser(description='LocoPilot Axolotl training runner')
parser.add_argument('--config', required=True, help='Path to YAML config file')
args = parser.parse_args()

# Fail fast with a clear message if the generated config never landed on disk.
if not os.path.exists(args.config):
    print(f'[axolotl] ERROR: Config file not found: {args.config}', file=sys.stderr)
    sys.exit(1)

# Import lazily so a missing dependency produces an actionable message
# instead of a raw traceback.
try:
    from axolotl.cli import train
except ImportError as e:
    print(f'[axolotl] ERROR: Missing dependency — {e}', file=sys.stderr)
    print('[axolotl] Install with: pip install axolotl', file=sys.stderr)
    sys.exit(1)

print(f'[axolotl] Starting training with config: {args.config}')
try:
    # axolotl.cli.train accepts a config path and kicks off training
    train([args.config])
    print('[axolotl] Training complete')
except SystemExit as e:
    # axolotl may call sys.exit(0) on success
    if e.code not in (0, None):
        print(f'[axolotl] ERROR: Training failed with exit code {e.code}', file=sys.stderr)
        sys.exit(e.code)
|
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.run = run;
|
|
7
|
+
const child_process_1 = require("child_process");
|
|
8
|
+
const path_1 = __importDefault(require("path"));
|
|
9
|
+
const fs_1 = __importDefault(require("fs"));
|
|
10
|
+
const types_1 = require("../types");
|
|
11
|
+
const RUNNER = path_1.default.resolve(__dirname, 'mlx_runner.py');
|
|
12
|
+
/**
 * Fine-tunes `config.baseModel` with MLX (Apple Silicon): writes a JSON
 * config, spawns the bundled Python runner, and streams its output to
 * `logEmitter` as 'log' events. Resolves with `{ outputPath }` on exit
 * code 0; rejects on a nonzero exit or spawn failure. The temporary config
 * is removed either way.
 */
async function run(config, logEmitter) {
    const cfg = (0, types_1.applyDefaults)(config);
    // NOTE(review): /tmp is POSIX-only; consider os.tmpdir() for portability.
    const configPath = `/tmp/qs-mlx-${Date.now()}.json`;
    fs_1.default.writeFileSync(configPath, JSON.stringify(cfg));
    // Forward every non-empty line of runner output as a 'log' event
    // (shared by stdout and stderr — previously duplicated inline).
    const forwardLines = (d) => {
        for (const line of d.toString().split('\n')) {
            if (line.trim())
                logEmitter.emit('log', line);
        }
    };
    // Best-effort removal of the temporary JSON config.
    const cleanup = () => {
        try {
            fs_1.default.unlinkSync(configPath);
        }
        catch {
            /* best effort cleanup */
        }
    };
    return new Promise((resolve, reject) => {
        const proc = (0, child_process_1.spawn)('python3', [RUNNER, '--config', configPath], {
            stdio: ['ignore', 'pipe', 'pipe'],
        });
        proc.stdout?.on('data', forwardLines);
        proc.stderr?.on('data', forwardLines);
        proc.on('close', (code) => {
            cleanup();
            if (code === 0) {
                resolve({ outputPath: cfg.outputDir });
            }
            else {
                reject(new Error(`MLX runner exited with code ${code}`));
            }
        });
        proc.on('error', (err) => {
            cleanup();
            reject(err);
        });
    });
}
|
|
57
|
+
//# sourceMappingURL=mlx.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"mlx.js","sourceRoot":"","sources":["../../../src/training/adapters/mlx.ts"],"names":[],"mappings":"AAAA,YAAY,CAAC;;;;;AAUb,kBA2CC;AAnDD,iDAAsC;AACtC,gDAAwB;AACxB,4CAAoB;AAEpB,oCAAgE;AAEhE,MAAM,MAAM,GAAG,cAAI,CAAC,OAAO,CAAC,SAAS,EAAE,eAAe,CAAC,CAAC;AAEjD,KAAK,UAAU,GAAG,CAAC,MAA6B,EAAE,UAAwB;IAC/E,MAAM,GAAG,GAAG,IAAA,qBAAa,EAAC,MAAM,CAAC,CAAC;IAClC,MAAM,UAAU,GAAG,eAAe,IAAI,CAAC,GAAG,EAAE,OAAO,CAAC;IACpD,YAAE,CAAC,aAAa,CAAC,UAAU,EAAE,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,CAAC;IAElD,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QACrC,MAAM,IAAI,GAAG,IAAA,qBAAK,EAAC,SAAS,EAAE,CAAC,MAAM,EAAE,UAAU,EAAE,UAAU,CAAC,EAAE;YAC9D,KAAK,EAAE,CAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,CAAC;SAClC,CAAC,CAAC;QAEH,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC,MAAM,EAAE,CAAC,CAAS,EAAE,EAAE;YACpC,KAAK,MAAM,IAAI,IAAI,CAAC,CAAC,QAAQ,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC;gBAC5C,IAAI,IAAI,CAAC,IAAI,EAAE;oBAAE,UAAU,CAAC,IAAI,CAAC,KAAK,EAAE,IAAI,CAAC,CAAC;YAChD,CAAC;QACH,CAAC,CAAC,CAAC;QACH,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC,MAAM,EAAE,CAAC,CAAS,EAAE,EAAE;YACpC,KAAK,MAAM,IAAI,IAAI,CAAC,CAAC,QAAQ,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC;gBAC5C,IAAI,IAAI,CAAC,IAAI,EAAE;oBAAE,UAAU,CAAC,IAAI,CAAC,KAAK,EAAE,IAAI,CAAC,CAAC;YAChD,CAAC;QACH,CAAC,CAAC,CAAC;QAEH,IAAI,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,IAAmB,EAAE,EAAE;YACvC,IAAI,CAAC;gBACH,YAAE,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC;YAC5B,CAAC;YAAC,MAAM,CAAC;gBACP,yBAAyB;YAC3B,CAAC;YACD,IAAI,IAAI,KAAK,CAAC,EAAE,CAAC;gBACf,OAAO,CAAC,EAAE,UAAU,EAAE,GAAG,CAAC,SAAS,EAAE,CAAC,CAAC;YACzC,CAAC;iBAAM,CAAC;gBACN,MAAM,CAAC,IAAI,KAAK,CAAC,+BAA+B,IAAI,EAAE,CAAC,CAAC,CAAC;YAC3D,CAAC;QACH,CAAC,CAAC,CAAC;QAEH,IAAI,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAG,EAAE,EAAE;YACvB,IAAI,CAAC;gBACH,YAAE,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC;YAC5B,CAAC;YAAC,MAAM,CAAC;gBACP,yBAAyB;YAC3B,CAAC;YACD,MAAM,CAAC,GAAG,CAAC,CAAC;QACd,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;AACL,CAAC"}
|
|
@@ -0,0 +1,175 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
MLX fine-tuning runner (Free Tier — Apple Silicon).
|
|
4
|
+
Uses mlx-lm for LoRA training on Apple Metal GPU.
|
|
5
|
+
|
|
6
|
+
Requirements:
|
|
7
|
+
pip install mlx-lm
|
|
8
|
+
"""
|
|
9
|
+
|
|
10
|
+
import argparse
|
|
11
|
+
import json
|
|
12
|
+
import os
|
|
13
|
+
import shutil
|
|
14
|
+
import sys
|
|
15
|
+
import tempfile
|
|
16
|
+
|
|
17
|
+
# --- CLI: the runner accepts exactly one argument, the JSON config path -----
parser = argparse.ArgumentParser(description='LocoPilot MLX training runner')
parser.add_argument('--config', required=True, help='Path to JSON config file')
args = parser.parse_args()

# Load the training config written by the Node adapter (mlx.js); any read or
# parse failure is fatal.
try:
    with open(args.config) as f:
        cfg = json.load(f)
except Exception as e:
    print(f'[mlx] ERROR: Could not read config: {e}', file=sys.stderr)
    sys.exit(1)

# Validate the config shape up front: exit on the FIRST missing key so the
# error message names exactly one actionable problem.
required = ['baseModel', 'datasetPath', 'outputDir', 'epochs', 'batchSize',
            'loraR', 'loraAlpha', 'maxSeqLength', 'learningRate']
for key in required:
    if key not in cfg:
        print(f'[mlx] ERROR: Missing config key: {key}', file=sys.stderr)
        sys.exit(1)

# Probe the MLX stack before doing any dataset work; these imports only
# succeed on Apple Silicon with mlx-lm installed.
try:
    import mlx.core  # noqa: F401
    import mlx_lm  # noqa: F401
except ImportError as e:
    print(f'[mlx] ERROR: Missing dependency — {e}', file=sys.stderr)
    print('[mlx] Install with: pip install mlx-lm', file=sys.stderr)
    sys.exit(1)
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
def convert_to_mlx_format(dataset_path):
    """Convert alpaca/sharegpt JSONL to mlx-lm text format.

    Reads ``dataset_path`` line by line, skipping blank lines. Rows with an
    ``instruction`` key (alpaca) become '### Instruction/Input/Response'
    prompts; rows with ``conversations`` (sharegpt) become 'Human:' /
    'Assistant:' transcripts. Rows matching neither schema are ignored.

    Returns a list of ``{'text': str}`` dicts ready for mlx-lm training.
    """

    def alpaca_text(row):
        # The '### Input:' section only appears when the row has a truthy
        # 'input' value; sections are separated by blank lines.
        sections = [f'### Instruction:\n{row.get("instruction", "")}']
        extra = row.get('input', '')
        if extra:
            sections.append(f'### Input:\n{extra}')
        sections.append(f'### Response:\n{row.get("output", "")}')
        return '\n\n'.join(sections)

    def sharegpt_text(row):
        # Any speaker other than 'human' is labelled as the assistant.
        parts = []
        for turn in row.get('conversations', []):
            speaker = 'Human' if turn.get('from') == 'human' else 'Assistant'
            parts.append(f'{speaker}: {turn.get("value", "")}\n')
        return ''.join(parts).strip()

    examples = []
    with open(dataset_path) as f:
        for raw in f:
            raw = raw.strip()
            if not raw:
                continue
            row = json.loads(raw)
            if 'instruction' in row:
                examples.append({'text': alpaca_text(row)})
            elif 'conversations' in row:
                examples.append({'text': sharegpt_text(row)})
    return examples
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
print(f'[mlx] Preparing dataset from: {cfg["datasetPath"]}')

# Initialize before try so the finally block can always reference it safely
data_dir = None

try:
    # Staging directory for the converted train/valid JSONL files; removed
    # in the finally block below.
    data_dir = tempfile.mkdtemp(prefix='qs-mlx-data-')

    examples = convert_to_mlx_format(cfg['datasetPath'])
    if not examples:
        print('[mlx] ERROR: Dataset produced no examples after conversion', file=sys.stderr)
        sys.exit(1)

    # Split 90/10 train/valid
    split = max(1, int(len(examples) * 0.9))
    train_examples = examples[:split]
    # If the 90% split consumed everything, reuse the first example so the
    # validation set is never empty.
    valid_examples = examples[split:] or examples[:1]

    with open(os.path.join(data_dir, 'train.jsonl'), 'w') as f:
        for ex in train_examples:
            f.write(json.dumps(ex) + '\n')
    with open(os.path.join(data_dir, 'valid.jsonl'), 'w') as f:
        for ex in valid_examples:
            f.write(json.dumps(ex) + '\n')

    print(f'[mlx] Dataset: {len(train_examples)} train, {len(valid_examples)} valid examples')

    os.makedirs(cfg['outputDir'], exist_ok=True)
    print(f'[mlx] Loading model: {cfg["baseModel"]}')
    print(f'[mlx] Starting LoRA training — {cfg["epochs"]} epoch(s), batch size {cfg["batchSize"]}')

    # Preferred path: mlx-lm's (unstable) Python tuner API. If any of the
    # imports or calls below raise, we fall back to the CLI instead.
    _used_python_api = False
    try:
        from mlx_lm import load
        from mlx_lm.tuner.trainer import TrainingArgs, train
        from mlx_lm.tuner.datasets import load_dataset as mlx_load_dataset

        model, tokenizer = load(cfg['baseModel'])

        training_args = TrainingArgs(
            batch_size=cfg['batchSize'],
            # Approximate epochs as full passes over the training split.
            iters=cfg['epochs'] * max(len(train_examples) // cfg['batchSize'], 1),
            val_batches=5,
            steps_per_report=10,
            steps_per_eval=50,
            adapter_path=cfg['outputDir'],
            max_seq_length=cfg['maxSeqLength'],
            grad_checkpoint=False,
            learning_rate=cfg['learningRate'],
            lora_parameters={
                'rank': cfg['loraR'],
                'alpha': cfg['loraAlpha'],
                'dropout': 0.0,
                'scale': cfg['loraAlpha'] / cfg['loraR'],
            },
        )

        train_set, valid_set, _ = mlx_load_dataset(training_args, tokenizer, data_dir)
        train(model, tokenizer, train_set, valid_set, training_args)
        _used_python_api = True

    except (ImportError, AttributeError, TypeError):
        # Python API unavailable or API changed — fall back to stable mlx_lm.lora CLI
        # NOTE(review): if train() raises TypeError AFTER training has begun,
        # this silently discards that partial run and restarts via the CLI —
        # confirm that is acceptable.
        pass

    if not _used_python_api:
        import subprocess
        print('[mlx] Using mlx-lm CLI (stable fallback)')
        iters = cfg['epochs'] * max(len(train_examples) // cfg['batchSize'], 1)
        cmd = [
            sys.executable, '-m', 'mlx_lm.lora',
            '--model', cfg['baseModel'],
            '--train',
            '--data', data_dir,
            '--batch-size', str(cfg['batchSize']),
            # NOTE(review): mlx_lm.lora's --num-layers is the number of model
            # layers to fine-tune, not the LoRA rank — passing loraR here
            # looks like a flag mix-up; verify against the mlx-lm CLI docs.
            '--num-layers', str(cfg['loraR']),
            '--iters', str(iters),
            '--adapter-path', cfg['outputDir'],
            '--learning-rate', str(cfg['learningRate']),
            '--max-seq-length', str(cfg['maxSeqLength']),
        ]
        result = subprocess.run(cmd)
        if result.returncode != 0:
            print(f'[mlx] ERROR: Training failed with exit code {result.returncode}', file=sys.stderr)
            sys.exit(result.returncode)

    print(f'[mlx] Adapter saved to {cfg["outputDir"]}')

except SystemExit:
    # Let deliberate exits (with their exit codes) propagate past the
    # catch-all below; the finally block still runs.
    raise
except Exception as e:
    print(f'[mlx] ERROR: Training failed: {e}', file=sys.stderr)
    sys.exit(1)
finally:
    # Best-effort removal of the staging dataset directory.
    if data_dir is not None:
        try:
            shutil.rmtree(data_dir)
        except Exception:
            pass
|