@kernel.chat/kbot 3.29.0 → 3.30.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.js +63 -0
- package/dist/cli.js.map +1 -1
- package/dist/inference.d.ts +34 -0
- package/dist/inference.d.ts.map +1 -1
- package/dist/inference.js +230 -0
- package/dist/inference.js.map +1 -1
- package/package.json +1 -1
package/dist/cli.js
CHANGED
|
@@ -883,6 +883,69 @@ async function main() {
|
|
|
883
883
|
process.stderr.write(formatMachineProfile(profile));
|
|
884
884
|
}
|
|
885
885
|
});
|
|
886
|
+
// `kbot hardware` — detect the local machine's AI-capability tier and print a
// report: tier summary, a model catalog with per-model fit indicators, an
// optional multi-model layout, quick-start recommendations, and quantization
// notes. Registered as a subcommand on the shared commander `program` instance;
// all output goes to stdout via console.log.
program
    .command('hardware')
    .description('Detect your hardware tier and get personalized model recommendations for local AI')
    .action(async () => {
        // Lazy dynamic imports: pay the load cost only when this subcommand runs.
        const chalk = (await import('chalk')).default;
        // Fix: the original destructuring also pulled in QUANT_OPTIONS, which is
        // never referenced in this action body — unused binding removed.
        const { detectHardwareTier, getMultiModelConfig, DEFAULT_MODELS } = await import('./inference.js');
        const hw = detectHardwareTier();
        const mm = getMultiModelConfig();
        console.log();
        console.log(` ${chalk.bold('kbot hardware')} — your local AI capability`);
        console.log();
        // Display color per hardware tier; unknown tiers fall back to plain white.
        const tierColors = {
            basic: chalk.dim, standard: chalk.white, pro: chalk.cyan, ultra: chalk.hex('#A78BFA'), datacenter: chalk.hex('#FFD700'),
        };
        const colorFn = tierColors[hw.tier] || chalk.white;
        console.log(` ${chalk.bold('Tier')}: ${colorFn(hw.tier.toUpperCase())}`);
        console.log(` ${chalk.dim(hw.description)}`);
        console.log(` ${chalk.bold('Max model')}: ${hw.maxModelParams}`);
        console.log();
        // Model catalog by tier
        console.log(` ${chalk.bold('Model Catalog')} ${chalk.dim(`(${Object.keys(DEFAULT_MODELS).length} models)`)}`);
        console.log(` ${chalk.dim('─'.repeat(50))}`);
        // Size buckets are derived from each model's tags. "Standard" is the
        // catch-all for models carrying none of the special tags, so every
        // non-legacy model lands in exactly one bucket.
        const tiers = [
            { label: 'Light (2-4 GB)', filter: tags => tags.includes('lightweight') || tags.includes('fast') },
            { label: 'Standard (4-6 GB)', filter: tags => !tags.includes('lightweight') && !tags.includes('fast') && !tags.includes('large') && !tags.includes('frontier') && !tags.includes('ultra') && !tags.includes('legacy') },
            { label: 'Heavy (8-16 GB)', filter: tags => tags.includes('large') },
            { label: 'Frontier (32-64 GB)', filter: tags => tags.includes('frontier') },
            { label: 'Ultra (100+ GB)', filter: tags => tags.includes('ultra') },
        ];
        for (const tier of tiers) {
            const models = Object.entries(DEFAULT_MODELS).filter(([, m]) => tier.filter(m.tags));
            if (models.length === 0)
                continue;
            console.log(` ${chalk.bold(tier.label)}`);
            for (const [name, model] of models) {
                // "Fits" when the model's numeric size is at most 60% of total
                // RAM — presumably leaving headroom for KV cache and the OS
                // (TODO confirm intent). model.size looks like a human-readable
                // string (e.g. "4.7 GB"); strip non-numerics before parsing —
                // verify against DEFAULT_MODELS in inference.js.
                const fits = parseFloat(model.size.replace(/[^0-9.]/g, '')) <= mm.totalRAM * 0.6;
                const icon = fits ? chalk.green('✓') : chalk.red('✗');
                const rec = model.tags.includes('recommended') ? chalk.yellow(' ★') : '';
                console.log(` ${icon} ${name.padEnd(22)} ${chalk.dim(model.size.padEnd(10))} ${model.description.slice(0, 55)}${rec}`);
            }
            console.log();
        }
        // Multi-model config
        if (mm.canMultiModel) {
            console.log(` ${chalk.bold('Multi-Model Setup')} ${chalk.dim('(run two models simultaneously)')}`);
            console.log(` ${chalk.dim('─'.repeat(50))}`);
            for (const slot of mm.recommended) {
                console.log(` ${chalk.cyan(slot.slot.padEnd(8))} → ${slot.model} (${slot.size})`);
            }
            console.log();
        }
        // Recommendations
        console.log(` ${chalk.bold('Quick Start')}`);
        console.log(` ${chalk.dim('─'.repeat(50))}`);
        for (const rec of hw.recommendations) {
            console.log(` ${chalk.white(rec)}`);
        }
        console.log();
        // Quantization info
        console.log(` ${chalk.dim('Quantization options: Q2 (smallest) → Q4 (default) → Q8 (best) → F16 (original)')}`);
        console.log(` ${chalk.dim('Higher quant = better quality, more RAM. Use: kbot models pull hf:user/repo:file-Q6_K.gguf')}`);
        console.log();
    });
|
|
886
949
|
// `kbot synthesis` command group header. The handle is captured in synthCmd —
// presumably so subcommands/options are attached to it later in the file
// (outside this view); verify before removing the binding.
const synthCmd = program
    .command('synthesis')
    .description('Closed-loop intelligence compounding — bridge self-discovery and universe discovery');