llmist 0.1.6 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -288,7 +288,7 @@ for await (const event of agent.run()) {
288
288
 ### 🧪 Mock Testing
289
289
 
290
290
  ```typescript
291
- import { mockLLM, createMockClient } from 'llmist';
291
+ import { LLMist, mockLLM, createMockClient } from 'llmist';
292
292
 
293
293
  mockLLM()
294
294
  .forModel('gpt-5')
@@ -296,8 +296,8 @@ mockLLM()
296
296
  .returns('The answer is 42')
297
297
  .register();
298
298
 
299
- const answer = await LLMist.createAgent()
300
- .withClient(createMockClient())
299
+ const mockClient = createMockClient();
300
+ const answer = await mockClient.createAgent()
301
301
  .withModel('gpt-5')
302
302
  .askAndCollect('Calculate 2 + 2');
303
303
 
@@ -312,12 +312,12 @@ console.log(answer); // "The answer is 42" - no API call made!
312
312
  const client = new LLMist();
313
313
 
314
314
  // Get model specs
315
- const gpt4 = client.modelRegistry.getModelSpec('openai:gpt-4');
316
- console.log(gpt4.contextWindow); // 128000
317
- console.log(gpt4.pricing.input); // 10.0 per 1M tokens
315
+ const gpt5 = client.modelRegistry.getModelSpec('gpt-5');
316
+ console.log(gpt5.contextWindow); // 272000
317
+ console.log(gpt5.pricing.input); // 1.25 per 1M tokens
318
318
 
319
319
  // Estimate costs
320
- const cost = client.modelRegistry.estimateCost('openai:gpt-4', 10_000, 2_000);
320
+ const cost = client.modelRegistry.estimateCost('gpt-5', 10_000, 2_000);
321
321
  console.log(`$${cost.totalCost.toFixed(4)}`);
322
322
 
323
323
  // Find cheapest model
@@ -335,7 +335,7 @@ const messages = [
335
335
  ];
336
336
 
337
337
  const tokens = await client.countTokens('openai:gpt-5', messages);
338
- const cost = client.modelRegistry.estimateCost('openai:gpt-5', tokens, 1000);
338
+ const cost = client.modelRegistry.estimateCost('gpt-5', tokens, 1000);
339
339
  ```
340
340
 
341
341
  Uses provider-specific methods (tiktoken for OpenAI, native APIs for Anthropic/Gemini).
@@ -438,6 +438,53 @@ Contributions welcome! Please ensure:
438
438
 4. ✅ Types are properly defined
439
439
 5. ✅ Examples/docs updated for API changes
440
440
 
441
+ ### Commit Message Convention
442
+
443
+ This project follows [Conventional Commits](https://www.conventionalcommits.org/) specification. All commit messages must be formatted as:
444
+
445
+ ```
446
+ <type>(<scope>): <subject>
447
+ ```
448
+
449
+ **Types:**
450
+ - `feat:` - New feature (triggers minor version bump)
451
+ - `fix:` - Bug fix (triggers patch version bump)
452
+ - `docs:` - Documentation only changes
453
+ - `style:` - Code style changes (formatting, missing semi-colons, etc)
454
+ - `refactor:` - Code refactoring without feature changes
455
+ - `perf:` - Performance improvements
456
+ - `test:` - Adding or updating tests
457
+ - `build:` - Build system or dependency changes
458
+ - `ci:` - CI configuration changes
459
+ - `chore:` - Other changes that don't modify src or test files
460
+
461
+ **Breaking Changes:** Add `BREAKING CHANGE:` in the footer to trigger major version bump.
462
+
463
+ **Examples:**
464
+ ```bash
465
+ feat(agent): add support for streaming tool calls
466
+ fix(cli): prevent crash on invalid gadget path
467
+ docs: update API documentation for v2
468
+ ```
469
+
470
+ **Note:** Git hooks will validate your commit messages locally.
471
+
472
+ ### Release Process
473
+
474
+ Releases are fully automated using [semantic-release](https://github.com/semantic-release/semantic-release):
475
+
476
+ 1. Merge PR to `main` branch
477
+ 2. CI workflow runs automatically
478
+ 3. If CI passes, release workflow:
479
+ - Analyzes commits since last release
480
+ - Determines version bump based on commit types
481
+ - Updates `package.json` and `CHANGELOG.md`
482
+ - Creates git tag and GitHub release
483
+ - Publishes to npm
484
+ - Syncs changes back to `dev` branch
485
+
486
+ **No manual version bumps needed!**
487
+
441
488
  ---
442
489
 
443
490
 ## 📄 License
package/dist/cli.cjs CHANGED
@@ -4253,7 +4253,7 @@ var import_commander3 = require("commander");
4253
4253
  // package.json
4254
4254
  var package_default = {
4255
4255
  name: "llmist",
4256
- version: "0.1.6",
4256
+ version: "0.2.0",
4257
4257
  description: "Universal TypeScript LLM client with streaming-first agent framework. Works with any model - no structured outputs or native tool calling required. Implements its own flexible grammar for function calling.",
4258
4258
  type: "module",
4259
4259
  main: "dist/index.cjs",
@@ -4295,7 +4295,8 @@ var package_default = {
4295
4295
  "test:e2e:watch": "bun test src/e2e --watch --timeout 60000",
4296
4296
  "test:all": "bun run test && bun run test:e2e",
4297
4297
  clean: "rimraf dist",
4298
- prepare: "node scripts/install-hooks.js || true"
4298
+ prepare: "node scripts/install-hooks.js || true",
4299
+ "release:dry": "bunx semantic-release --dry-run"
4299
4300
  },
4300
4301
  bin: {
4301
4302
  llmist: "dist/cli.js"
@@ -4344,11 +4345,16 @@ var package_default = {
4344
4345
  },
4345
4346
  devDependencies: {
4346
4347
  "@biomejs/biome": "^2.3.2",
4348
+ "@commitlint/cli": "^20.1.0",
4349
+ "@commitlint/config-conventional": "^20.0.0",
4350
+ "@semantic-release/changelog": "^6.0.3",
4351
+ "@semantic-release/git": "^10.0.1",
4347
4352
  "@types/js-yaml": "^4.0.9",
4348
4353
  "@types/node": "^20.12.7",
4349
4354
  "bun-types": "^1.3.2",
4350
4355
  dotenv: "^17.2.3",
4351
4356
  rimraf: "^5.0.5",
4357
+ "semantic-release": "^25.0.2",
4352
4358
  tsup: "^8.3.5",
4353
4359
  typescript: "^5.4.5"
4354
4360
  }