net-llm 0.5.0 → 0.6.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: a7168d17a456b69a77ada9a8c5eb855ae6730a124cdd55dd9e72fee2bfa6fef1
- data.tar.gz: 0fde66f7b0304486f3c5da851d7bbe97347f58fa0692dbd48c8462932ebe1bda
+ metadata.gz: c9733e1493bc018f7d3fa449f204e97442f6af11e8d16c7765200481b2868e85
+ data.tar.gz: 3ba8bae89d98a182bf24c46fb7df75d35af331214c96bbe79201e728180b2dcd
  SHA512:
- metadata.gz: 1969cb1afc0e322f9899f97c95898c9ebf825fd826ce43af0fc0dc66fe86b298ca4538d3426519791661824265d6ff0ce51f3ecd2012b464639a969df4c1142f
- data.tar.gz: 2d765715615d2d66f36fd00e8a64360061f740c8e19af1d1ef80c9d5d1b744964e5f3e741bc7af414d877acec7840adf855ba28159131bad31bcdebbe99dd5b5
+ metadata.gz: dca71ba48b74bf76b2f947899e1c7b546e4cb56496fe2f62ec3034d268c6db18de8088a9b13ef148c6a3b752adfd123e786a78dafeb369254fc20b50a17a821a
+ data.tar.gz: 45c44adac67f68b256d013923a04df308229a7a196ec089467eb792d9bff8d829e5cc35b56b71e4bf524ff83653706a895c9b26ad331401a914164bf0cf0e333
data/CHANGELOG.md CHANGED
@@ -1,5 +1,22 @@
  ## [Unreleased]

+ ## [0.6.1] - 2025-01-14
+
+ ### Fixed
+ - Reduced default `max_tokens` from 65536 to 64000 for Claude models
+ - Fixes HTTP 400 errors on Vertex AI which caps at 64000
+
+ ## [0.6.0] - 2025-01-14
+
+ ### Added
+ - `GOOGLE_OAUTH_ACCESS_TOKEN` environment variable support for VertexAI provider
+ - Token is fetched fresh on each request (no memoization) for long-running processes
+ - Falls back to `gcloud auth application-default print-access-token` when not set
+
+ ### Changed
+ - Increased default `max_tokens` from 1024 to 65536 for Claude models
+ - Matches Claude Opus 4.5 / Sonnet 4 max output capacity
+
  ## [0.5.0] - 2025-01-07

  ### Added
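Per the 0.6.0 entries above, the VertexAI provider now looks for a caller-supplied token before falling back to the `gcloud` CLI. A minimal sketch of the two authentication paths from a consumer's point of view; the secret path below is a placeholder, not something the gem reads:

```ruby
# Option 1: supply a token explicitly. It is read on every request and never
# memoized, so updating the variable later takes effect immediately.
ENV["GOOGLE_OAUTH_ACCESS_TOKEN"] = File.read("/run/secrets/gcp_access_token").strip

# Option 2: leave the variable unset and the provider shells out to
# `gcloud auth application-default print-access-token` on each request.
ENV.delete("GOOGLE_OAUTH_ACCESS_TOKEN")
```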
@@ -17,7 +17,7 @@ module Net
  @headers_source.respond_to?(:call) ? @headers_source.call : @headers_source
  end

- def messages(messages, system: nil, max_tokens: 1024, tools: nil, &block)
+ def messages(messages, system: nil, max_tokens: 64000, tools: nil, &block)
  payload = build_payload(messages, system, max_tokens, tools, block_given?)

  if block_given?
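The hunk above changes only the default; a caller can still pass `max_tokens:` explicitly when a smaller budget is appropriate. A hedged sketch, assuming `client` is an instance of the class this `messages` method belongs to and that messages are the usual role/content hashes (neither the class name nor the message shape appears in this hunk):

```ruby
# Illustrative call against the #messages signature shown above.
# The new 64000-token default applies only when max_tokens: is omitted.
history = [{ role: "user", content: "Summarize the 0.6.x changes." }]

response = client.messages(history, system: "Answer in one paragraph.", max_tokens: 2048)
```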
@@ -2,6 +2,6 @@

  module Net
  module Llm
- VERSION = "0.5.0"
+ VERSION = "0.6.1"
  end
  end
@@ -31,7 +31,9 @@ module Net
  end

  def access_token
- @access_token ||= `gcloud auth application-default print-access-token`.strip
+ ENV.fetch("GOOGLE_OAUTH_ACCESS_TOKEN") do
+   `gcloud auth application-default print-access-token`.strip
+ end
  end
  end
  end
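Because the token is now resolved through `ENV.fetch` on every call instead of being memoized in `@access_token`, a long-running process can rotate the token at runtime and later requests will pick it up. One illustrative way to do that; the refresh thread and interval are not part of the gem:

```ruby
# Illustrative background refresh. Each request made after an update sees the
# new value because the provider no longer caches the token.
Thread.new do
  loop do
    ENV["GOOGLE_OAUTH_ACCESS_TOKEN"] =
      `gcloud auth application-default print-access-token`.strip
    sleep 45 * 60 # Google OAuth access tokens typically expire after about an hour
  end
end
```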
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: net-llm
  version: !ruby/object:Gem::Version
- version: 0.5.0
+ version: 0.6.1
  platform: ruby
  authors:
  - mo khan
@@ -148,7 +148,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
- rubygems_version: 3.7.2
+ rubygems_version: 4.0.3
  specification_version: 4
  summary: Ruby client for OpenAI, Ollama, and Anthropic LLM APIs
  test_files: []