discourse_ai-tokenizers 0.3.0 → 0.3.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: c2d3921cc11a89b45ff8e5f7a58c4ae4cb170791a45dc445d1f908a5cc83a88a
-  data.tar.gz: 7d5d4a725d97d608baea0c3946bde1156c5f7808ad5d6e5bbbb420670e195287
+  metadata.gz: 36b5e98f002fe493df0c192a5ba86cf1a65d7c5d58207a3ee51a151c71d25002
+  data.tar.gz: c20fdaa5692731610370d9c8bf790a12ace12a5b3513d95f238e64369396dfcf
 SHA512:
-  metadata.gz: 407774d2cfd411c88e4b43fb31aa572ea8a59bc285887f24dc96cf4843d7b8c1dc5b0c35b5731223a4e269772146ff8a9b499829bcace07b5b954f540d534bdf
-  data.tar.gz: da30167cc708d12dbba2763bdb31802c6ba165018d5ca7a1e698898ac5df41ee4b11c91aa24153588a238f98d1ebd886316e9716cf9bfef8c2ea65fbd9fec2a5
+  metadata.gz: f83d3e648f680f40c099add8596111d25e74cadb21109bcc7eb1914b12c19b42b118435cd0c99e2781002ea5090325a59b69ded692a05fc8ea98c86a6f13bd5e
+  data.tar.gz: 0bc123e4127d01bb85650147b4c56134c2789b12ec3edffa0377512482e731e1718b0811f7df18b9b8978e0f4556a10de030de1d4548496e11bc180979d9cf4b
data/CHANGELOG.md CHANGED
@@ -1,5 +1,11 @@
 ## [Unreleased]
 
+## [0.3.1] - 2025-07-07
+
+- Refactor OpenAiO200kTokenizer class to OpenAiTokenizer as primary class name
+- Update backward compatibility alias (OpenAiO200kTokenizer now aliases OpenAiTokenizer)
+- Update version to 0.3.1
+
 ## [0.3.0] - 2025-07-04
 
 - Add OpenAiCl100kTokenizer class for cl100k_base encoding
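The substance of this release is the constant swap described in the changelog: in 0.3.0, OpenAiTokenizer was an alias pointing at OpenAiO200kTokenizer; 0.3.1 reverses the roles, making OpenAiTokenizer the real class and OpenAiO200kTokenizer the backward-compatibility alias. The source hunks below carry out that swap. A minimal sketch of what callers can rely on (the require path is assumed from the gem name, not shown in this diff):

# Sketch only: verifies the backward-compatibility alias described in the
# 0.3.1 changelog entry. The require path is an assumption from the gem name.
require "discourse_ai/tokenizers"

old_name = DiscourseAi::Tokenizer::OpenAiO200kTokenizer
new_name = DiscourseAi::Tokenizer::OpenAiTokenizer

# A Ruby constant alias makes both names refer to the same Class object,
# so existing code that uses the old name keeps working unchanged.
puts old_name.equal?(new_name) # => true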
@@ -3,7 +3,7 @@
 module DiscourseAi
   module Tokenizer
     # Wrapper for OpenAI tokenizer library for compatibility with Discourse AI API
-    class OpenAiO200kTokenizer < BasicTokenizer
+    class OpenAiTokenizer < BasicTokenizer
       class << self
         def tokenizer
           @tokenizer ||= Tiktoken.get_encoding("o200k_base")
@@ -55,6 +55,6 @@ module DiscourseAi
       end
     end
 
-    OpenAiTokenizer = OpenAiO200kTokenizer
+    OpenAiO200kTokenizer = OpenAiTokenizer
   end
 end
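The renamed class keeps the same memoized tokenizer method, which wraps tiktoken's o200k_base encoding. A hedged sketch of the underlying call the class caches, following the tiktoken_ruby API; the encode/decode calls are the library's and do not appear in this hunk:

# Sketch of the call that OpenAiTokenizer.tokenizer memoizes via @tokenizer ||=.
require "tiktoken_ruby"

enc  = Tiktoken.get_encoding("o200k_base") # same encoding the class caches
ids  = enc.encode("Hello, Discourse!")     # String -> Array of integer token ids
text = enc.decode(ids)                     # round-trips back to the original text
puts ids.length
puts text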
@@ -2,6 +2,6 @@
 
 module DiscourseAi
   module Tokenizers
-    VERSION = "0.3.0"
+    VERSION = "0.3.1"
   end
 end
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: discourse_ai-tokenizers
 version: !ruby/object:Gem::Version
-  version: 0.3.0
+  version: 0.3.1
 platform: ruby
 authors:
 - Rafael Silva