kaba 0.3.2 → 0.4.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: cf8ec2d335097ccfe7a540e2d2fa7253d181db9a255517fd32118707c3bbee0d
-  data.tar.gz: f6d150a528511f20b0523a1c1e30268c526386ed3d309b3faa6ff524cb2b7659
+  metadata.gz: 0add8ecf9ac4613decb084b678c044809896db95136b59e24602ae47a426d448
+  data.tar.gz: 2dda3c5fa0152b4928c4a2fadc11b18ed429cc724e87c5b38c794083cb731c69
 SHA512:
-  metadata.gz: c2fa2925775abd5bc7e04e0ff58ed3066f321dbe6c796f40eb66048945f8b3860ab8ee721b2aa4e612fac7b79f3dc57880c7b35e2e4e0ba29cc01de72db894b7
-  data.tar.gz: ba270bbcb84f5e89ad5154e1a789515c5200cf92dacaab1252f2bc60440e93d1c1fe999eaad98d9c2eb29bf73b720bb27608f22fff62f1d3d4eec52c9b5aefc1
+  metadata.gz: 622be2e55740919481236e525f2c2045f9f633eccfa36363a55d959c8d25cd859cfc349e6668df429571052d9501077f8068c35e1a72bac25ced650188c3407e
+  data.tar.gz: 56c1a740604cb48c6bfb14f5c93854597fa53922c2befd03e58cf192d29e2b2b1ac87d9480255ae6c6ef175d2652299b67b0cba8cb1e0deccf405adce9efa22b
data/README.md CHANGED
@@ -30,4 +30,15 @@ alias kaba='docker run -it --rm -v "${PWD}:/workdir" ghcr.io/mjason/kaba:latest'
 ## Related projects
 - [lisa_typechat_server](https://github.com/mjason/lisa_typechat_server)
 
-If you want to change the service address, there are two ways: set it in `.env`, or set the environment variable yourself; the variable name is `LISA_TYPECHAT_ENDPOINT`.
+If you want to change the service address, there are two ways: set it in `.env`, or set the environment variable yourself; the variable name is `LISA_TYPECHAT_ENDPOINT`.
+
+## changelog
+
+.env needs to be updated:
+```
+; LISA_TYPECHAT_ENDPOINT=https://lisa-typechat.listenai.com
+LISA_ACCESS_TOKEN=your ListenAI platform key
+
+JUDGE_ACCCESS_TOKEN=can be the same as LISA_ACCESS_TOKEN
+JUDGE_LLM_URI_BASE=set only if you need a different provider; leave empty by default
+```
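For reference, a minimal Ruby sketch of reading the new variables after loading `.env` (assumes the dotenv gem; the fallback to `LISA_ACCESS_TOKEN` is illustrative only, kaba itself reads `JUDGE_ACCCESS_TOKEN` directly):

```ruby
# Illustrative only: load .env and inspect the new judge-related variables.
# Variable names come from the README snippet above; the token fallback is
# an assumption for this sketch, not behaviour of kaba itself.
require "dotenv/load"

judge_token = ENV["JUDGE_ACCCESS_TOKEN"] || ENV["LISA_ACCESS_TOKEN"]
judge_base  = ENV.fetch("JUDGE_LLM_URI_BASE", "https://api.listenai.com")

abort "no judge token configured" if judge_token.nil? || judge_token.empty?
puts "judge requests will go to #{judge_base}"
```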
@@ -19,6 +19,17 @@ class Application
     end
   end
 
+  def judge_llm_client
+    @judge_llm_client ||= OpenAI::Client.new(
+      log_errors: true,
+      access_token: env!("JUDGE_ACCCESS_TOKEN"),
+      request_timeout: ENV.fetch("LISA_LLM_REQUEST_TIMEOUT", 120).to_i,
+      uri_base: ENV.fetch("JUDGE_LLM_URI_BASE", "https://api.listenai.com")
+    ) do |faraday|
+      faraday.adapter Faraday.default_adapter, clients: Async::HTTP::Faraday::PersistentClients
+    end
+  end
+
   def llm_client_extra_headers=(headers)
     OpenAI.configure do |config|
       config.extra_headers = headers
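A hedged usage sketch of the client added above (the method name and env vars come from the diff; the model name, prompt, and response handling are placeholders following the ruby-openai chat API). Keeping the judge on its own client lets the evaluation model use a different token or provider (`JUDGE_LLM_URI_BASE`) than the model under test.

```ruby
# Sketch: ask the dedicated judge client to score an output. Assumes the
# ruby-openai client returned by Application.judge_llm_client above; the
# model name and prompt below are placeholders.
response = Application.judge_llm_client.chat(
  parameters: {
    model: "spark-general-4.0",
    messages: [{ role: "user", content: "Does the output satisfy the target? Reply yes or no." }],
    temperature: 0.1
  }
)

puts response.dig("choices", 0, "message", "content")
```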
@@ -26,7 +26,8 @@ class TestRunner
     model: 'spark-general-4.0',
     judge_model: 'spark-general-4.0',
     judge_temperature: 0.1,
-    temperature: 0.1
+    temperature: 0.1,
+    semaphore_limit: 5
   )
 
   progressbar = TTY::ProgressBar.new(
@@ -37,8 +38,9 @@ class TestRunner
   progressbar.start
 
   Async do
+    semaphore = Async::Semaphore.new(semaphore_limit)
     _each(limit: limit) do |row|
-      Async do |task|
+      semaphore.async do |task|
         input = @prompt.render(File.read row.input_file)
 
         target = <<~Markdown
@@ -64,7 +66,7 @@ class TestRunner
         @type_right_total += 1 if type_check_response["success"]
 
         judge_input = Judge.new(input: input, output: output, target: target).render
-        judge_response = Application.llm_client.chat(
+        judge_response = Application.judge_llm_client.chat(
           parameters: {
             model: judge_model,
             messages: [ { role: 'user', content: judge_input } ],
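The change above bounds concurrency with `Async::Semaphore` instead of spawning an unbounded task per row. A standalone sketch of the same pattern (assumes the async gem; the work inside the block is a placeholder, not kaba's actual request code):

```ruby
# Sketch of the concurrency pattern used above: at most `limit` tasks run at
# once, so rows are processed concurrently without flooding the endpoint.
require "async"
require "async/semaphore"

limit = 5                      # mirrors the new semaphore_limit default
rows  = (1..20).to_a           # stand-in for the rows yielded by _each

Async do
  semaphore = Async::Semaphore.new(limit)
  rows.each do |row|
    semaphore.async do
      sleep 0.1                # placeholder for the LLM request
      puts "row #{row} done"
    end
  end
end
```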
data/lib/kaba/version.rb CHANGED
@@ -1,5 +1,5 @@
 # frozen_string_literal: true
 
 module Kaba
-  VERSION = "0.3.2"
+  VERSION = "0.4.0"
 end
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: kaba
 version: !ruby/object:Gem::Version
-  version: 0.3.2
+  version: 0.4.0
 platform: ruby
 authors:
 - MJ
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2024-11-16 00:00:00.000000000 Z
+date: 2024-11-18 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: async