gpt-function 0.2.0 → 0.3.0
- checksums.yaml +4 -4
- data/Gemfile.lock +3 -1
- data/README.md +21 -19
- data/gpt-function.gemspec +4 -2
- data/lib/gpt/function.rb +1 -81
- data/lib/gpt-function.rb +120 -2
- data/lib/gpt_function/batch.rb +250 -0
- data/lib/gpt_function/file.rb +205 -0
- data/lib/gpt_function/version.rb +5 -0
- data/lib/gpt_functions.rb +56 -0
- metadata +22 -6
- data/lib/gpt/function/version.rb +0 -7
- data/lib/gpt/functions.rb +0 -57
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: c103f09f18b5ef5a39f26b5ede8a3948dd4b2305fb754908c6b090c7f78d1e79
+  data.tar.gz: 686b1a27e1f955b45827abedcd665662c07386eb0008d6ad012a81a0dff6aedd
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 2237aa77cb338408e6d237c07036d9d7998f356939e292e481305a760386c9282b34b683d6c3ba41b51b6fbb911802077804a563372beaab4c0bfcf48d357ee0
+  data.tar.gz: 0573a13abda0ae4a58c5b5cdfa0bdbf4062a68aba8b04802773c0efc8192728ce0ffa0bf30b8feddd70b2fb910b3b5248f6f55b3d40da94352e261febb8c9771
data/Gemfile.lock
CHANGED
@@ -1,7 +1,7 @@
 PATH
   remote: .
   specs:
-    gpt-function (0.2.0)
+    gpt-function (0.3.0)
 
 GEM
   remote: https://rubygems.org/
@@ -13,6 +13,7 @@ GEM
     crack (0.4.5)
       rexml
     diff-lcs (1.5.0)
+    dotenv (3.1.2)
    hashdiff (1.0.1)
     json (2.6.3)
     language_server-protocol (3.17.0.3)
@@ -64,6 +65,7 @@ PLATFORMS
 
 DEPENDENCIES
   byebug (~> 11.1)
+  dotenv
   gpt-function!
   rake (~> 13.0)
   rspec (~> 3.0)
data/README.md
CHANGED
@@ -2,7 +2,7 @@
 
 This gem lets you use GPT functions in your Ruby programs.
 
-You can make sure that every call to a GPT
+You can make sure that every call to a GPT function returns the same result, close to 100% of the time.
 
 The models currently available are:
 
@@ -12,7 +12,13 @@
 
 ## Installation
 
-
+Add this line to your Gemfile:
+
+```ruby
+gem 'gpt-function'
+```
+
+Then you can install the gem with `bundle install`.
 
 ## Usage
 
@@ -23,27 +29,23 @@ require 'gpt-function'
 # You need to set your api key and model name
 Gpt::Function.configure(api_key: '...', model: 'gpt-3.5-turbo-1106')
 
-#
-
-
-# Then you can use the function
-result = translater.call("apple")
+# Use the built-in translation method
+p Gpt::Functions.翻譯成中文("banana") # "香蕉"
 
-#
-
+# Use the built-in keyword extraction method
+p Gpt::Functions.擷取關鍵字("臺北市政府推動綠色交通計劃,鼓勵民眾使用公共運輸和自行車") # ["臺北市政府", "綠色交通計劃", "民眾", "公共運輸", "自行車"]
 
-#
-
+# You can also define your own method
+def 擷取關鍵字(input)
+  # Create a simple GPT function: describe what it should do and give a few examples
+  Gpt::Function.new("Extract all keywords",
+    [
       [
-        "
-        ["
+        "臺灣最新5G網路覆蓋率達95%,推動智慧城市發展,領先亞洲多國",
+        ["臺灣", "5G網路", "覆蓋率", "智慧城市", "亞洲"]
       ]
-    ]
-
-result = keywords_extractor.call("藍白「3%各表」翻臉倒數?學者曝1關鍵指標")
-
-# You can see that the return type is an array
-puts result # ["藍白", "3%各表", "翻臉", "關鍵指標"]
+    ]).call(input)
+end
 ```
 
 ## License
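Note that the README above still shows the 0.2.x `Gpt::Function` / `Gpt::Functions` names, while the code added in this release defines top-level `GptFunction` and `GptFunctions` classes (see lib/gpt-function.rb and lib/gpt_functions.rb below). A minimal sketch of the same README usage written against the new names; the `OPENAI_API_KEY` environment variable is an assumption, and the inputs are the README's own examples:

```ruby
require "gpt-function"

# Configure once before calling any function (hypothetical key source).
GptFunction.configure(api_key: ENV["OPENAI_API_KEY"], model: "gpt-3.5-turbo-1106")

# The README's keyword extractor, rewritten against the new class name.
keywords_extractor = GptFunction.new("Extract all keywords",
                                     [["臺灣最新5G網路覆蓋率達95%,推動智慧城市發展,領先亞洲多國",
                                       ["臺灣", "5G網路", "覆蓋率", "智慧城市", "亞洲"]]])
p keywords_extractor.call("臺北市政府推動綠色交通計劃,鼓勵民眾使用公共運輸和自行車")
```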
data/gpt-function.gemspec
CHANGED
@@ -1,10 +1,10 @@
 # frozen_string_literal: true
 
-require_relative "lib/
+require_relative "lib/gpt_function/version"
 
 Gem::Specification.new do |spec|
   spec.name = "gpt-function"
-  spec.version =
+  spec.version = GptFunction::VERSION
   spec.authors = ["etrex kuo"]
   spec.email = ["et284vu065k3@gmail.com"]
 
@@ -32,6 +32,8 @@ Gem::Specification.new do |spec|
   # Uncomment to register a new dependency of your gem
   # spec.add_dependency "example-gem", "~> 1.0"
 
+  spec.add_development_dependency "dotenv"
+
   # For more information and examples about making a new gem, check out our
   # guide at: https://bundler.io/guides/creating_gem.html
 end
data/lib/gpt/function.rb
CHANGED
@@ -2,89 +2,9 @@
 
 require "net/http"
 require "json"
-require_relative "function/
+require_relative "function/batch"
 
 module Gpt
   # A simple GPT function class
-  class Function
-    @api_key = nil
-    @model = nil
 
-    class << self
-      attr_accessor :api_key, :model
-
-      def configure(api_key:, model:)
-        @api_key = api_key
-        @model = model
-      end
-    end
-
-    def initialize(prompt, examples = [], temperature = 0)
-      @temperature = temperature
-      @messages = [
-        {
-          role: "system",
-          content: "#{prompt}\n Note: The response format is always a JSON with the key output like this:{output: ...}"
-        },
-        *examples.flat_map do |example|
-          [
-            {
-              role: "user",
-              content: example[0]
-            },
-            {
-              role: "assistant",
-              content: { output: example[1] }.to_json
-            }
-          ]
-        end
-      ]
-    end
-
-    def call(input)
-      # Send the request using the class-level variables
-      response = send_request(input)
-      body = response.body.force_encoding("UTF-8")
-      json = JSON.parse(body)
-      # Handle a possible error response
-      raise StandardError, json.dig("error", "message") if json.dig("error", "code")
-
-      # Handle the normal response
-      JSON.parse(json.dig("choices", 0, "message", "content"))["output"]
-    end
-
-    private
-
-    def send_request(input)
-      uri = URI.parse("https://api.openai.com/v1/chat/completions")
-      request = Net::HTTP::Post.new(uri)
-      request.content_type = "application/json"
-      request["Authorization"] = "Bearer #{Function.api_key}"
-      request.body = {
-        model: Function.model,
-        response_format: {
-          type: "json_object"
-        },
-        seed: 0,
-        messages: [
-          *@messages,
-          {
-            "role": "user",
-            "content": input
-          }
-        ],
-        temperature: @temperature
-      }.to_json
-
-      req_options = {
-        use_ssl: uri.scheme == "https",
-        open_timeout: 60, # opening a connection timeout
-        read_timeout: 300 # reading one block of response timeout
-      }
-
-      Net::HTTP.start(uri.hostname, uri.port, req_options) do |http|
-        http.request(request)
-      end
-    end
-  end
 end
data/lib/gpt-function.rb
CHANGED
@@ -1,4 +1,122 @@
 # frozen_string_literal: true
 
-
-
+require "net/http"
+require "json"
+
+require_relative "gpt_function/version"
+require_relative "gpt_function/file"
+require_relative "gpt_function/batch"
+require_relative "gpt_functions"
+
+class GptFunction
+  class Error < StandardError; end
+
+  @api_key = nil
+  @model = nil
+
+  class << self
+    attr_accessor :api_key, :model
+
+    def configure(api_key:, model:)
+      @api_key = api_key
+      @model = model
+    end
+  end
+
+  def initialize(prompt, examples = [], temperature = 0)
+    @temperature = temperature
+    @messages = [
+      {
+        role: "system",
+        content: "#{prompt}\n Note: The response format is always a JSON with the key output like this:{output: ...}"
+      },
+      *examples.flat_map do |example|
+        [
+          {
+            role: "user",
+            content: example[0]
+          },
+          {
+            role: "assistant",
+            content: { output: example[1] }.to_json
+          }
+        ]
+      end
+    ]
+  end
+
+  def call(input)
+    # Send the request using the class-level variables
+    response = send_request(input)
+    body = response.body.force_encoding("UTF-8")
+    json = JSON.parse(body)
+    # Handle a possible error response
+    raise StandardError, json.dig("error", "message") if json.dig("error", "code")
+
+    # Handle the normal response
+    JSON.parse(json.dig("choices", 0, "message", "content"))["output"]
+  end
+
+  def to_request_body(input)
+    {
+      model: GptFunction.model,
+      response_format: {
+        type: "json_object"
+      },
+      seed: 42,
+      messages: [
+        *@messages,
+        {
+          "role": "user",
+          "content": input
+        }
+      ],
+      temperature: @temperature
+    }
+  end
+
+  def batch(inputs, post_processor_class)
+    file_content = inputs.map.with_index do |input, index|
+      {
+        "custom_id": "request-#{index + 1}",
+        "method": "POST",
+        "url": "/v1/chat/completions",
+        "body": to_request_body(input)
+      }
+    end
+
+    batch_instance = Batch.new(GptFunction.api_key)
+    batch_id = batch_instance.request(file_content)
+    puts "Batch created with ID: #{batch_id}"
+
+    # Create a BatchRequest and enqueue a ProcessBatchJob
+    batch_request = BatchRequest.create(
+      batch_id: batch_id,
+      status: 'created',
+      total_request_counts: inputs.size,
+      completed_request_counts: 0,
+      post_processor_class: post_processor_class.to_s
+    )
+    ProcessBatchJob.perform_later(batch_request.id)
+  end
+
+  private
+
+  def send_request(input)
+    uri = URI.parse("https://api.openai.com/v1/chat/completions")
+    request = Net::HTTP::Post.new(uri)
+    request.content_type = "application/json"
+    request["Authorization"] = "Bearer #{GptFunction.api_key}"
+    request.body = to_request_body(input).to_json
+
+    req_options = {
+      use_ssl: uri.scheme == "https",
+      open_timeout: 60, # opening a connection timeout
+      read_timeout: 300 # reading one block of response timeout
+    }
+
+    Net::HTTP.start(uri.hostname, uri.port, req_options) do |http|
+      http.request(request)
+    end
+  end
+end
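The new `to_request_body` method centralizes the chat-completion payload that `call`, `batch`, and `send_request` all share. A sketch of what it produces for the gem's translation example (values follow the code above; the "banana" input is illustrative):

```ruby
GptFunction.configure(api_key: ENV["OPENAI_API_KEY"], model: "gpt-3.5-turbo-1106")

fn = GptFunction.new("Translate into Taiwanese traditional Chinese", [%w[apple 蘋果]])
fn.to_request_body("banana")
# => {
#      model: "gpt-3.5-turbo-1106",
#      response_format: { type: "json_object" },  # forces JSON output with an "output" key
#      seed: 42,                                   # fixed seed for near-deterministic results
#      messages: [...system prompt, example user/assistant pair, { role: "user", content: "banana" }...],
#      temperature: 0
#    }
```

Note that the instance-level `batch` method as added here still references `Batch.new(GptFunction.api_key)`, `BatchRequest`, and `ProcessBatchJob`, none of which this gem defines; the supported batching path is the class-level `GptFunction::Batch.create` added in lib/gpt_function/batch.rb below.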
data/lib/gpt_function/batch.rb
ADDED
@@ -0,0 +1,250 @@
+# lib/gpt_function/batch.rb
+# frozen_string_literal: true
+
+require "net/http"
+require "json"
+require "byebug"
+
+class GptFunction
+  class Batch
+    attr_reader :id
+    attr_reader :object
+    attr_reader :endpoint
+    attr_reader :errors
+    attr_reader :input_file_id
+    attr_reader :completion_window
+    attr_reader :status
+    attr_reader :output_file_id
+    attr_reader :error_file_id
+    attr_reader :created_at
+    attr_reader :in_progress_at
+    attr_reader :expires_at
+    attr_reader :finalizing_at
+    attr_reader :completed_at
+    attr_reader :failed_at
+    attr_reader :expired_at
+    attr_reader :cancelling_at
+    attr_reader :cancelled_at
+
+    attr_reader :request_counts_total
+    attr_reader :request_counts_completed
+    attr_reader :request_counts_failed
+
+    attr_reader :metadata_customer_id
+    attr_reader :metadata_batch_description
+
+    def initialize(hash)
+      @id = hash["id"]
+      @object = hash["object"]
+      @endpoint = hash["endpoint"]
+      @errors = hash["errors"]
+      @input_file_id = hash["input_file_id"]
+      @completion_window = hash["completion_window"]
+      @status = hash["status"]
+      @output_file_id = hash["output_file_id"]
+      @error_file_id = hash["error_file_id"]
+      @created_at = hash["created_at"]
+      @in_progress_at = hash["in_progress_at"]
+      @expires_at = hash["expires_at"]
+      @finalizing_at = hash["finalizing_at"]
+      @completed_at = hash["completed_at"]
+      @failed_at = hash["failed_at"]
+      @expired_at = hash["expired_at"]
+      @cancelling_at = hash["cancelling_at"]
+      @cancelled_at = hash["cancelled_at"]
+
+      @request_counts_total = hash.dig("request_counts", "total")
+      @request_counts_completed = hash.dig("request_counts", "completed")
+      @request_counts_failed = hash.dig("request_counts", "failed")
+
+      @metadata_customer_id = hash.dig("metadata", "customer_id")
+      @metadata_batch_description = hash.dig("metadata", "batch_description")
+    end
+
+    def to_hash
+      {
+        id: id,
+        object: object,
+        endpoint: endpoint,
+        errors: errors,
+        input_file_id: input_file_id,
+        completion_window: completion_window,
+        status: status,
+        output_file_id: output_file_id,
+        error_file_id: error_file_id,
+        created_at: created_at,
+        in_progress_at: in_progress_at,
+        expires_at: expires_at,
+        finalizing_at: finalizing_at,
+        completed_at: completed_at,
+        failed_at: failed_at,
+        expired_at: expired_at,
+        cancelling_at: cancelling_at,
+        cancelled_at: cancelled_at,
+        request_counts_total: request_counts_total,
+        request_counts_completed: request_counts_completed,
+        request_counts_failed: request_counts_failed,
+        metadata_customer_id: metadata_customer_id,
+        metadata_batch_description: metadata_batch_description,
+      }
+    end
+
+    def to_s
+      to_hash.to_json
+    end
+
+    def inspect
+      to_hash.to_json
+    end
+
+    def input_file
+      @input_file ||= File.from_id(input_file_id)
+    end
+
+    def output_file
+      @output_file ||= File.from_id(output_file_id)
+    end
+
+    def input_jsonl
+      @input_jsonl ||= input_file.jsonl
+    end
+
+    def output_jsonl
+      @output_jsonl ||= output_file.jsonl
+    end
+
+    def inputs
+      @inputs ||= input_jsonl.map do |hash|
+        {
+          "custom_id" => hash.dig("custom_id"),
+          "content" => hash.dig("body", "messages", -1, "content")
+        }
+      end
+    end
+
+    def outputs
+      @outputs ||= output_jsonl.map do |hash|
+        content = hash.dig("response", "body", "choices", 0, "message", "content")
+        content = JSON.parse(content)["output"] rescue content
+        {
+          "custom_id" => hash.dig("custom_id"),
+          "content" => content
+        }
+      end
+    end
+
+    def pairs
+      hash = {}
+      inputs.each do |input|
+        hash[input["custom_id"]] = {
+          "input" => input["content"],
+        }
+      end
+      outputs.each do |output|
+        hash[output["custom_id"]]["output"] = output["content"]
+      end
+      hash.values
+    end
+
+    def cancel
+      Batch.cancel(id)
+    end
+
+    class << self
+      def list(limit: 20, after: nil)
+        # List batches
+        uri = URI('https://api.openai.com/v1/batches')
+        request = Net::HTTP::Get.new(uri, 'Content-Type' => 'application/json')
+        request['Authorization'] = "Bearer #{GptFunction.api_key}"
+
+        response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) do |http|
+          http.request(request)
+        end
+
+        raise "Batch creation failed: #{response.body}" unless response.is_a?(Net::HTTPSuccess)
+
+        body_hash = JSON.parse(response.body)
+        body_hash.dig("data").map do |hash|
+          Batch.new(hash)
+        end
+      end
+
+      def create(requests)
+        requests = requests.each_with_index.map do |request, index|
+          {
+            custom_id: "request-#{index + 1}",
+            method: "POST",
+            url: "/v1/chat/completions",
+            body: request,
+          }
+        end
+
+        # Upload the data
+        file = File.create(requests)
+
+        # Create the batch request
+        uri = URI('https://api.openai.com/v1/batches')
+        request = Net::HTTP::Post.new(uri, 'Content-Type' => 'application/json')
+        request['Authorization'] = "Bearer #{GptFunction.api_key}"
+        request.body = {
+          input_file_id: file.id,
+          endpoint: '/v1/chat/completions',
+          completion_window: '24h'
+        }.to_json
+
+        response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) do |http|
+          http.request(request)
+        end
+
+        raise "Batch creation failed: #{response.body}" unless response.is_a?(Net::HTTPSuccess)
+
+        hash = JSON.parse(response.body)
+        Batch.new(hash)
+      rescue => e
+        file&.delete
+        raise e
+      end
+
+      def from_id(batch_id)
+        # Check the batch status
+        uri = URI("https://api.openai.com/v1/batches/#{batch_id}")
+        request = Net::HTTP::Get.new(uri)
+        request['Authorization'] = "Bearer #{GptFunction.api_key}"
+        request['Content-Type'] = 'application/json'
+
+        response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) do |http|
+          http.request(request)
+        end
+
+        raise "Batch status check failed: #{response.body}" unless response.is_a?(Net::HTTPSuccess)
+
+        hash = JSON.parse(response.body)
+        Batch.new(hash)
+      end
+
+      def cancel(batch_id)
+        uri = URI("https://api.openai.com/v1/batches/#{batch_id}/cancel")
+        request = Net::HTTP::Post.new(uri)
+        request['Authorization'] = "Bearer #{GptFunction.api_key}"
+        request['Content-Type'] = 'application/json'
+
+        response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) do |http|
+          http.request(request)
+        end
+
+        # {
+        #   "error": {
+        #     "message": "Cannot cancel a batch with status 'completed'.",
+        #     "type": "invalid_request_error",
+        #     "param": null,
+        #     "code": null
+        #   }
+        # }
+        raise "Batch cancel failed: #{response.body}" unless response.is_a?(Net::HTTPSuccess)
+
+        response.body
+      end
+
+    end
+  end
+end
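A sketch (not from the gem's documentation) of driving `GptFunction::Batch` end to end with the class methods above; it assumes `GptFunction.configure` has already run and reuses `GptFunction#to_request_body` for the per-request bodies:

```ruby
fn = GptFunction.new("Translate into Taiwanese traditional Chinese", [%w[apple 蘋果]])

# Batch.create wraps each body in a {custom_id, method, url, body} request,
# uploads them as a JSONL file, and starts a 24h batch.
batch = GptFunction::Batch.create(["banana", "orange"].map { |word| fn.to_request_body(word) })

# Poll until the batch finishes, then pair each input with its output.
batch = GptFunction::Batch.from_id(batch.id)
p batch.pairs if batch.status == "completed"  # => [{ "input" => "banana", "output" => "..." }, ...]
```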
data/lib/gpt_function/file.rb
ADDED
@@ -0,0 +1,205 @@
+# lib/gpt_function/file.rb
+# frozen_string_literal: true
+
+require "net/http"
+require "json"
+
+class GptFunction
+  class File
+    attr_reader :object
+    attr_reader :id
+    attr_reader :purpose
+    attr_reader :filename
+    attr_reader :bytes
+    attr_reader :created_at
+    attr_reader :status
+    attr_reader :status_details
+
+    def initialize(hash)
+      @object = hash["object"]
+      @id = hash["id"]
+      @purpose = hash["purpose"]
+      @filename = hash["filename"]
+      @bytes = hash["bytes"]
+      @created_at = hash["created_at"]
+      @status = hash["status"]
+      @status_details = hash["status_details"]
+    end
+
+    def to_hash
+      {
+        object: object,
+        id: id,
+        purpose: purpose,
+        filename: filename,
+        bytes: bytes,
+        created_at: created_at,
+        status: status,
+        status_details: status_details,
+      }
+    end
+
+    def content
+      File.content(id)
+    end
+
+    def jsonl
+      File.jsonl(id)
+    end
+
+    def delete
+      File.delete(id)
+    end
+
+    def to_s
+      to_hash.to_json
+    end
+
+    def inspect
+      to_hash.to_json
+    end
+
+    class << self
+      def list
+        uri = URI("https://api.openai.com/v1/files")
+        request = Net::HTTP::Get.new(uri)
+        request['Authorization'] = "Bearer #{GptFunction.api_key}"
+
+        response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) do |http|
+          http.request(request)
+        end
+
+        raise "File retrieval failed: #{response.body}" unless response.is_a?(Net::HTTPSuccess)
+
+        # example response body
+        # {
+        #   "object": "list",
+        #   "data": [
+        #     {
+        #       "object": "file",
+        #       "id": "file-uYu4HIFAoq0OeZDGBD5Ci8wL",
+        #       "purpose": "batch_output",
+        #       "filename": "batch_YMZbhJWcBYETMTfOfEf041qF_output.jsonl",
+        #       "bytes": 1934,
+        #       "created_at": 1722327874,
+        #       "status": "processed",
+        #       "status_details": null
+        #     },
+        #     {
+        #       "object": "file",
+        #       "id": "file-5AW0tCvRFKomu5s5G90yfWhs",
+        #       "purpose": "batch",
+        #       "filename": "batchinput.jsonl",
+        #       "bytes": 728,
+        #       "created_at": 1722327858,
+        #       "status": "processed",
+        #       "status_details": null
+        #     },
+        #   ]
+        # }
+        body_hash = JSON.parse(response.body)
+        body_hash.dig("data").map do |hash|
+          File.new(hash)
+        end
+      end
+
+      def create(hash_array)
+        # Convert the request data into a JSONL-format string
+        jsonl = hash_array.map(&:to_json).join("\n")
+
+        # Upload the data
+        uri = URI('https://api.openai.com/v1/files')
+        request = Net::HTTP::Post.new(uri)
+        request['Authorization'] = "Bearer #{GptFunction.api_key}"
+        request['Content-Type'] = 'multipart/form-data'
+
+        # Build the multipart form data
+        boundary = "CustomBoundary"
+        post_body = []
+        post_body << "--#{boundary}\r\n"
+        post_body << "Content-Disposition: form-data; name=\"purpose\"\r\n\r\n"
+        post_body << "batch\r\n"
+        post_body << "--#{boundary}\r\n"
+        post_body << "Content-Disposition: form-data; name=\"file\"; filename=\"batchinput.jsonl\"\r\n"
+        post_body << "Content-Type: application/json\r\n\r\n"
+        post_body << jsonl
+        post_body << "\r\n--#{boundary}--\r\n"
+
+        request.body = post_body.join
+        request['Content-Type'] = "multipart/form-data; boundary=#{boundary}"
+
+        response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) do |http|
+          http.request(request)
+        end
+
+        raise "File upload failed: #{response.body}" unless response.is_a?(Net::HTTPSuccess)
+
+        hash = JSON.parse(response.body)
+        File.new(hash)
+      end
+
+      def from_id(file_id)
+        uri = URI("https://api.openai.com/v1/files/#{file_id}")
+        request = Net::HTTP::Get.new(uri)
+        request['Authorization'] = "Bearer #{GptFunction.api_key}"
+
+        response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) do |http|
+          http.request(request)
+        end
+
+        raise "File retrieval failed: #{response.body}" unless response.is_a?(Net::HTTPSuccess)
+
+        hash = JSON.parse(response.body)
+        File.new(hash)
+      end
+
+      def content(file_id)
+        uri = URI("https://api.openai.com/v1/files/#{file_id}/content")
+        request = Net::HTTP::Get.new(uri)
+        request['Authorization'] = "Bearer #{GptFunction.api_key}"
+
+        response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) do |http|
+          http.request(request)
+        end
+
+        # {
+        #   "error": {
+        #     "message": "No such File object: #{file_id}",
+        #     "type": "invalid_request_error",
+        #     "param": "id",
+        #     "code": null
+        #   }
+        # }
+        raise "File retrieval failed: #{response.body}" unless response.is_a?(Net::HTTPSuccess)
+        response.body
+      end
+
+      def jsonl(file_id)
+        content(file_id).split("\n").map { |line| JSON.parse(line) }
+      end
+
+      def delete(file_id)
+        uri = URI("https://api.openai.com/v1/files/#{file_id}")
+        request = Net::HTTP::Delete.new(uri)
+        request['Authorization'] = "Bearer #{GptFunction.api_key}"
+
+        response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) do |http|
+          http.request(request)
+        end
+
+        # {
+        #   "error": {
+        #     "message": "No such File object: file-5m1Cn4M36GOfd7bEVAoTCmcC",
+        #     "type": "invalid_request_error",
+        #     "param": "id",
+        #     "code": null
+        #   }
+        # }
+        raise "File deletion failed: #{response.body}" unless response.is_a?(Net::HTTPSuccess)
+
+        # {"object"=>"file", "deleted"=>true, "id"=>"file-vsCH6lJkiFzi6gF9B8un3ZLT"}
+        JSON.parse(response.body)
+      end
+    end
+  end
+end
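`GptFunction::File` is the thin wrapper around the OpenAI Files API that `Batch` builds on. A small sketch of the helpers above, again assuming `GptFunction.configure` has run; the request content is illustrative:

```ruby
fn = GptFunction.new("Extract all keywords")
requests = [{ custom_id: "request-1", method: "POST", url: "/v1/chat/completions",
              body: fn.to_request_body("臺北市政府推動綠色交通計劃") }]

file = GptFunction::File.create(requests)   # uploads a "batch"-purpose JSONL file
p file.jsonl                                # parses the JSONL back into an array of hashes
p GptFunction::File.list.map(&:filename)    # filenames visible to this API key
file.delete                                 # => {"object"=>"file", "deleted"=>true, ...}
```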
data/lib/gpt_functions.rb
ADDED
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+# A collection of simple built-in GPT functions
+module GptFunctions
+  class << self
+    def 翻譯成中文
+      GptFunction.new("Translate into Taiwanese traditional Chinese", [%w[apple 蘋果]])
+    end
+
+    def 擷取關鍵字
+      GptFunction.new("Extract all keywords",
+                      [
+                        [
+                          "臺灣最新5G網路覆蓋率達95%,推動智慧城市發展,領先亞洲多國",
+                          ["臺灣", "5G網路", "覆蓋率", "95%", "智慧城市", "發展", "領先", "亞洲", "多國"]
+                        ]
+                      ]
+      )
+    end
+
+    def 擷取文章標題
+      document = <<~DOCUMENT
+        今日頭條
+        科技日報|臺灣科技業最新突破,AI技術大躍進
+        科技日報
+        科技日報
+        2023-11-17
+        102
+        生活新聞|臺北市最新公共交通計畫公開
+        生活日報
+        生活日報
+        2023-11-16
+        89
+        健康專欄|最新研究:日常運動對心臟健康的重要性
+        健康雜誌
+        健康雜誌
+        2023-11-15
+        76
+        旅遊特輯|探索臺灣東部的隱藏美食與景點
+        旅遊週刊
+        旅遊週刊
+        2023-11-14
+        65
+      DOCUMENT
+
+      keywords = [
+        "科技日報|臺灣科技業最新突破,AI技術大躍進",
+        "生活新聞|臺北市最新公共交通計畫公開",
+        "健康專欄|最新研究:日常運動對心臟健康的重要性",
+        "旅遊特輯|探索臺灣東部的隱藏美食與景點"
+      ]
+
+      GptFunction.new("Extract all titles", [[document, keywords]])
+    end
+  end
+end
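Unlike the removed `Gpt::Functions` (see the deleted file at the end of this diff), these helpers no longer take the input themselves; each returns a `GptFunction` that you then call. A short sketch, assuming `GptFunction.configure` has already been called:

```ruby
GptFunction.configure(api_key: ENV["OPENAI_API_KEY"], model: "gpt-3.5-turbo-1106")

p GptFunctions.翻譯成中文.call("banana")                   # => "香蕉" (example from the README)
p GptFunctions.擷取關鍵字.call("臺北市政府推動綠色交通計劃")  # => an array of keywords
```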
metadata
CHANGED
@@ -1,15 +1,29 @@
 --- !ruby/object:Gem::Specification
 name: gpt-function
 version: !ruby/object:Gem::Version
-  version: 0.2.0
+  version: 0.3.0
 platform: ruby
 authors:
 - etrex kuo
 autorequire:
 bindir: exe
 cert_chain: []
-date:
-dependencies:
+date: 2024-08-02 00:00:00.000000000 Z
+dependencies:
+- !ruby/object:Gem::Dependency
+  name: dotenv
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
 description: This gem allows users to create simple and complex GPT functions for
   various applications such as translation and keyword extraction.
 email:
@@ -28,8 +42,10 @@ files:
 - gpt-function.gemspec
 - lib/gpt-function.rb
 - lib/gpt/function.rb
-- lib/
-- lib/
+- lib/gpt_function/batch.rb
+- lib/gpt_function/file.rb
+- lib/gpt_function/version.rb
+- lib/gpt_functions.rb
 - workflows/main.yml
 homepage: https://github.com/etrex/gpt-function
 licenses:
@@ -52,7 +68,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
 - !ruby/object:Gem::Version
   version: '0'
 requirements: []
-rubygems_version: 3.
+rubygems_version: 3.5.6
 signing_key:
 specification_version: 4
 summary: A Ruby gem for creating simple GPT-based functions.
data/lib/gpt/function/version.rb
DELETED
data/lib/gpt/functions.rb
DELETED
@@ -1,57 +0,0 @@
-# frozen_string_literal: true
-
-module Gpt
-  # A simple set of GPT functions
-  module Functions
-    class << self
-      def 翻譯成中文(input)
-        Gpt::Function.new("Translate into Taiwanese traditional Chinese", [%w[apple 蘋果]]).call(input)
-      end
-
-      def 擷取關鍵字(input)
-        Gpt::Function.new("Extract all keywords",
-                          [
-                            [
-                              "臺灣最新5G網路覆蓋率達95%,推動智慧城市發展,領先亞洲多國",
-                              %w[臺灣 5G網路 覆蓋率 智慧城市 亞洲]
-                            ]
-                          ]).call(input)
-      end
-
-      def 擷取文章標題(input)
-        document = <<~DOCUMENT
-          今日頭條
-          科技日報|臺灣科技業最新突破,AI技術大躍進
-          科技日報
-          科技日報
-          2023-11-17
-          102
-          生活新聞|臺北市最新公共交通計畫公開
-          生活日報
-          生活日報
-          2023-11-16
-          89
-          健康專欄|最新研究:日常運動對心臟健康的重要性
-          健康雜誌
-          健康雜誌
-          2023-11-15
-          76
-          旅遊特輯|探索臺灣東部的隱藏美食與景點
-          旅遊週刊
-          旅遊週刊
-          2023-11-14
-          65
-        DOCUMENT
-
-        keywords = [
-          "科技日報|臺灣科技業最新突破,AI技術大躍進",
-          "生活新聞|臺北市最新公共交通計畫公開",
-          "健康專欄|最新研究:日常運動對心臟健康的重要性",
-          "旅遊特輯|探索臺灣東部的隱藏美食與景點"
-        ]
-
-        Gpt::Function.new("Extract all titles", [[document, keywords]]).call(input)
-      end
-    end
-  end
-end