copilot2gpt 0.1.0 → 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 32606e9a7161ded307d99a403172d093d146430c4babbab94ae676954df475fa
- data.tar.gz: 3d437a887449e4ae990680b1ccd727725b6aea6050e258b670ef0b65afad6d54
+ metadata.gz: 2b2b16e1ad8aee613de39b65d97dc975098a35f6ef010ef4a2a48149b34973f5
+ data.tar.gz: c74ff1d7a85c3dcc89673aeed18bc26774951754ce60f65871998b15a1879ac3
  SHA512:
- metadata.gz: d3ca1d79ef92340459b286c8a4cca4dd29f8ba120238989d0ba2c59f5f6afa49a77d25e17a02ad3813338ab299fd07e3548875cfc8ce7e001e7e1fdbbeb0ca8f
- data.tar.gz: 5a408834c49b8349d4a12b765d28e79033f8e7cd9894c8454f93a42663c733df84e424e5e2663a7bae55caf419d846732c025edb5f8912f0f71f8a4e0d9e601b
+ metadata.gz: 25b146a0196c5f369ab81995deb4bd164319e450f362652f135315b95c126f1967ed85873c654e796155cb63293a3ba76747c601fbcc6fe4a2d97d3bd49a3b62
+ data.tar.gz: ddefe77421e5c3558cbd9185e9e82c1706714745a0dda727194028c872029fec3633de647f5886cac4c9ee4fd699a11d3ae40ab70033f54aee6619bb2b7f8c23
data/Dockerfile CHANGED
@@ -6,8 +6,6 @@ RUN gem install copilot2gpt

  WORKDIR /

- EXPOSE 4567
+ EXPOSE 8080

- ENTRYPOINT ["copilot2gpt"]
-
- CMD ["-p", "4567"]
+ ENTRYPOINT ["copilot2gpt"]
data/Gemfile.lock CHANGED
@@ -1,7 +1,7 @@
  PATH
  remote: .
  specs:
- copilot2gpt (0.1.0)
+ copilot2gpt (0.1.1)
  activesupport (~> 7.0.0)
  faraday (~> 2.8.1)
  puma (~> 6.3.0)
data/exe/copilot2gpt CHANGED
@@ -1,6 +1,5 @@
  #!/usr/bin/env ruby
  # frozen_string_literal: true

- require "sinatra"
  require "copilot2gpt"
  require "copilot2gpt/app"
data/lib/copilot2gpt/app.rb CHANGED
@@ -1,179 +1,146 @@
  require 'copilot2gpt/token'
+ require 'copilot2gpt/chat_request'
  require 'faraday'
  require 'json'
  require 'active_support/all'
+ require "sinatra/base"

- MODEL = { "id": "gpt-4", "object": "model", "created": 1687882411, "owned_by": "openai" }
+ module Copilot2GPT
+ class App < Sinatra::Base

- before do
- headers 'Access-Control-Allow-Origin' => '*',
- 'Access-Control-Allow-Methods' => ['OPTIONS', 'GET', 'POST'],
- 'Access-Control-Allow-Headers' => 'Content-Type'
- halt 200 if request.request_method == 'OPTIONS'
- @user_agent = request.env['HTTP_USER_AGENT'].to_s
- @chatbox = @user_agent.include?('chatbox')
- end
+ MODEL = { "id": "gpt-4", "object": "model", "created": 1687882411, "owned_by": "openai" }

- get('/openai/models') do
- {
- object: 'list',
- data: [MODEL]
- }.to_json
- end
+ set :bind, '0.0.0.0'
+ set :port, 8080

- post('/openai/chat/completions') do
- @mock_ai_gateway = true
- complete
- end
+ before do
+ headers 'Access-Control-Allow-Origin' => '*',
+ 'Access-Control-Allow-Methods' => ['OPTIONS', 'GET', 'POST'],
+ 'Access-Control-Allow-Headers' => 'Content-Type'
+ halt 200 if request.request_method == 'OPTIONS'
+ @user_agent = request.env['HTTP_USER_AGENT'].to_s
+ @chatbox = @user_agent.include?('chatbox')
+ end

- post('/v1/chat/completions') do
- complete
- end
+ get('/openai/models') do
+ {
+ object: 'list',
+ data: [MODEL]
+ }.to_json
+ end

- def complete
- github_token = request.env['HTTP_AUTHORIZATION'].to_s.sub('Bearer ', '')
- if github_token.empty?
- halt 401, {'Content-Type' => 'application/json'}, {:message => 'Unauthorized'}.to_json
- end
- @copilot_token = Copilot2gpt::Token.get_copilot_token(github_token)
- content = params['content']
- url = "https://api.githubcopilot.com/chat/completions"
- chat_request = ChatRequest.with_default(content, JSON.parse(request.body.read, symbolize_names: true))
- conn = Faraday.new(url: url)
+ post('/openai/chat/completions') do
+ @mock_ai_gateway = true
+ complete
+ end

- if !chat_request.one_time_return
- stream do |response_stream|
- resp = conn.post do |req|
- req.headers = build_headers(@copilot_token)
- req.body = chat_request.to_json
- buffered_line = ""
- req.options.on_data = Proc.new do |chunk, overall_received_bytes, env|
- chunk.each_line do |line|
- line.chomp!
- next unless line.present?
- if line.start_with?("data: ")
- buffered_line = line
- message = JSON.parse(line.sub(/^data: /, '')) rescue next
- else
- buffered_line += line
- message = JSON.parse(buffered_line.sub(/^data: /, '')) rescue next
- end
- message = message.with_indifferent_access
- if @chatbox
- message[:choices].select! do |choice|
- choice.dig(:delta, :content)
+ post('/v1/chat/completions') do
+ complete
+ end
+
+ def complete
+ github_token = request.env['HTTP_AUTHORIZATION'].to_s.sub('Bearer ', '')
+ if github_token.empty?
+ halt 401, {'Content-Type' => 'application/json'}, {:message => 'Unauthorized'}.to_json
+ end
+ @copilot_token = Copilot2gpt::Token.get_copilot_token(github_token)
+ content = params['content']
+ url = "https://api.githubcopilot.com/chat/completions"
+ chat_request = Copilot2GPT::ChatRequest.with_default(content, JSON.parse(request.body.read, symbolize_names: true))
+ conn = Faraday.new(url: url)
+
+ if !chat_request.one_time_return
+ stream do |response_stream|
+ resp = conn.post do |req|
+ req.headers = build_headers(@copilot_token)
+ req.body = chat_request.to_json
+ buffered_line = ""
+ req.options.on_data = Proc.new do |chunk, overall_received_bytes, env|
+ chunk.each_line do |line|
+ line.chomp!
+ next unless line.present?
+ if line.start_with?("data: ")
+ buffered_line = line
+ message = JSON.parse(line.sub(/^data: /, '')) rescue next
+ else
+ buffered_line += line
+ message = JSON.parse(buffered_line.sub(/^data: /, '')) rescue next
+ end
+ message = message.with_indifferent_access
+ if @chatbox
+ message[:choices].select! do |choice|
+ choice.dig(:delta, :content)
+ end
+ next unless message[:choices].any?
+ end
+ if @mock_ai_gateway
+ message.merge!(object: "chat.completion.chunk", model: "gpt-4")
+ end
+ message_json = message.to_json + "\n\n"
+ message_json = "data: " + message_json unless @mock_ai_gateway
+ response_stream << message_json
  end
- next unless message[:choices].any?
  end
- if @mock_ai_gateway
- message.merge!(object: "chat.completion.chunk", model: "gpt-4")
- end
- message_json = message.to_json + "\n\n"
- message_json = "data: " + message_json unless @mock_ai_gateway
- response_stream << message_json
  end
- end
- end

- if resp.status != 200
- halt resp.status, {'Content-Type' => 'application/json'}, {:error => resp.body}.to_json
- return
- end
- end
- else
- resp = conn.post do |req|
- req.headers = build_headers(@copilot_token)
- req.body = chat_request.to_json
- end
+ if resp.status != 200
+ halt resp.status, {'Content-Type' => 'application/json'}, {:error => resp.body}.to_json
+ return
+ end
+ end
+ else
+ resp = conn.post do |req|
+ req.headers = build_headers(@copilot_token)
+ req.body = chat_request.to_json
+ end

- if resp.status != 200
- halt resp.status, {'Content-Type' => 'application/json'}, {:error => resp.body}.to_json
- return
- end
+ if resp.status != 200
+ halt resp.status, {'Content-Type' => 'application/json'}, {:error => resp.body}.to_json
+ return
+ end

- buffer = ""
- res.body.each_line do |line|
- if line.start_with?("data: ")
- data = line.sub("data: ", "")
- obj = JSON.parse(data)
- if obj.key?("choices") && obj["choices"].is_a?(Array) && !obj["choices"].empty?
- choice = obj["choices"][0]
- if choice.is_a?(Hash) && choice.key?("delta") && choice["delta"].is_a?(Hash)
- delta = choice["delta"]
- if delta.key?("content") && delta["content"].is_a?(String)
- buffer += delta["content"]
+ buffer = ""
+ res.body.each_line do |line|
+ if line.start_with?("data: ")
+ data = line.sub("data: ", "")
+ obj = JSON.parse(data)
+ if obj.key?("choices") && obj["choices"].is_a?(Array) && !obj["choices"].empty?
+ choice = obj["choices"][0]
+ if choice.is_a?(Hash) && choice.key?("delta") && choice["delta"].is_a?(Hash)
+ delta = choice["delta"]
+ if delta.key?("content") && delta["content"].is_a?(String)
+ buffer += delta["content"]
+ end
+ end
  end
  end
  end
+ return [200, {'Content-Type' => 'text/event-stream; charset=utf-8'}, buffer]
  end
  end
- return [200, {'Content-Type' => 'text/event-stream; charset=utf-8'}, buffer]
- end
- end
-
-
- class ChatRequest
- attr_accessor :messages, :model, :temperature, :top_p, :n, :stream, :intent, :one_time_return

- def initialize(args)
- @messages = args[:messages]
- @model = args[:model]
- @temperature = args[:temperature]
- @top_p = args[:top_p]
- @n = args[:n]
- @stream = args[:stream]
- @intent = args[:intent]
- @one_time_return = args[:one_time_return]
- end
-
- def to_json
- {
- messages: @messages,
- model: @model,
- temperature: @temperature,
- top_p: @top_p,
- n: @n,
- stream: @stream,
- intent: @intent,
- one_time_return: @one_time_return
- }.to_json
- end
-
- class << self
- def with_default(content, params)
- default = {
- messages: [
- {"role" => "system", "content" => "\nYou are ChatGPT, a large language model trained by OpenAI.\nKnowledge cutoff: 2021-09\nCurrent model: gpt-4\nCurrent time: 2023/11/7 11: 39: 14\n"},
- {"role" => "user", "content" => content}
- ],
- model: "gpt-4", temperature: 0.5,
- top_p: 1, n: 1,
- stream: true, intent: true,
- one_time_return: false
- }.merge(params)
- new(default)
+ def gen_hex_str(length)
+ SecureRandom.hex(length / 2)
  end
- end
- end

- def gen_hex_str(length)
- SecureRandom.hex(length / 2)
- end
+ def build_headers(copilot_token)
+ {
+ "Authorization" => "Bearer #{copilot_token}",
+ "X-Request-Id" => "#{gen_hex_str(8)}-#{gen_hex_str(4)}-#{gen_hex_str(4)}-#{gen_hex_str(4)}-#{gen_hex_str(12)}",
+ "Vscode-Sessionid" => "#{gen_hex_str(8)}-#{gen_hex_str(4)}-#{gen_hex_str(4)}-#{gen_hex_str(4)}-#{gen_hex_str(25)}",
+ "Vscode-Machineid" => gen_hex_str(64),
+ "Editor-Version" => "vscode/1.83.1",
+ "Editor-Plugin-Version" => "copilot-chat/0.8.0",
+ "Openai-Organization" => "github-copilot",
+ "Openai-Intent" => "conversation-panel",
+ "Content-Type" => "text/event-stream; charset=utf-8",
+ "User-Agent" => "GitHubCopilotChat/0.8.0",
+ "Accept" => "*/*",
+ "Accept-Encoding" => "gzip,deflate,br",
+ "Connection" => "close"
+ }
+ end

- def build_headers(copilot_token)
- {
- "Authorization" => "Bearer #{copilot_token}",
- "X-Request-Id" => "#{gen_hex_str(8)}-#{gen_hex_str(4)}-#{gen_hex_str(4)}-#{gen_hex_str(4)}-#{gen_hex_str(12)}",
- "Vscode-Sessionid" => "#{gen_hex_str(8)}-#{gen_hex_str(4)}-#{gen_hex_str(4)}-#{gen_hex_str(4)}-#{gen_hex_str(25)}",
- "Vscode-Machineid" => gen_hex_str(64),
- "Editor-Version" => "vscode/1.83.1",
- "Editor-Plugin-Version" => "copilot-chat/0.8.0",
- "Openai-Organization" => "github-copilot",
- "Openai-Intent" => "conversation-panel",
- "Content-Type" => "text/event-stream; charset=utf-8",
- "User-Agent" => "GitHubCopilotChat/0.8.0",
- "Accept" => "*/*",
- "Accept-Encoding" => "gzip,deflate,br",
- "Connection" => "close"
- }
+ run!
+ end
  end
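
The rewritten app wraps the previously top-level Sinatra routes in Copilot2GPT::App, binds to 0.0.0.0:8080 by default, and delegates payload construction to the new Copilot2GPT::ChatRequest class. (The non-streaming branch still reads res.body where resp appears to be intended; that looks carried over from 0.1.0.) A minimal sketch of calling the proxy once it is running; the host, port, and GITHUB_TOKEN environment variable are assumptions, only the paths and the Bearer-token scheme come from the diff above:

    require "faraday"
    require "json"

    # Hypothetical client call against a locally running copilot2gpt proxy.
    conn = Faraday.new(url: "http://localhost:8080")

    response = conn.post("/v1/chat/completions") do |req|
      # The proxy exchanges this GitHub token for a Copilot token upstream.
      req.headers["Authorization"] = "Bearer #{ENV.fetch('GITHUB_TOKEN')}"
      req.headers["Content-Type"]  = "application/json"
      req.body = {
        model: "gpt-4",
        messages: [{ "role" => "user", "content" => "Hello" }],
        stream: true
      }.to_json
    end

    puts response.status
    puts response.body   # SSE-style "data: {...}" chunks on success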
data/lib/copilot2gpt/chat_request.rb ADDED
@@ -0,0 +1,45 @@
+ module Copilot2GPT
+ class ChatRequest
+ attr_accessor :messages, :model, :temperature, :top_p, :n, :stream, :intent, :one_time_return
+
+ def initialize(args)
+ @messages = args[:messages]
+ @model = args[:model]
+ @temperature = args[:temperature]
+ @top_p = args[:top_p]
+ @n = args[:n]
+ @stream = args[:stream]
+ @intent = args[:intent]
+ @one_time_return = args[:one_time_return]
+ end
+
+ def to_json
+ {
+ messages: @messages,
+ model: @model,
+ temperature: @temperature,
+ top_p: @top_p,
+ n: @n,
+ stream: @stream,
+ intent: @intent,
+ one_time_return: @one_time_return
+ }.to_json
+ end
+
+ class << self
+ def with_default(content, params)
+ default = {
+ messages: [
+ {"role" => "system", "content" => "\nYou are ChatGPT, a large language model trained by OpenAI.\nKnowledge cutoff: 2021-09\nCurrent model: gpt-4\nCurrent time: 2023/11/7 11: 39: 14\n"},
+ {"role" => "user", "content" => content}
+ ],
+ model: "gpt-4", temperature: 0.5,
+ top_p: 1, n: 1,
+ stream: true, intent: true,
+ one_time_return: false
+ }.merge(params)
+ new(default)
+ end
+ end
+ end
+ end
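
The new file extracts the request model that previously lived at the bottom of app.rb: caller-supplied parameters are merged over the defaults, and #to_json renders the body sent upstream. A small illustrative use, not taken from the gem itself:

    require "json"
    require "copilot2gpt/chat_request"

    # Defaults (gpt-4, streaming, system prompt) come from with_default;
    # the hash argument overrides individual fields.
    chat = Copilot2GPT::ChatRequest.with_default(
      "Explain Ruby blocks in one sentence.",
      { temperature: 0.2, one_time_return: true }
    )

    chat.one_time_return   # => true
    payload = chat.to_json # JSON string POSTed to api.githubcopilot.com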
data/lib/copilot2gpt/version.rb CHANGED
@@ -1,5 +1,5 @@
  # frozen_string_literal: true

  module Copilot2gpt
- VERSION = "0.1.0"
+ VERSION = "0.1.1"
  end
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: copilot2gpt
  version: !ruby/object:Gem::Version
- version: 0.1.0
+ version: 0.1.1
  platform: ruby
  authors:
  - Liu Xiang
@@ -86,6 +86,7 @@ files:
  - exe/copilot2gpt
  - lib/copilot2gpt.rb
  - lib/copilot2gpt/app.rb
+ - lib/copilot2gpt/chat_request.rb
  - lib/copilot2gpt/test.rb
  - lib/copilot2gpt/token.rb
  - lib/copilot2gpt/version.rb