copilot2gpt 0.1.0 → 0.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/Dockerfile +3 -5
- data/Gemfile.lock +1 -1
- data/README.md +51 -20
- data/exe/copilot2gpt +0 -1
- data/lib/copilot2gpt/app.rb +130 -158
- data/lib/copilot2gpt/chat_request.rb +45 -0
- data/lib/copilot2gpt/version.rb +1 -1
- metadata +3 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: da23dae0f4ea1932a9cfc08fb8fe479be3ef082bf505354d606506e3b27511b2
+  data.tar.gz: eca989b27b347b678a1f8013ac2ee18228c6e2acfef33f79e588ca68014c473d
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 2ea78b0aa79bd8cf5cb0d7297c9cfaabf58c4b4960fcf6ee96435df4e87c9c9b9e1a920c5f027c7fde0b4a6aa91f8b14c896c957d326ca8f59e5c0c84095c4b7
+  data.tar.gz: be6ec3f22729aea3d15ad35065b2612ceea3017021ba0c7c7e5385eae382200599847a836b9002878ca7938fd6edc2097e85a7d22abfce95c9704baad2acd1ec
data/Dockerfile
CHANGED
@@ -1,4 +1,4 @@
-FROM
+FROM rubylang/ruby:3.2
 
 LABEL maintainer="Liu Xiang<liuxiang921@gmail.com>"
 
@@ -6,8 +6,6 @@ RUN gem install copilot2gpt
 
 WORKDIR /
 
-EXPOSE
+EXPOSE 8080
 
-ENTRYPOINT ["copilot2gpt"]
-
-CMD ["-p", "4567"]
+ENTRYPOINT ["copilot2gpt"]
data/Gemfile.lock
CHANGED
data/README.md
CHANGED
@@ -1,39 +1,70 @@
-#
+# Copilot 2 GPT-4
 
-
+This is a Sinatra-based application that serves as a bridge between GitHub Copilot and GPT-4 model. It provides endpoints to interact with the GPT-4 model and handles authorization using GitHub tokens.
 
-
+## Getting Started
 
-
+These instructions will get you a copy of the project up and running on your local machine for development and testing purposes.
 
-
+### Prerequisites
 
-
+You need to have Ruby installed on your machine. You can check if you have Ruby installed by running:
 
-
+```bash
+ruby -v
+```
 
-
+### Installing
 
-
+Clone the repository:
 
-
+```bash
+git clone https://github.com/lululau/copilot2gpt.git
+cd copilot2gpt
+```
 
-
+Install the required gems:
 
-
+```bash
+bundle install
+```
 
-
+### Running the application
 
-
+You can start the application by running:
 
-
+```bash
+bundle exec exe/copilot2gpt
+```
 
-
+The application will start on port 8080.
 
-##
+## API Endpoints
+
+The application provides the following endpoints:
+
+- `GET /openai/models`: Returns a list of available models.
+- `POST /openai/chat/completions`: Mocks a Cloudflare AI Gateway API
+- `POST /v1/chat/completions`: Completes a chat with the AI model.
+
+## Docker
+
+This application can also be run in a Docker container. Build the Docker image by running:
 
-
+```bash
+docker build -t your-image-name .
+```
 
-
+Then, you can start the application in a Docker container by running:
+
+```bash
+docker run -p 8080:8080 your-image-name
+```
+
+## Contributing
+
+Please read [CONTRIBUTING.md](https://github.com/lululau/copilot2gpt/blob/main/CONTRIBUTING.md) for details on our code of conduct, and the process for submitting pull requests to us.
+
+## License
 
-
+This project is licensed under the MIT License - see the [LICENSE.md](https://github.com/lululau/copilot2gpt/blob/main/LICENSE.md) file for details.
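Taken together, the new README and the `complete` route in `app.rb` define the request shape for these endpoints. The sketch below is illustrative only and is not part of the package: it assumes the server is already running locally on port 8080 and that the `GH_TOKEN` environment variable holds a GitHub token accepted by `Copilot2gpt::Token.get_copilot_token`; the body fields mirror the defaults in `ChatRequest.with_default`.

```ruby
# Hypothetical client call against a locally running copilot2gpt instance.
require 'faraday'
require 'json'

conn = Faraday.new(url: 'http://localhost:8080')
resp = conn.post('/v1/chat/completions') do |req|
  req.headers['Authorization'] = "Bearer #{ENV.fetch('GH_TOKEN')}"  # GitHub token (assumed)
  req.headers['Content-Type']  = 'application/json'
  req.body = {
    model: 'gpt-4',
    messages: [{ role: 'user', content: 'Hello!' }],
    one_time_return: true  # ask for one buffered reply instead of an SSE stream
  }.to_json
end

puts resp.status
puts resp.body
```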
data/exe/copilot2gpt
CHANGED
data/lib/copilot2gpt/app.rb
CHANGED
@@ -1,179 +1,151 @@
 require 'copilot2gpt/token'
+require 'copilot2gpt/chat_request'
 require 'faraday'
 require 'json'
 require 'active_support/all'
+require "sinatra/base"
 
-
-
-before do
-  headers 'Access-Control-Allow-Origin' => '*',
-          'Access-Control-Allow-Methods' => ['OPTIONS', 'GET', 'POST'],
-          'Access-Control-Allow-Headers' => 'Content-Type'
-  halt 200 if request.request_method == 'OPTIONS'
-  @user_agent = request.env['HTTP_USER_AGENT'].to_s
-  @chatbox = @user_agent.include?('chatbox')
-end
-
-get('/openai/models') do
-  {
-    object: 'list',
-    data: [MODEL]
-  }.to_json
-end
-
-post('/openai/chat/completions') do
-  @mock_ai_gateway = true
-  complete
-end
-
-post('/v1/chat/completions') do
-  complete
-end
-
-def complete
-  github_token = request.env['HTTP_AUTHORIZATION'].to_s.sub('Bearer ', '')
-  if github_token.empty?
-    halt 401, {'Content-Type' => 'application/json'}, {:message => 'Unauthorized'}.to_json
-  end
-  @copilot_token = Copilot2gpt::Token.get_copilot_token(github_token)
-  content = params['content']
-  url = "https://api.githubcopilot.com/chat/completions"
-  chat_request = ChatRequest.with_default(content, JSON.parse(request.body.read, symbolize_names: true))
-  conn = Faraday.new(url: url)
-
-  if !chat_request.one_time_return
-    stream do |response_stream|
-      resp = conn.post do |req|
-        req.headers = build_headers(@copilot_token)
-        req.body = chat_request.to_json
-        buffered_line = ""
-        req.options.on_data = Proc.new do |chunk, overall_received_bytes, env|
-          chunk.each_line do |line|
-            line.chomp!
-            next unless line.present?
-            if line.start_with?("data: ")
-              buffered_line = line
-              message = JSON.parse(line.sub(/^data: /, '')) rescue next
-            else
-              buffered_line += line
-              message = JSON.parse(buffered_line.sub(/^data: /, '')) rescue next
-            end
-            message = message.with_indifferent_access
-            if @chatbox
-              message[:choices].select! do |choice|
-                choice.dig(:delta, :content)
-              end
-              next unless message[:choices].any?
-            end
-            if @mock_ai_gateway
-              message.merge!(object: "chat.completion.chunk", model: "gpt-4")
-            end
-            message_json = message.to_json + "\n\n"
-            message_json = "data: " + message_json unless @mock_ai_gateway
-            response_stream << message_json
-          end
-        end
-      end
+module Copilot2GPT
+  class App < Sinatra::Base
 
-
-
-
-
+    MODEL = { "id": "gpt-4", "object": "model", "created": 1687882411, "owned_by": "openai" }
+
+    set :bind, '0.0.0.0'
+    set :port, 8080
+
+    before do
+      headers 'Access-Control-Allow-Origin' => '*',
+              'Access-Control-Allow-Methods' => ['OPTIONS', 'GET', 'POST'],
+              'Access-Control-Allow-Headers' => 'Content-Type'
+      halt 200 if request.request_method == 'OPTIONS'
+      @user_agent = request.env['HTTP_USER_AGENT'].to_s
+      @chatbox = @user_agent.include?('chatbox')
     end
-
-
-
-
+
+    get('/openai/models') do
+      {
+        object: 'list',
+        data: [MODEL]
+      }.to_json
+    end
+
+    post('/openai/chat/completions') do
+      @mock_ai_gateway = true
+      complete(JSON.parse(request.body.read, symbolize_names: true))
     end
 
-
-
-      return
+    post('/v1/chat/completions') do
+      complete(JSON.parse(request.body.read, symbolize_names: true))
     end
 
-
-
-
-
-
-
-
-
-
-
-
+    post('/openai/chat/completions/no-stream') do
+      @mock_ai_gateway = true
+      complete(JSON.parse(request.body.read, symbolize_names: true).merge(stream: false))
+    end
+
+    def complete(args)
+      github_token = request.env['HTTP_AUTHORIZATION'].to_s.sub('Bearer ', '')
+      if github_token.empty?
+        halt 401, {'Content-Type' => 'application/json'}, {:message => 'Unauthorized'}.to_json
+      end
+      @copilot_token = Copilot2gpt::Token.get_copilot_token(github_token)
+      content = params['content']
+      url = "https://api.githubcopilot.com/chat/completions"
+      chat_request = Copilot2GPT::ChatRequest.with_default(content, args)
+      conn = Faraday.new(url: url)
+
+      if !chat_request.one_time_return
+        stream do |response_stream|
+          resp = conn.post do |req|
+            req.headers = build_headers(@copilot_token)
+            req.body = chat_request.to_json
+            buffered_line = ""
+            req.options.on_data = Proc.new do |chunk, overall_received_bytes, env|
+              chunk.each_line do |line|
+                line.chomp!
+                next unless line.present?
+                if line.start_with?("data: ")
+                  buffered_line = line
+                  message = JSON.parse(line.sub(/^data: /, '')) rescue next
+                else
+                  buffered_line += line
+                  message = JSON.parse(buffered_line.sub(/^data: /, '')) rescue next
+                end
+                message = message.with_indifferent_access
+                if @chatbox
+                  message[:choices].select! do |choice|
+                    choice.dig(:delta, :content)
+                  end
+                  next unless message[:choices].any?
+                end
+                if @mock_ai_gateway
+                  message.merge!(object: "chat.completion.chunk", model: "gpt-4")
+                end
+                message_json = message.to_json + "\n\n"
+                message_json = "data: " + message_json unless @mock_ai_gateway
+                response_stream << message_json
+              end
+            end
+          end
+
+          if resp.status != 200
+            halt resp.status, {'Content-Type' => 'application/json'}, {:error => resp.body}.to_json
+            return
+          end
+        end
+      else
+        resp = conn.post do |req|
+          req.headers = build_headers(@copilot_token)
+          req.body = chat_request.to_json
+        end
+
+        if resp.status != 200
+          halt resp.status, {'Content-Type' => 'application/json'}, {:error => resp.body}.to_json
+          return
+        end
+
+        buffer = ""
+        resp.body.each_line do |line|
+          if line.start_with?("data: ")
+            data = line.sub("data: ", "")
+            obj = JSON.parse(data) rescue next
+            if obj.key?("choices") && obj["choices"].is_a?(Array) && !obj["choices"].empty?
+              choice = obj["choices"][0]
+              if choice.is_a?(Hash) && choice.key?("delta") && choice["delta"].is_a?(Hash)
+                delta = choice["delta"]
+                if delta.key?("content") && delta["content"].is_a?(String)
+                  buffer += delta["content"]
+                end
+              end
             end
           end
         end
+        return [200, {'Content-Type' => 'text/event-stream; charset=utf-8'}, buffer]
       end
     end
-  return [200, {'Content-Type' => 'text/event-stream; charset=utf-8'}, buffer]
-  end
-end
-
 
-
-
-
-  def initialize(args)
-    @messages = args[:messages]
-    @model = args[:model]
-    @temperature = args[:temperature]
-    @top_p = args[:top_p]
-    @n = args[:n]
-    @stream = args[:stream]
-    @intent = args[:intent]
-    @one_time_return = args[:one_time_return]
-  end
-
-  def to_json
-    {
-      messages: @messages,
-      model: @model,
-      temperature: @temperature,
-      top_p: @top_p,
-      n: @n,
-      stream: @stream,
-      intent: @intent,
-      one_time_return: @one_time_return
-    }.to_json
-  end
+    def gen_hex_str(length)
+      SecureRandom.hex(length / 2)
+    end
 
-
-
-
-
-
-
-
-
-
-
-
-
-
+    def build_headers(copilot_token)
+      {
+        "Authorization" => "Bearer #{copilot_token}",
+        "X-Request-Id" => "#{gen_hex_str(8)}-#{gen_hex_str(4)}-#{gen_hex_str(4)}-#{gen_hex_str(4)}-#{gen_hex_str(12)}",
+        "Vscode-Sessionid" => "#{gen_hex_str(8)}-#{gen_hex_str(4)}-#{gen_hex_str(4)}-#{gen_hex_str(4)}-#{gen_hex_str(25)}",
+        "Vscode-Machineid" => gen_hex_str(64),
+        "Editor-Version" => "vscode/1.83.1",
+        "Editor-Plugin-Version" => "copilot-chat/0.8.0",
+        "Openai-Organization" => "github-copilot",
+        "Openai-Intent" => "conversation-panel",
+        "Content-Type" => "text/event-stream; charset=utf-8",
+        "User-Agent" => "GitHubCopilotChat/0.8.0",
+        "Accept" => "*/*",
+        "Accept-Encoding" => "gzip,deflate,br",
+        "Connection" => "close"
+      }
     end
+
+    run!
   end
-end
-
-def gen_hex_str(length)
-  SecureRandom.hex(length / 2)
-end
-
-def build_headers(copilot_token)
-  {
-    "Authorization" => "Bearer #{copilot_token}",
-    "X-Request-Id" => "#{gen_hex_str(8)}-#{gen_hex_str(4)}-#{gen_hex_str(4)}-#{gen_hex_str(4)}-#{gen_hex_str(12)}",
-    "Vscode-Sessionid" => "#{gen_hex_str(8)}-#{gen_hex_str(4)}-#{gen_hex_str(4)}-#{gen_hex_str(4)}-#{gen_hex_str(25)}",
-    "Vscode-Machineid" => gen_hex_str(64),
-    "Editor-Version" => "vscode/1.83.1",
-    "Editor-Plugin-Version" => "copilot-chat/0.8.0",
-    "Openai-Organization" => "github-copilot",
-    "Openai-Intent" => "conversation-panel",
-    "Content-Type" => "text/event-stream; charset=utf-8",
-    "User-Agent" => "GitHubCopilotChat/0.8.0",
-    "Accept" => "*/*",
-    "Accept-Encoding" => "gzip,deflate,br",
-    "Connection" => "close"
-  }
 end
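The rewritten `build_headers` fabricates VS Code-style identifiers from `gen_hex_str`, which is a thin wrapper over `SecureRandom.hex` (hex output is two characters per byte, hence the division by two). A standalone sketch of the same idea, separate from the gem:

```ruby
require 'securerandom'

# n hex characters, as in the gem's gen_hex_str helper.
def gen_hex_str(length)
  SecureRandom.hex(length / 2)
end

# UUID-shaped value in the 8-4-4-4-12 layout used for the X-Request-Id header.
request_id = [8, 4, 4, 4, 12].map { |n| gen_hex_str(n) }.join('-')
puts request_id  # e.g. "3f2a9c1d-7b4e-0a12-c9d8-5e6f7a8b9c0d"
```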
data/lib/copilot2gpt/chat_request.rb
ADDED
@@ -0,0 +1,45 @@
+module Copilot2GPT
+  class ChatRequest
+    attr_accessor :messages, :model, :temperature, :top_p, :n, :stream, :intent, :one_time_return
+
+    def initialize(args)
+      @messages = args[:messages]
+      @model = args[:model]
+      @temperature = args[:temperature]
+      @top_p = args[:top_p]
+      @n = args[:n]
+      @stream = args[:stream]
+      @intent = args[:intent]
+      @one_time_return = args[:one_time_return]
+    end
+
+    def to_json
+      {
+        messages: @messages,
+        model: @model,
+        temperature: @temperature,
+        top_p: @top_p,
+        n: @n,
+        stream: @stream,
+        intent: @intent,
+        one_time_return: @one_time_return
+      }.to_json
+    end
+
+    class << self
+      def with_default(content, params)
+        default = {
+          messages: [
+            {"role" => "system", "content" => "\nYou are ChatGPT, a large language model trained by OpenAI.\nKnowledge cutoff: 2021-09\nCurrent model: gpt-4\nCurrent time: 2023/11/7 11: 39: 14\n"},
+            {"role" => "user", "content" => content}
+          ],
+          model: "gpt-4", temperature: 0.5,
+          top_p: 1, n: 1,
+          stream: true, intent: true,
+          one_time_return: false
+        }.merge(params)
+        new(default)
+      end
+    end
+  end
+end
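`ChatRequest.with_default` merges the caller-supplied parameters over these defaults, so a request body only has to name the fields it wants to change. A rough usage sketch (assuming the gem is installed and loadable):

```ruby
require 'copilot2gpt/chat_request'

# Symbol-keyed params, as produced by JSON.parse(body, symbolize_names: true),
# override the defaults; everything else keeps its default value.
req = Copilot2GPT::ChatRequest.with_default(
  'What does this gem do?',
  { temperature: 0.2, one_time_return: true }
)

puts req.model            # => "gpt-4"  (default)
puts req.temperature      # => 0.2     (overridden)
puts req.one_time_return  # => true    (overridden)
puts req.to_json          # JSON string sent on to api.githubcopilot.com
```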
data/lib/copilot2gpt/version.rb
CHANGED
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: copilot2gpt
 version: !ruby/object:Gem::Version
-  version: 0.1.0
+  version: 0.1.2
 platform: ruby
 authors:
 - Liu Xiang
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2023-12-
+date: 2023-12-30 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: sinatra
@@ -86,6 +86,7 @@ files:
 - exe/copilot2gpt
 - lib/copilot2gpt.rb
 - lib/copilot2gpt/app.rb
+- lib/copilot2gpt/chat_request.rb
 - lib/copilot2gpt/test.rb
 - lib/copilot2gpt/token.rb
 - lib/copilot2gpt/version.rb