n2b 0.5.0 → 0.7.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -22,20 +22,23 @@ module N2M
       model_name
     end
 
-    def make_request(content)
+    def make_request(content, expect_json: true)
       request = Net::HTTP::Post.new(API_URI)
       request.content_type = 'application/json'
       request['Authorization'] = "Bearer #{@config['access_key']}"
 
-      request.body = JSON.dump({
+      body_hash = {
         "model" => get_model_name,
-        response_format: { type: 'json_object' },
         "messages" => [
           {
             "role" => "user",
             "content" => content
-          }]
-        })
+          }
+        ]
+      }
+      body_hash["response_format"] = { "type" => "json_object" } if expect_json
+
+      request.body = JSON.dump(body_hash)
 
       response = Net::HTTP.start(API_URI.hostname, API_URI.port, use_ssl: true) do |http|
         http.request(request)
@@ -46,34 +49,38 @@ module N2M
         raise N2B::LlmApiError.new("LLM API Error: #{response.code} #{response.message} - #{response.body}")
       end
       answer = JSON.parse(response.body)['choices'].first['message']['content']
-      begin
-        # remove everything before the first { and after the last }
-        answer = answer.sub(/.*\{(.*)\}.*/m, '{\1}') unless answer.start_with?('{')
-        answer = JSON.parse(answer)
-      rescue JSON::ParserError
-        answer = { 'commands' => answer.split("\n"), explanation: answer }
+      if expect_json
+        begin
+          # remove everything before the first { and after the last }
+          answer = answer.sub(/.*\{(.*)\}.*/m, '{\1}') unless answer.start_with?('{')
+          answer = JSON.parse(answer)
+        rescue JSON::ParserError
+          answer = { 'commands' => answer.split("\n"), explanation: answer }
+        end
       end
       answer
     end
 
-    def analyze_code_diff(prompt_content)
+    def analyze_code_diff(prompt_content, expect_json: true)
       # This method assumes prompt_content is the full, ready-to-send prompt
       # including all instructions for the LLM (system message, diff, user additions, JSON format).
       request = Net::HTTP::Post.new(API_URI)
       request.content_type = 'application/json'
       request['Authorization'] = "Bearer #{@config['access_key']}"
 
-      request.body = JSON.dump({
+      body_hash = {
         "model" => get_model_name,
-        "response_format" => { "type" => "json_object" }, # Crucial for OpenAI to return JSON
         "messages" => [
           {
-            "role" => "user", # The entire prompt is passed as a single user message
+            "role" => "user",
             "content" => prompt_content
           }
         ],
-        "max_tokens" => @config['max_tokens'] || 1500 # Allow overriding, ensure it's enough for JSON
-      })
+        "max_tokens" => @config['max_tokens'] || 1500
+      }
+      body_hash["response_format"] = { "type" => "json_object" } if expect_json
+
+      request.body = JSON.dump(body_hash)
 
       response = Net::HTTP.start(API_URI.hostname, API_URI.port, use_ssl: true) do |http|
         http.request(request)
@@ -83,9 +90,8 @@ module N2M
         raise N2B::LlmApiError.new("LLM API Error: #{response.code} #{response.message} - #{response.body}")
       end
 
-      # Return the raw JSON string. CLI's call_llm_for_diff_analysis will handle parsing.
-      # OpenAI with json_object mode should return the JSON directly in 'choices'.first.message.content
-      JSON.parse(response.body)['choices'].first['message']['content']
+      answer = JSON.parse(response.body)['choices'].first['message']['content']
+      answer
     end
   end
 end
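
Taken together, the hunks add an expect_json: keyword argument (defaulting to true) to make_request and analyze_code_diff: when true, the request body carries "response_format" => { "type" => "json_object" } and the reply is parsed; when false, that field is omitted and the raw text content is returned. Below is a minimal usage sketch; the client class name N2M::Llm::OpenAi and its config-hash constructor are assumptions not shown in these hunks.

# Hypothetical usage sketch. Only make_request, analyze_code_diff and the
# @config keys visible in the diff above come from the source; the class
# name and constructor are assumed.
require 'n2b'

config = {
  'access_key' => ENV['OPENAI_API_KEY'], # sent as the Bearer token
  'max_tokens' => 1500                   # optional override used by analyze_code_diff
}
client = N2M::Llm::OpenAi.new(config)

# Default (expect_json: true): response_format json_object is requested and
# the reply is parsed into a Ruby hash (or wrapped under 'commands' on failure).
parsed = client.make_request('Return the shell commands as JSON.')

# With expect_json: false, response_format is skipped and the model's raw
# text content is returned unparsed.
raw = client.analyze_code_diff('Explain this diff in plain prose.', expect_json: false)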