clag 0.0.3 → 0.0.5

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: f35aebcc5178b31dd6c9b83463a433ed363538234795557ef5b57d02608e1478
-  data.tar.gz: 9179d6d67b9d36263e9568d8e915f13a249fbe8103bce529ecf8ecd86f5eff04
+  metadata.gz: 7567d9817e4d7abc1c6da5ab57e36b99945fe64d3cb9d361d86c2c712e5e41af
+  data.tar.gz: 5425fc8a75d9267aaf341e4b13d82f76dc7dea19549fd2686d84bc667653f5f8
 SHA512:
-  metadata.gz: 673ac864ebd2e5e2b008811f10b69f2d2a4fe053dc2aaa72f66ac1057e79e4871e81af729ee59869a5f32b0f968c68e808388a25be666d75e161379d39fa8422
-  data.tar.gz: 303d55dce5f0780aa227719327ec550f64a710be8e7af978065971f5e03d4a59ffdee5cc91373eb0959f67c817f5cbe301c92c890a9ffafa7f6aaa8223847c99
+  metadata.gz: 8b629c28ad455cc20af1fe172c9dd774f88a980335c5f127fe7ebe6a624688f1ff004103261baedd252a7832beea01be378a2f1995e8c3c55cae7c1522355eb6
+  data.tar.gz: e2327b5d9f0625b0ddf0d084f40372e677d2952becd2ba7f0271ccbc8e234a0dd1e35a0231d2223c31ebdca881409c4d46cad8d72cc05b7dd23e1b0aea76e202
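The release artifacts can be checked against these published digests. A minimal Ruby sketch of that verification, assuming you have unpacked the gem first (a `.gem` file is a plain tar archive containing `metadata.gz` and `data.tar.gz`, so `tar -xf clag-0.0.5.gem` works):

```ruby
require "digest"

# Recompute the digests that checksums.yaml records for each artifact
# inside the unpacked .gem archive and compare them by eye (or by script)
# against the "+" lines above.
%w[metadata.gz data.tar.gz].each do |artifact|
  puts "#{artifact} SHA256: #{Digest::SHA256.file(artifact).hexdigest}"
  puts "#{artifact} SHA512: #{Digest::SHA512.file(artifact).hexdigest}"
end
```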
data/README.md CHANGED
@@ -30,6 +30,32 @@ command with the help of an LLM!
 * Select Gemini as your preferred LLM by setting CLAG\_LLM=gemini in your
 environment
 
+### Using Anthropic's Claude 3 Opus
+
+* Get an API key from Anthropic at https://www.anthropic.com/
+
+* Set your API key as ANTHROPIC\_API\_KEY in your environment
+
+* Select Claude 3 Opus as your preferred LLM by setting CLAG\_LLM=claude in
+your environment
+
+### Using Groq on Mixtral
+
+* Get an API key from https://console.groq.com/
+
+* Set your API key as GROQ\_API\_KEY in your environment
+
+* Select Groq as your preferred LLM by setting CLAG\_LLM=groq in your environment
+
+### Using a Local Model
+
+* Have a model running locally from either Ollama or Llamafile with an
+OpenAI-compatible API
+
+* Have the API server running on port 8080
+
+* Select local as your preferred LLM by setting CLAG\_LLM=local in your environment
+
 ## Usage
 
 Currently supports one command: "g".
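The README now documents four backends selected through a single environment variable. A sketch of the configuration surface this implies (the `claude`, `groq`, `local`, and `gemini` provider names and their key variables come from the README and the dispatch code below; the OpenAI default and the `OPENAI_API_KEY`/`GEMINI_API_KEY` names are assumptions, since this diff does not show them):

```ruby
# CLAG_LLM picks the backend; each remote backend reads its own API key
# from the environment. Falling back to OpenAI mirrors the `else` branch
# in the dispatch code later in this diff.
provider = ENV.fetch("CLAG_LLM", "openai")

api_key =
  case provider
  when "claude" then ENV["ANTHROPIC_API_KEY"]
  when "groq"   then ENV["GROQ_API_KEY"]
  when "gemini" then ENV["GEMINI_API_KEY"] # assumed name; not shown in this diff
  when "local"  then nil # the local server on port 8080 needs no real key
  else ENV["OPENAI_API_KEY"]               # assumed name; not shown in this diff
  end
```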
data/lib/clag/version.rb CHANGED
@@ -1,3 +1,3 @@
 module Clag
-  VERSION = '0.0.3'
+  VERSION = '0.0.5'
 end
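The only change here is the version bump. By the usual gem convention (assumed; the gemspec itself is not part of this diff), the gemspec reads this constant so the version lives in exactly one place:

```ruby
# Conventional gemspec wiring (a sketch, not the gem's actual gemspec):
# requiring the version file lets the release number change in one spot.
require_relative "lib/clag/version"

Gem::Specification.new do |spec|
  spec.name    = "clag"
  spec.version = Clag::VERSION # resolves to "0.0.5" after this bump
  spec.authors = ["Scott Werner"]
end
```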
@@ -23,12 +23,110 @@ module Sublayer
       generate_with_gemini
     when "claude"
       generate_with_claude
+    when "groq"
+      generate_with_groq
+    when "local"
+      generate_with_local_model
     else
       generate_with_openai
     end
   end
 
   private
+
+  def generate_with_local_model
+    system_prompt = <<-PROMPT
+      In this environment you have access to a set of tools you can use to answer the user's question.
+
+      You may call them like this:
+      <function_calls>
+      <invoke>
+      <tool_name>$TOOL_NAME</tool_name>
+      <parameters>
+      <command>value</command>
+      ...
+      </parameters>
+      </invoke>
+      </function_calls>
+
+      Here are the tools available:
+      <tools>
+      #{self.class::OUTPUT_FUNCTION.to_xml}
+      </tools>
+
+      Respond only with valid xml.
+      The entire response should be wrapped in a <response> tag.
+      Any additional information not inside a tool call should go in a <scratch> tag.
+    PROMPT
+
+    response = HTTParty.post(
+      "http://localhost:8080/v1/chat/completions",
+      headers: {
+        "Authorization": "Bearer no-key",
+        "Content-Type": "application/json"
+      },
+      body: {
+        "model": "LLaMA_CPP",
+        "messages": [
+          { "role": "system", "content": system_prompt },
+          { "role": "user", "content": prompt }
+        ]
+      }.to_json
+    )
+
+    text_containing_xml = JSON.parse(response.body).dig("choices", 0, "message", "content")
+    xml = text_containing_xml.match(/\<response\>(.*?)\<\/response\>/m).to_s
+    response_xml = Nokogiri::XML(xml)
+    function_output = response_xml.at_xpath("//parameters/command").children.to_s
+
+    return function_output
+  end
+
+  def generate_with_groq
+    system_prompt = <<-PROMPT
+      In this environment you have access to a set of tools you can use to answer the user's question.
+
+      You may call them like this:
+      <function_calls>
+      <invoke>
+      <tool_name>$TOOL_NAME</tool_name>
+      <parameters>
+      <command>value</command>
+      ...
+      </parameters>
+      </invoke>
+      </function_calls>
+
+      Here are the tools available:
+      <tools>
+      #{self.class::OUTPUT_FUNCTION.to_xml}
+      </tools>
+
+      Respond only with valid xml.
+      The entire response should be wrapped in a <response> tag.
+      Any additional information not inside a tool call should go in a <scratch> tag.
+    PROMPT
+
+    response = HTTParty.post(
+      "https://api.groq.com/openai/v1/chat/completions",
+      headers: {
+        "Authorization": "Bearer #{ENV["GROQ_API_KEY"]}",
+        "Content-Type": "application/json"
+      },
+      body: {
+        "messages": [{ "role": "user", "content": "#{system_prompt}\n#{prompt}" }],
+        "model": "mixtral-8x7b-32768"
+      }.to_json
+    )
+
+    text_containing_xml = JSON.parse(response.body).dig("choices", 0, "message", "content")
+    xml = text_containing_xml.match(/\<response\>(.*?)\<\/response\>/m).to_s
+    response_xml = Nokogiri::XML(xml)
+    function_output = response_xml.at_xpath("//response/function_calls/invoke/parameters/command").children.to_s
+
+    return function_output
+  end
+
   def generate_with_claude
     system_prompt = <<-PROMPT
       In this environment you have access to a set of tools you can use to answer the user's question.
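Both new backends reuse the XML tool-call convention the Claude path established: the model is told to wrap its whole reply in a `<response>` tag and emit tool invocations as `<function_calls>` blocks, and the generated shell command is then pulled out of `<parameters><command>`. A self-contained sketch of that extraction step (`extract_command` is a hypothetical helper, and the tool name and sample output are illustrative; the real tool definition comes from `OUTPUT_FUNCTION.to_xml`):

```ruby
require "nokogiri"

# Mirrors the parsing in generate_with_local_model: grab the <response>
# envelope out of the raw model text (models often emit preamble around
# it), parse the envelope as XML, and return the tool call's <command>.
def extract_command(model_output)
  xml = model_output.match(/<response>(.*?)<\/response>/m).to_s
  Nokogiri::XML(xml).at_xpath("//parameters/command").children.to_s
end

sample = <<~XML
  Some preamble the model may emit first.
  <response>
    <function_calls>
      <invoke>
        <tool_name>generate_command</tool_name>
        <parameters>
          <command>git log --oneline -10</command>
        </parameters>
      </invoke>
    </function_calls>
  </response>
XML

puts extract_command(sample) # => git log --oneline -10
```

The Groq path uses the stricter XPath `//response/function_calls/invoke/parameters/command`; both paths resolve to the same `<command>` node when the model follows the prompt's structure.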
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: clag
 version: !ruby/object:Gem::Version
-  version: 0.0.3
+  version: 0.0.5
 platform: ruby
 authors:
 - Scott Werner
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2024-03-04 00:00:00.000000000 Z
+date: 2024-03-05 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: cli-kit