local_llm 0.1.0 → 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.rakeTasks +7 -0
- data/lib/local_llm/client.rb +123 -0
- data/lib/local_llm/version.rb +1 -1
- data/local_llm-0.1.0.gem +0 -0
- metadata +9 -2
checksums.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
---
|
|
2
2
|
SHA256:
|
|
3
|
-
metadata.gz:
|
|
4
|
-
data.tar.gz:
|
|
3
|
+
metadata.gz: 5761b5a2e549175f5a73ea98ac9728341ec43a120c8065ff1f3259a59a4bc174
|
|
4
|
+
data.tar.gz: baadb9c3c98504c9e9e1ac8ffabf84877568f21d51124ea9cecb9aa1988d7805
|
|
5
5
|
SHA512:
|
|
6
|
-
metadata.gz:
|
|
7
|
-
data.tar.gz:
|
|
6
|
+
metadata.gz: ea52b617b66a39fc95a2fcb299fafe4fef4821ba2bc2a8c8d8033fcd2d771c5355e4366b5775978392974984096cf6888636cf66edf0943b6d8c1aca1d60d09c
|
|
7
|
+
data.tar.gz: 3ddcb9c198d1ea9cd5db8580ad30db0088a94a3fafb8d4a500b8083d84d7637420a325f29b4923eddd01fd5d577195eec293bc2f11c2c1990a7180f7e8c6653c
|
data/.rakeTasks
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
<?xml version="1.0" encoding="UTF-8"?>
|
|
2
|
+
<Settings><!--This file was automatically generated by Ruby plugin.
|
|
3
|
+
You are allowed to:
|
|
4
|
+
1. Remove rake task
|
|
5
|
+
2. Add existing rake tasks
|
|
6
|
+
To add existing rake tasks automatically delete this file and reload the project.
|
|
7
|
+
--><RakeGroup description="" fullCmd="" taksId="rake" /></Settings>
|
|
data/lib/local_llm/client.rb
ADDED
# frozen_string_literal: true

# lib/local_llm/client.rb
require "net/http"
require "json"
require "uri"

module LocalLlm
  # Raised for any non-success response from the Ollama HTTP API.
  class Error < StandardError; end

  # Thin client for a local Ollama-compatible server.
  #
  #   client = LocalLlm::Client.new(base_url: "http://localhost:11434")
  #   client.ask(model: "mistral:7b-instruct", prompt: "What is HIPAA?")
  class Client
    # @param base_url [String] root URL of the Ollama server,
    #   e.g. "http://localhost:11434"
    def initialize(base_url:)
      @base_url = base_url
    end

    # Simple one-shot Q&A: wraps the prompt in a single user message.
    #
    #   client.ask(model: "mistral:7b-instruct", prompt: "What is HIPAA?")
    #
    # @param model   [String]  model name as known to the server
    # @param prompt  [String]  user prompt text
    # @param stream  [Boolean] stream text chunks to the block as they arrive
    # @param options [Hash]    extra Ollama fields (e.g. "temperature")
    # @return [String] the assistant's reply (full text, even when streaming)
    def ask(model:, prompt:, stream: false, options: {}, &block)
      messages = [
        { "role" => "user", "content" => prompt }
      ]

      chat(model: model, messages: messages, stream: stream, options: options, &block)
    end

    # Full chat endpoint (POST /api/chat).
    #
    #   client.chat(
    #     model: "llama2:13b",
    #     messages: [
    #       { "role" => "system", "content" => "You are a helpful assistant." },
    #       { "role" => "user", "content" => "Explain LSTM." }
    #     ],
    #     stream: true
    #   ) { |chunk| print chunk }
    #
    # @param options [Hash] extra Ollama fields merged into the request body
    # @raise [LocalLlm::Error] on any non-2xx response
    # @return [String] the assistant message content ("" when absent)
    def chat(model:, messages:, stream: false, options: {}, &block)
      uri = URI.join(@base_url, "/api/chat")

      body_json = JSON.dump(
        {
          "model" => model,
          "messages" => messages,
          "stream" => stream
        }.merge(options)
      )

      return stream_chat(uri, body_json, &block) if stream

      response = Net::HTTP.post(uri, body_json, "Content-Type" => "application/json")

      unless response.is_a?(Net::HTTPSuccess)
        raise Error, "Ollama API error: #{response.code} #{response.body}"
      end

      JSON.parse(response.body).dig("message", "content") || ""
    end

    # List available models (similar to `ollama list`, GET /api/tags).
    #
    # @raise [LocalLlm::Error] on any non-2xx response
    # @return [Array<Hash>] model descriptors ([] when the key is absent)
    def models
      uri = URI.join(@base_url, "/api/tags")
      response = Net::HTTP.get_response(uri)

      unless response.is_a?(Net::HTTPSuccess)
        raise Error, "Ollama API error: #{response.code} #{response.body}"
      end

      JSON.parse(response.body)["models"] || []
    end

    private

    # Streaming support for /api/chat with "stream": true.
    #
    # The server sends newline-delimited JSON objects, but HTTP chunk
    # boundaries do not necessarily align with those newlines. Partial
    # lines are therefore buffered across chunks instead of being parsed
    # (and silently dropped on JSON::ParserError) too early — that early
    # parse was the original bug.
    #
    # Yields each text delta to the block (if given).
    # @return [String] the full concatenated response
    def stream_chat(uri, body_json)
      http = Net::HTTP.new(uri.host, uri.port)
      http.use_ssl = (uri.scheme == "https")

      req = Net::HTTP::Post.new(uri.request_uri, "Content-Type" => "application/json")
      req.body = body_json

      full_content = +""
      buffer = +""

      http.request(req) do |res|
        unless res.is_a?(Net::HTTPSuccess)
          raise Error, "Ollama API error: #{res.code} #{res.body}"
        end

        res.read_body do |chunk|
          buffer << chunk

          # Consume only complete lines; keep any trailing partial line.
          while (newline = buffer.index("\n"))
            delta = extract_delta(buffer.slice!(0..newline))
            next if delta.nil? || delta.empty?

            full_content << delta
            yield delta if block_given?
          end
        end
      end

      # Flush a final line that arrived without a trailing newline.
      delta = extract_delta(buffer)
      unless delta.nil? || delta.empty?
        full_content << delta
        yield delta if block_given?
      end

      full_content
    end

    # Parse one newline-delimited JSON line from the stream.
    #
    # @return [String, nil] the "message.content" delta ("" when the field
    #   is absent), or nil when the line is blank or not valid JSON
    def extract_delta(line)
      line = line.strip
      return nil if line.empty?

      data = JSON.parse(line)
      data.dig("message", "content") || ""
    rescue JSON::ParserError
      nil
    end
  end
end
|
data/lib/local_llm/version.rb
CHANGED
data/local_llm-0.1.0.gem
ADDED
|
Binary file
|
metadata
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
|
2
2
|
name: local_llm
|
|
3
3
|
version: !ruby/object:Gem::Version
|
|
4
|
-
version: 0.1.0
|
|
4
|
+
version: 0.1.1
|
|
5
5
|
platform: ruby
|
|
6
6
|
authors:
|
|
7
7
|
- MD Abdul Barek
|
|
@@ -21,18 +21,25 @@ executables: []
|
|
|
21
21
|
extensions: []
|
|
22
22
|
extra_rdoc_files: []
|
|
23
23
|
files:
|
|
24
|
+
- ".rakeTasks"
|
|
24
25
|
- ".rspec"
|
|
25
26
|
- LICENSE.txt
|
|
26
27
|
- README.md
|
|
27
28
|
- Rakefile
|
|
28
29
|
- lib/local_llm.rb
|
|
30
|
+
- lib/local_llm/client.rb
|
|
29
31
|
- lib/local_llm/version.rb
|
|
32
|
+
- local_llm-0.1.0.gem
|
|
30
33
|
- sig/local_llm.rbs
|
|
31
|
-
homepage:
|
|
34
|
+
homepage: https://github.com/barek2k2/local_llm
|
|
32
35
|
licenses:
|
|
33
36
|
- MIT
|
|
34
37
|
metadata:
|
|
35
38
|
allowed_push_host: https://rubygems.org
|
|
39
|
+
source_code_uri: https://github.com/barek2k2/local_llm
|
|
40
|
+
homepage_uri: https://github.com/barek2k2/local_llm
|
|
41
|
+
changelog_uri: https://github.com/barek2k2/local_llm/releases
|
|
42
|
+
bug_tracker_uri: https://github.com/barek2k2/local_llm/issues
|
|
36
43
|
post_install_message:
|
|
37
44
|
rdoc_options: []
|
|
38
45
|
require_paths:
|