ollama-ruby 0.0.1 → 0.2.0

Files changed (56)
  1. checksums.yaml +4 -4
  2. data/.envrc +1 -0
  3. data/CHANGES.md +78 -0
  4. data/README.md +62 -23
  5. data/Rakefile +16 -4
  6. data/bin/ollama_chat +470 -90
  7. data/bin/ollama_console +3 -3
  8. data/bin/ollama_update +17 -0
  9. data/config/redis.conf +5 -0
  10. data/docker-compose.yml +11 -0
  11. data/lib/ollama/client.rb +7 -2
  12. data/lib/ollama/documents/memory_cache.rb +44 -0
  13. data/lib/ollama/documents/redis_cache.rb +57 -0
  14. data/lib/ollama/documents/splitters/character.rb +70 -0
  15. data/lib/ollama/documents/splitters/semantic.rb +90 -0
  16. data/lib/ollama/documents.rb +172 -0
  17. data/lib/ollama/dto.rb +4 -7
  18. data/lib/ollama/handlers/progress.rb +18 -5
  19. data/lib/ollama/image.rb +16 -7
  20. data/lib/ollama/options.rb +4 -0
  21. data/lib/ollama/utils/chooser.rb +30 -0
  22. data/lib/ollama/utils/colorize_texts.rb +42 -0
  23. data/lib/ollama/utils/fetcher.rb +105 -0
  24. data/lib/ollama/utils/math.rb +48 -0
  25. data/lib/ollama/utils/tags.rb +7 -0
  26. data/lib/ollama/utils/width.rb +1 -1
  27. data/lib/ollama/version.rb +1 -1
  28. data/lib/ollama.rb +12 -5
  29. data/ollama-ruby.gemspec +19 -9
  30. data/spec/assets/embeddings.json +1 -0
  31. data/spec/ollama/client_spec.rb +2 -2
  32. data/spec/ollama/commands/chat_spec.rb +2 -2
  33. data/spec/ollama/commands/copy_spec.rb +2 -2
  34. data/spec/ollama/commands/create_spec.rb +2 -2
  35. data/spec/ollama/commands/delete_spec.rb +2 -2
  36. data/spec/ollama/commands/embed_spec.rb +3 -3
  37. data/spec/ollama/commands/embeddings_spec.rb +2 -2
  38. data/spec/ollama/commands/generate_spec.rb +2 -2
  39. data/spec/ollama/commands/pull_spec.rb +2 -2
  40. data/spec/ollama/commands/push_spec.rb +2 -2
  41. data/spec/ollama/commands/show_spec.rb +2 -2
  42. data/spec/ollama/documents/memory_cache_spec.rb +63 -0
  43. data/spec/ollama/documents/redis_cache_spec.rb +78 -0
  44. data/spec/ollama/documents/splitters/character_spec.rb +96 -0
  45. data/spec/ollama/documents/splitters/semantic_spec.rb +56 -0
  46. data/spec/ollama/documents_spec.rb +119 -0
  47. data/spec/ollama/handlers/progress_spec.rb +2 -2
  48. data/spec/ollama/image_spec.rb +4 -0
  49. data/spec/ollama/message_spec.rb +3 -4
  50. data/spec/ollama/options_spec.rb +18 -0
  51. data/spec/ollama/tool_spec.rb +1 -6
  52. data/spec/ollama/utils/fetcher_spec.rb +74 -0
  53. data/spec/ollama/utils/tags_spec.rb +24 -0
  54. data/spec/spec_helper.rb +8 -0
  55. data/tmp/.keep +0 -0
  56. metadata +187 -5
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 327051092cef37a7fd95d6a5c3a5a13aa6e52df2314da5f4cfd28f2890ffa820
- data.tar.gz: 86da0d23497f9717239abaa1830ac687562c8e9827a097781f0404e6b65e1e35
+ metadata.gz: 1f80ae8ee6e8acbbedfff8b56923b25b583fc60b96c733985e32908874d542bb
+ data.tar.gz: 80834beb676929f08f4216e373e56cadd12d16c0b50e95b1a599bdd48bf29c86
  SHA512:
- metadata.gz: f3a9324f11877b0772d1f4bcb1902ea905e7132d94d73fe8b6b7e11a567c2aea04431838fa2b534557e4e1e76ae1ee47b13832aebb71e144ac329b13d15ea111
- data.tar.gz: d40adf27de7e7700158027f1c6bfec7e97189b4a6081a7da36758edc31b5229b6efef62ee5a192af19afa5b633224cf8e80666d38154a9d3c1cae974a98f2de5
+ metadata.gz: 94823ec618f940056bcee6ac7d706c633087404efcb49122cdccc89a4596b4a5f14a0d1c1d60fd7bb13a28e10f3ba4dc2027cd96cfe384169b25acaf75f532b7
+ data.tar.gz: ef8d4a7001c5502bc787f074f10dfaf16692b8fa90c4a6e32ae3c0c75b94da45b4691094229106cec72438837eef57b933cd5e5c9b28e63d0d4a598c152a9c32
data/.envrc ADDED
@@ -0,0 +1 @@
+ export REDIS_URL=redis://localhost:9736
data/CHANGES.md ADDED
@@ -0,0 +1,78 @@
+ # Changes
+
+ ## 2024-09-03 v0.2.0
+
+ ### Changes
+
+ * **Added Web Search Functionality to `ollama_chat`**
+   + Added `/web` command to fetch search results from DuckDuckGo
+   + Updated `/summarize` command to handle cases where summarization fails
+   + Fix bug in parsing content type of source document
+ * **Refactored Options Class and Usage**
+   + Renamed `options` variable to use `Options[]` method in ollama_chat script
+   + Added `[](value)` method to Ollama::Options class for casting hashes
+   + Updated options_spec.rb with tests for casting hashes and error handling
+ * **Refactored Web Search Command**
+   + Added support for specifying a page number in `/web` command
+   + Updated regular expression to match new format
+   + Passed page number as an argument to `search_web` method
+   + Updated content string to reference the query and sources correctly
+ * **DTO Class Changes**
+   + Renamed `json_create` method to `from_hash` in Ollama::DTO class
+   + Updated `as_json` method to remove now unnecessary hash creation
+ * **Message and Tool Spec Changes**
+   + Removed `json_class` from JSON serialization in message_spec
+   + Removed `json_class` from JSON serialization in tool_spec
+ * **Command Spec Changes**
+   + Removed `json_class` from JSON serialization in various command specs (e.g. generate_spec, pull_spec, etc.)
+ * **Miscellaneous Changes**
+   + Improved width calculation for text truncation
+   + Updated FollowChat class to display evaluation statistics
+   + Update OllamaChatConfig to use EOT instead of end for heredoc syntax
+   + Add .keep file to tmp directory
+
+ ## 2024-08-30 v0.1.0
+
+ ### Change Log for New Version
+
+ #### Significant Changes
+
+ * **Document Splitting and Embedding Functionality**: Added `Ollama::Documents` class with methods for adding documents, checking existence, deleting documents, and finding similar documents.
+   + Introduced two types of caches: `MemoryCache` and `RedisCache`
+   + Implemented `SemanticSplitter` class to split text into sentences based on semantic similarity
+ * **Improved Ollama Chat Client**: Added support for document embeddings and web/file RAG
+   + Allowed configuration per yaml file
+   + Parse user input for URLs or files to send images to multimodal models
+ * **Redis Docker Service**: Set `REDIS_URL` environment variable to `redis://localhost:9736`
+   + Added Redis service to `docker-compose.yml`
+ * **Status Display and Progress Updates**: Added infobar.label = response.status when available
+   + Updated infobar with progress message on each call if total and completed are set
+   + Display error message from response.error if present
+ * **Refactored Chat Commands**: Simplified regular expression patterns for `/pop`, `/save`, `/load`, and `/image` commands
+   + Added whitespace to some command patterns for better readability
+
+ #### Other Changes
+
+ * Added `Character` and `RecursiveCharacter` splitter classes to split text into chunks based on character separators
+ * Added RSpec tests for the Ollama::Documents class(es)
+ * Updated dependencies and added new methods for calculating breakpoint thresholds and sentence embeddings
+ * Added 'ollama_update' to executables in Rakefile
+ * Started using webmock
+ * Refactored chooser and add fetcher specs
+ * Added tests for Ollama::Utils::Fetcher
+ * Update README.md
+
+ ## 2024-08-16 v0.0.1
+
+ * **New Features**
+   + Added missing options parameter to Embed command
+   + Documented new `/api/embed` endpoint
+ * **Improvements**
+   + Improved example in README.md
+ * **Code Refactoring**
+   + Renamed `client` to `ollama` in client and command specs
+   + Updated expectations to use `ollama` instead of `client`
+
+ ## 2024-08-12 v0.0.0
+
+ * Start
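
The v0.2.0 entry above mentions an `[](value)` method added to Ollama::Options for casting hashes. A minimal, hypothetical sketch of how such casting might be used (the option keys and error behaviour shown here are assumptions, not taken from this diff):

```ruby
require 'ollama'

# Hypothetical usage based on the changelog entry above: Options[] is described
# as casting a plain hash (for example one parsed from a JSON or YAML config)
# into an Ollama::Options instance; unknown keys are assumed to raise an error,
# per the mentioned options_spec.rb error-handling tests.
options = Ollama::Options[num_ctx: 8192, temperature: 0.7]
```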
data/README.md CHANGED
@@ -28,7 +28,7 @@ to your Gemfile and run `bundle install` in your terminal.
 
  ## Executables
 
- ### ollama_chat
+ ### ollama\_chat
 
  This a chat client, that can be used to connect to an ollama server and enter a
  chat converstation with a LLM. It can be called with the following arguments:
@@ -36,20 +36,58 @@ chat converstation with a LLM. It can be called with the following arguments:
  ```
  ollama_chat [OPTIONS]
 
- -u URL the ollama base url, OLLAMA_URL
- -m MODEL the ollama model to chat with, OLLAMA_MODEL
- -M OPTIONS the model options as JSON file, see Ollama::Options
- -s SYSTEM the system prompt to use as a file
- -c CHAT a saved chat conversation to load
- -v VOICE use VOICE (e. g. Samantha) to speak with say command
- -d use markdown to display the chat messages
- -h this help
+ -f CONFIG config file to read
+ -u URL the ollama base url, OLLAMA_URL
+ -m MODEL the ollama model to chat with, OLLAMA_CHAT_MODEL
+ -s SYSTEM the system prompt to use as a file, OLLAMA_CHAT_SYSTEM
+ -c CHAT a saved chat conversation to load
+ -C COLLECTION name of the collection used in this conversation
+ -D DOCUMENT load document and add to collection (multiple)
+ -v use voice output
+ -h this help
  ```
 
  The base URL can be either set by the environment variable `OLLAMA_URL` or it
  is derived from the environment variable `OLLAMA_HOST`. The default model to
  connect can be configured in the environment variable `OLLAMA_MODEL`.
 
+ The YAML config file in `$XDG_CONFIG_HOME/ollama_chat/config.yml`, that you can
+ use for more complex settings, it looks like this:
+
+ ```
+ ---
+ url: <%= ENV['OLLAMA_URL'] || 'http://%s' % ENV.fetch('OLLAMA_HOST') %>
+ model:
+   name: <%= ENV.fetch('OLLAMA_CHAT_MODEL', 'llama3.1') %>
+   options:
+     num_ctx: 8192
+ system: <%= ENV.fetch('OLLAMA_CHAT_SYSTEM', 'null') %>
+ voice: Samantha
+ markdown: true
+ embedding:
+   enabled: true
+   model:
+     name: mxbai-embed-large
+     options: {}
+   collection: <%= ENV.fetch('OLLAMA_CHAT_COLLECTION', 'ollama_chat') %>
+   found_texts_size: 4096
+   splitter:
+     name: RecursiveCharacter
+     chunk_size: 1024
+ cache: Ollama::Documents::RedisCache
+ redis:
+   url: <%= ENV.fetch('REDIS_URL', 'null') %>
+ debug: <%= ENV['OLLAMA_CHAT_DEBUG'].to_i == 1 ? true : false %>
+ ```
+
+ If you want to store embeddings persistently, set an environment variable
+ `REDIS_URL` or update the `redis.url` setting in your `config.yml` file to
+ connect to a Redis server. Without this setup, embeddings will only be stored
+ in process memory, which is less durable.
+
+ Some settings can be passed as arguments as well, e. g. if you want to choose a
+ specific system prompt:
+
  ```
  $ ollama_chat -s sherlock.txt
  Model with architecture llama found.
@@ -86,9 +124,7 @@ $ ollama_chat -m llava-llama3
  Model with architecture llama found.
  Connecting to llava-llama3@http://localhost:11434 now…
  Type /help to display the chat help.
- 📨 user> /image spec/assets/kitten.jpg
- Attached image spec/assets/kitten.jpg to the next message.
- 📸 user> What's on this image?
+ 📸 user> What's on this image? ./spec/assets/kitten.jpg
  📨 assistant:
  The image captures a moment of tranquility featuring a young cat. The cat,
  adorned with gray and white fur marked by black stripes on its face and legs,
@@ -116,19 +152,22 @@ subject - the young, blue-eyed cat.
  The following commands can be given inside the chat, if prefixed by a `/`:
 
  ```
- /paste to paste content
- /list list the messages of the conversation
- /clear clear the conversation messages
- /pop n pop the last n message, defaults to 1
- /regenerate the last answer message
- /save filename store conversation messages
- /load filename load conversation messages
- /image filename attach image to the next message
- /quit to quit.
- /help to view this help.
+ /paste to paste content
+ /markdown toggle markdown output
+ /list list the messages of the conversation
+ /clear clear the conversation messages
+ /pop [n] pop the last n exchanges, defaults to 1
+ /model change the model
+ /regenerate the last answer message
+ /collection clear|stats|change|new clear or show stats of current collection
+ /summarize source summarize the URL/file source's content
+ /save filename store conversation messages
+ /load filename load conversation messages
+ /quit to quit
+ /help to view this help
  ```
 
- ### ollama_console
+ ### ollama\_console
 
  This is an interactive console, that can be used to try the different commands
  provided by an `Ollama::Client` instance. For example this command generate a
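
Tying the embedding settings from the README example above to the `Ollama::Documents` class introduced in v0.1.0, a rough usage sketch might look like the following. The constructor keywords and method names (`ollama:`, `cache:`, `add`, `find`) are assumptions for illustration and are not verified against this diff:

```ruby
require 'ollama'

# Illustrative only: the Documents API shown here is assumed from the
# changelog description (add documents, check existence, delete, find similar).
ollama = Ollama::Client.new(base_url: 'http://localhost:11434')

docs = Ollama::Documents.new(
  ollama:     ollama,
  model:      'mxbai-embed-large',            # embedding model from the example config
  collection: 'ollama_chat',                  # collection name from the example config
  cache:      Ollama::Documents::RedisCache   # persistent cache, requires REDIS_URL
)

docs.add('Some text to embed and store.')     # store a document's embedding
docs.find('a related query')                  # look up similar documents (assumed name)
```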
data/Rakefile CHANGED
@@ -13,12 +13,12 @@ GemHadar do
  description 'Library that allows interacting with the Ollama API'
  test_dir 'spec'
  ignore '.*.sw[pon]', 'pkg', 'Gemfile.lock', '.AppleDouble', '.bundle',
- '.yardoc', 'tags', 'errors.lst', 'cscope.out', 'coverage', 'tmp'
+ '.yardoc', 'tags', 'errors.lst', 'cscope.out', 'coverage', 'tmp', 'corpus'
  package_ignore '.all_images.yml', '.tool-versions', '.gitignore', 'VERSION',
  '.utilsrc', '.rspec', *Dir.glob('.github/**/*', File::FNM_DOTMATCH)
  readme 'README.md'
 
- executables << 'ollama_console' << 'ollama_chat'
+ executables << 'ollama_console' << 'ollama_chat' << 'ollama_update'
 
  required_ruby_version '~> 3.1'
 
@@ -27,9 +27,21 @@ GemHadar do
  dependency 'term-ansicolor', '~> 1.11'
  dependency 'kramdown-parser-gfm', '~> 1.1'
  dependency 'terminal-table', '~> 3.0'
- development_dependency 'all_images', '~>0.4'
- development_dependency 'rspec', '~>3.2'
+ dependency 'redis', '~> 5.0'
+ dependency 'numo-narray', '~> 0.9'
+ dependency 'more_math', '~> 1.1'
+ dependency 'sorted_set', '~> 1.0'
+ dependency 'mime-types', '~> 3.0'
+ dependency 'reverse_markdown', '~> 2.0'
+ dependency 'complex_config', '~> 0.20'
+ dependency 'search_ui', '~> 0.0'
+ dependency 'amatch', '~> 0.4.1'
+ development_dependency 'all_images', '~> 0.4'
+ development_dependency 'rspec', '~> 3.2'
  development_dependency 'utils'
+ development_dependency 'webmock'
 
  licenses << 'MIT'
+
+ clobber 'coverage'
  end
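
The new runtime dependencies above (`numo-narray`, `more_math`) support the embedding math referenced in the changelog ("calculating breakpoint thresholds and sentence embeddings"). As a loose illustration of the kind of vector arithmetic this enables, and not the gem's actual `Ollama::Utils::Math` API, cosine similarity between two embeddings could be computed like this:

```ruby
require 'numo/narray'

# Sketch only: cosine similarity between two embedding vectors, the basic
# measure used when ranking documents against a query embedding.
def cosine_similarity(a, b)
  a = Numo::DFloat[*a]
  b = Numo::DFloat[*b]
  a.dot(b) / (Math.sqrt(a.dot(a)) * Math.sqrt(b.dot(b)))
end

cosine_similarity([1.0, 0.0, 1.0], [0.5, 0.5, 1.0]) # => ~0.866
```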