ollama-ai 1.0.0 → 1.2.0
- checksums.yaml +4 -4
- data/Gemfile +1 -1
- data/Gemfile.lock +23 -14
- data/README.md +264 -183
- data/controllers/client.rb +9 -3
- data/ollama-ai.gemspec +2 -1
- data/static/gem.rb +1 -1
- data/tasks/generate-readme.clj +1 -1
- data/template.md +231 -154
- metadata +22 -8
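For reference, the dependency changes recorded in the metadata diff below translate to roughly the following declarations in data/ollama-ai.gemspec. This is a sketch inferred from the diff, not the actual gemspec source; only the fields relevant to this release are shown.

```ruby
# Hypothetical reconstruction of the dependency section of
# data/ollama-ai.gemspec, inferred from the metadata diff below.
Gem::Specification.new do |spec|
  spec.name    = 'ollama-ai'
  spec.version = '1.2.0'

  spec.add_dependency 'faraday', '~> 2.9'          # constraint bumped in this release
  spec.add_dependency 'faraday-typhoeus', '~> 1.1' # new runtime dependency
end
```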
metadata CHANGED
```diff
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: ollama-ai
 version: !ruby/object:Gem::Version
-  version: 1.0.0
+  version: 1.2.0
 platform: ruby
 authors:
 - gbaptista
-autorequire:
+autorequire:
 bindir: bin
 cert_chain: []
-date: 2024-
+date: 2024-02-10 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: faraday
@@ -16,17 +16,31 @@ dependencies:
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: '2.
+        version: '2.9'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: '2.
+        version: '2.9'
+- !ruby/object:Gem::Dependency
+  name: faraday-typhoeus
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '1.1'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '1.1'
 description: A Ruby gem for interacting with Ollama's API that allows you to run open
   source AI LLMs (Large Language Models) locally.
-email:
+email:
 executables: []
 extensions: []
 extra_rdoc_files: []
@@ -54,7 +68,7 @@ metadata:
   homepage_uri: https://github.com/gbaptista/ollama-ai
   source_code_uri: https://github.com/gbaptista/ollama-ai
   rubygems_mfa_required: 'true'
-post_install_message:
+post_install_message:
 rdoc_options: []
 require_paths:
 - ports/dsl
@@ -70,7 +84,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
       version: '0'
 requirements: []
 rubygems_version: 3.3.3
-signing_key:
+signing_key:
 specification_version: 4
 summary: Interact with Ollama API to run open source AI models locally.
 test_files: []
```
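The new faraday-typhoeus runtime dependency (and the accompanying data/controllers/client.rb change) suggests the client can now be driven through the Typhoeus adapter. Below is a minimal, hypothetical sketch of wiring that adapter into a Faraday connection against a local Ollama server; it is not the gem's actual client code, and the request payload is only illustrative.

```ruby
require 'json'
require 'faraday'
require 'faraday/typhoeus' # registers the :typhoeus adapter (faraday-typhoeus gem)

# Hypothetical example: talk to a local Ollama server (default port 11434)
# through Faraday with the Typhoeus adapter selected explicitly.
connection = Faraday.new(url: 'http://localhost:11434') do |faraday|
  faraday.adapter :typhoeus
end

response = connection.post('/api/generate') do |request|
  request.headers['Content-Type'] = 'application/json'
  # stream: false asks Ollama for a single JSON response instead of NDJSON chunks.
  request.body = { model: 'llama2', prompt: 'Hi!', stream: false }.to_json
end

puts response.status
puts JSON.parse(response.body)['response']
```

Typhoeus wraps libcurl, so the typhoeus gem pulled in by faraday-typhoeus needs libcurl available on the system.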