llamafile 0.2.1 → 0.2.3

This diff shows the changes between two publicly released versions of the package, as they appear in the public registry.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 8f65be502a0aaa502ab94785114d66f057e9af629cda650851a39c73d3b78c97
-  data.tar.gz: 376add4a170db17299c3e11b1f2cc20313433894c1264c1ee729827b240ec08d
+  metadata.gz: 69461d4fef1f2c1f6c10e899443b6440f73ddac25da733ab88bac5404e5b683f
+  data.tar.gz: 27c0ca0a03ffcd32de69bc2d06488dd724159897c7a64a0f3848e765b44e0e9c
 SHA512:
-  metadata.gz: 13ccc9565d30515cafe42c61ee8e32c270e89399d1189b0f834285f277f70e31351b8ff0fdcdcacec0b5f78f16960768115918a4f3ee60ad92a03e16fda94e74
-  data.tar.gz: 3ef5540c172c6bdb6b021b20bfa0cbba2420f70059903a9961eb83586aaf2ae91ed069101d6b92100de396313825cbf8f4a767a79ff0b6b547ac430bba90b7f1
+  metadata.gz: 3cb14dbffb8d76109a4777493bf2c937505dca106277aa9cc8946b0e51a7580af7719724142fa0dc741f9a85b1225a92a88df771889a59e7d32c45d629e954db
+  data.tar.gz: 63e72fb3177eb128fb5677d21dc22fafc191390aba42ca978fe3ce0cefda073a7dd0ee760e3559e52e5b41f344f474b7c24f4a3c1d07259cdd764f29e95b923f
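These digests cover the two archives packed inside the `.gem` file itself. A minimal sketch for reproducing them locally, assuming the gem has already been fetched with `gem fetch llamafile --version 0.2.3` (the filename follows the rubygems convention; it is not taken from this diff):

```ruby
require "digest"
require "rubygems/package"

# Walk the outer .gem tarball and hash the two members listed in checksums.yaml.
File.open("llamafile-0.2.3.gem", "rb") do |io|
  Gem::Package::TarReader.new(io).each do |entry|
    next unless %w[metadata.gz data.tar.gz].include?(entry.full_name)
    puts "#{entry.full_name}: #{Digest::SHA256.hexdigest(entry.read)}"
  end
end
```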
@@ -47,21 +47,31 @@ module LLAMA
     end
   end
 
-  # LLAMA.post prompt: PROMPT.make { system: "system prompt", mode: "output mode", output: "previous output", input: "new input" }
+  # LLAMA.post prompt: PROMPT.make("prompt template", params={})
 
   def self.<< i
     fiber = Fiber.new do |ii|
-      Fiber.yield LLAMA.post(prompt: PROMPT.make(input: ii))
+      Fiber.yield LLAMA.post(prompt: i)
     end
     fiber.resume i
   end
+
+  def self.make t, h={}
+    fiber = Fiber.new do |tt, hh|
+      Fiber.yield LLAMA.post(prompt: PROMPT.make(tt, hh))
+    end
+    fiber.resume t, h
+  end
 
   def self.if? h={}
-    if LLAMA.post({ n_predict: 4, grammar: GRAMMAR[:bool], prompt: PROMPT.test(h) }) == 'yes'
-      return true
-    else
-      return false
+    fiber = Fiber.new do |hh|
+      if LLAMA.post({ n_predict: 4, grammar: GRAMMAR[:bool], prompt: PROMPT.test(hh) }) == 'yes'
+        Fiber.yield true
+      else
+        Fiber.yield false
+      end
     end
+    fiber.resume h
   end
 end
 
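Every public entry point in module LLAMA now funnels through a Fiber that is created and resumed immediately, so the calls remain synchronous: `resume` passes its arguments into the block, and the value handed to `Fiber.yield` becomes `resume`'s return value. The rewritten `self.<<` also stops routing its argument through `PROMPT.make(input: ...)` and posts it verbatim; templated prompts now go through the new `make`. A stripped-down illustration of the Fiber pattern (plain Ruby, not gem code):

```ruby
# Fiber.new + immediate resume is a synchronous round trip: resume's
# arguments become the block parameters, and the value given to
# Fiber.yield is returned from resume.
fiber = Fiber.new do |x|
  Fiber.yield x.upcase
end
puts fiber.resume("hello")  # => "HELLO"
```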
@@ -1,5 +1,5 @@
 # frozen_string_literal: true
 
 module Llamafile
-  VERSION = "0.2.1"
+  VERSION = "0.2.3"
 end
data/lib/llamafile.rb CHANGED
@@ -10,19 +10,20 @@ require_relative "llamafile/prompt"
 require_relative "llamafile/voice"
 
 module Lf
+  def self.make t, h={}
+    LLAMA.make t, h
+  end
   def self.<< p
     LLAMA << p
   end
   def self.if? h={}
     LLAMA.if?(h)
   end
-  def self.prompt h={}
-    LLAMA.post(prompt: PROMPT.make(h))
-  end
+
   @@C = Hash.new { |h,k| h[k] = C.new(k) }
   class C
     attr_accessor :character, :actor, :example, :format
-    attr_reader :output, :input
+    attr_reader :output, :input, :history
     def initialize k
       @id = k
       @input = ""
@@ -31,20 +32,14 @@ module Lf
       @actor = %[an honest person.]
       @format = %[Format your response as properly formatted markdown.]
       @example = %[# this is a heading\nThis is some general text about the heading.\n1. list item one.\n2. list item two.\n3. list item three.]
-      @convo = []
-    end
-    def fact q, a
-      @convo << [ q, a ]
+      @history = ["Hello","Hi."]
     end
-    def hist
-      @convo
-    end
-    def convo
-      o = []; @convo[-5..-1].each { |e| o << %[User: #{e[0]}\nLlama: #{e[1]}] };
+    def conversation
+      o = @history.map { |e| %[User: #{e[0]}\nLlama: #{e[1]}] };
       return o.join("\n\n")
     end
     def prompt
-      %[Llama is #{@character}\nUser is #{@actor}\n#{@format}\nUse this example to guide your response:\n#{@example}\n#{convo}]
+      %[Llama is #{@character}\nUser is #{@actor}\n#{@format}\nUse this example to guide your response:\n#{@example}\n#{conversation}]
     end
     def << i
       chain i
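One detail worth flagging: `@history` is seeded with two plain strings, but `conversation` indexes each entry like a two-element pair (`e[0]`, `e[1]`), which on a String returns single characters. Entries appended later by `chain` are real pairs; only the seed misrenders:

```ruby
# String#[] with an integer index returns a one-character string, so the
# seed entries render as stray characters rather than a greeting:
["Hello", "Hi."].map { |e| %[User: #{e[0]}\nLlama: #{e[1]}] }
# => ["User: H\nLlama: e", "User: H\nLlama: i"]
# A pair-shaped seed, [["Hello", "Hi."]], would render as intended.
```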
@@ -52,7 +47,7 @@ module Lf
     def chain *p
       [p].flatten.compact.each { |e|
         puts %[<-- #{e}]
-        @convo << [ e, Lf.prompt(output: prompt, input: e)]
+        @history << [ e, Lf.make("<%= params[:output] %>\n<%= params[:input] %>", output: prompt, input: e)]
         @output = @convo[-1][1]
         puts %[--> #{@output}]
         @input = e
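Also note the unchanged context line `@output = @convo[-1][1]`: with `@convo = []` removed from `initialize` in this release, `@convo` is `nil` here, so the first call to `chain` would raise `NoMethodError` (undefined method `[]` for nil). Presumably it was meant to become `@output = @history[-1][1]`.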
@@ -69,11 +64,6 @@ module Lf
   def self.delete k
     @@C.delete(k)
   end
-  def self.chain *p
-    s = {}
-    [p].flatten.compact.each { |e| o = []; s.each_pair {|k,v| o << %[User: #{k}\nLlama: #{v}] }; s[e] = Lf.prompt(output: o.join("\n"), input: e); }
-    return s
-  end
 end
 
 VOICE.hear = lambda { |tgt, voice, payload| puts %[VOICE #{tgt}: #{voice} #{payload}]; MIND.publish(%[#{tgt}/], %[Thinking like a #{voice}, #{payload}]) }
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: llamafile
 version: !ruby/object:Gem::Version
-  version: 0.2.1
+  version: 0.2.3
 platform: ruby
 authors:
 - Erik Olson