cloudflare-ai 0.4.0 → 0.4.1
checksums.yaml
CHANGED

@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 4de1d8c20767aab8d40b96fa01a447e1ab83a1586cad6c4a3d7597331cabc5bd
+  data.tar.gz: 7b912c4bf7bb23ec4f2befca92c966656ac4ab0527b03857adaf14b8e8b87a34
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: c87c773d129790a865a524a56c1ab545a51f3d8a54825b4e02d092282b733f5dbee9a59d1536fd8df096aac47c7d94ae4466e6d6f94aa90115ec4cebdd4d4b0e
+  data.tar.gz: 45ead04646ae3756d05d0d4805bfbfcd509d421b08bfec63a53ff3736abd16887786d17741df542e4596500779881d35e9d8af6ffdb07c2bd2ffdfc632e17883
data/lib/cloudflare/ai/client.rb
CHANGED

@@ -11,10 +11,10 @@ class Cloudflare::AI::Client
     @api_token = api_token
   end

-  def chat(messages:, model_name: default_text_generation_model_name, &block)
+  def chat(messages:, model_name: default_text_generation_model_name, max_tokens: default_max_tokens, &block)
     url = service_url_for(account_id: account_id, model_name: model_name)
     stream = block ? true : false
-    payload = create_streamable_payload({messages: messages.map(&:serializable_hash)}, stream: stream)
+    payload = create_streamable_payload({messages: messages.map(&:serializable_hash)}, stream: stream, max_tokens: max_tokens)
     post_streamable_request(url, payload, &block)
   end

@@ -25,10 +25,10 @@ class Cloudflare::AI::Client
     Cloudflare::AI::Results::TextClassification.new(connection.post(url, payload).body)
   end

-  def complete(prompt:, model_name: default_text_generation_model_name, &block)
+  def complete(prompt:, model_name: default_text_generation_model_name, max_tokens: default_max_tokens, &block)
     url = service_url_for(account_id: account_id, model_name: model_name)
     stream = block ? true : false
-    payload = create_streamable_payload({prompt: prompt}, stream: stream)
+    payload = create_streamable_payload({prompt: prompt}, stream: stream, max_tokens: max_tokens)
     post_streamable_request(url, payload, &block)
   end
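Both chat and complete now accept a max_tokens: keyword, forwarded into the request payload and defaulting to default_max_tokens (256, added in the TextGenerationHelpers change below). A minimal usage sketch in Ruby; the constructor keywords and the Cloudflare::AI::Message class are assumed from the gem's existing public API, and all values are placeholders:

# Illustrative sketch only: the constructor keywords and Cloudflare::AI::Message
# are assumptions; credentials and prompts are placeholders.
require "cloudflare/ai"

client = Cloudflare::AI::Client.new(
  account_id: ENV["CLOUDFLARE_ACCOUNT_ID"],
  api_token: ENV["CLOUDFLARE_API_TOKEN"]
)

# New in 0.4.1: pass max_tokens explicitly; omitting it falls back to
# default_max_tokens (256).
client.complete(prompt: "Write a haiku about Ruby", max_tokens: 100)

messages = [Cloudflare::AI::Message.new(role: "user", content: "Hello!")]
# Supplying a block enables streaming (stream: true in the payload);
# max_tokens is forwarded either way.
client.chat(messages: messages, max_tokens: 64) { |chunk| print chunk }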
@@ -2,12 +2,16 @@ module Cloudflare
   module AI
     module Clients
       module TextGenerationHelpers
-        def
-
+        def create_streamable_payload(data, stream:, max_tokens:)
+          data.merge({stream: stream, max_tokens: max_tokens}).to_json
+        end
+
+        def default_max_tokens
+          256
         end

-        def
-
+        def default_text_generation_model_name
+          Cloudflare::AI::Models.text_generation.first
         end

         def post_streamable_request(url, payload, &block)
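The new helper simply merges the streaming flag and the token limit into the request body before JSON-encoding it. A quick sketch of the payload shape this produces for a non-streaming completion, mirroring create_streamable_payload({prompt: "Hi"}, stream: false, max_tokens: 256) with illustrative values:

require "json"

payload = {prompt: "Hi"}.merge({stream: false, max_tokens: 256}).to_json
# => "{\"prompt\":\"Hi\",\"stream\":false,\"max_tokens\":256}"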