supermemory 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/CHANGELOG.md +24 -0
- data/LICENSE +21 -0
- data/README.md +503 -0
- data/lib/supermemory/client.rb +193 -0
- data/lib/supermemory/configuration.rb +25 -0
- data/lib/supermemory/errors.rb +84 -0
- data/lib/supermemory/integrations/graph_agent.rb +222 -0
- data/lib/supermemory/integrations/langchain.rb +235 -0
- data/lib/supermemory/integrations/openai.rb +294 -0
- data/lib/supermemory/resources/base.rb +15 -0
- data/lib/supermemory/resources/connections.rb +104 -0
- data/lib/supermemory/resources/documents.rb +115 -0
- data/lib/supermemory/resources/memories.rb +36 -0
- data/lib/supermemory/resources/search.rb +68 -0
- data/lib/supermemory/resources/settings.rb +59 -0
- data/lib/supermemory/version.rb +5 -0
- data/lib/supermemory.rb +23 -0
- metadata +173 -0
data/lib/supermemory/integrations/langchain.rb
@@ -0,0 +1,235 @@
+# frozen_string_literal: true
+
+require "json"
+
+begin
+  require "langchain"
+rescue LoadError
+  raise LoadError.new("The langchainrb gem is required for Supermemory::Integrations::Langchain. " \
+                      "Add `gem 'langchainrb'` to your Gemfile.")
+end
+
+module Supermemory
+  module Integrations
+    # Integration with langchainrb (https://github.com/patterns-ai-core/langchainrb).
+    #
+    # Provides:
+    # 1. SupermemoryTool - A Langchain tool for memory operations via function calling
+    # 2. SupermemoryMemory - A memory class for conversation-aware agents
+    #
+    # @example Using as a tool with Langchain::Assistant
+    #   tool = Supermemory::Integrations::Langchain::SupermemoryTool.new(
+    #     api_key: ENV["SUPERMEMORY_API_KEY"],
+    #     container_tag: "user-123"
+    #   )
+    #
+    #   assistant = Langchain::Assistant.new(
+    #     llm: Langchain::LLM::OpenAI.new(api_key: ENV["OPENAI_API_KEY"]),
+    #     tools: [tool],
+    #     instructions: "You are a helpful assistant with memory."
+    #   )
+    #
+    #   assistant.add_message_and_run!(content: "Remember I like Ruby")
+    module Langchain
+      # Langchain tool for Supermemory memory operations.
+      # Extends Langchain::ToolDefinition to work with Langchain::Assistant.
+      class SupermemoryTool
+        extend ::Langchain::ToolDefinition
+
+        define_function :search_memory,
+                        description: "SupermemoryTool: Search the user's long-term memory. " \
+                                     "Use to recall stored facts, preferences, or context." do
+          property :query, type: "string",
+                   description: "The question or topic to search for in memory", required: true
+          property :limit, type: "integer",
+                   description: "Maximum number of memories to return (default: 5)"
+          property :search_mode, type: "string",
+                   description: "Search mode: 'memories', 'hybrid', or 'documents'",
+                   enum: %w[memories hybrid documents]
+        end
+
+        define_function :add_memory,
+                        description: "SupermemoryTool: Save information to long-term memory. " \
+                                     "Use when the user shares preferences or facts worth remembering." do
+          property :content, type: "string",
+                   description: "The information to save as a memory", required: true
+          property :metadata, type: "object",
+                   description: "Optional metadata to attach to the memory"
+        end
+
+        define_function :get_profile,
+                        description: "SupermemoryTool: Get user profile with long-term facts and context. " \
+                                     "Use to understand the user's background." do
+          property :query, type: "string",
+                   description: "Optional query to also search for relevant memories alongside the profile"
+        end
+
+        define_function :forget_memory,
+                        description: "SupermemoryTool: Remove a specific memory from the user's long-term memory. " \
+                                     "Use this when the user asks to forget or delete specific information." do
+          property :content, type: "string",
+                   description: "The exact content of the memory to forget", required: true
+          property :reason, type: "string",
+                   description: "Optional reason for forgetting this memory"
+        end
+
+        # @param api_key [String] Supermemory API key
+        # @param container_tag [String] Container tag for scoping memories to a user/project
+        # @param base_url [String, nil] Custom API endpoint
+        def initialize(api_key:, container_tag:, base_url: nil)
+          @client = Supermemory::Client.new(api_key: api_key, base_url: base_url)
+          @container_tag = container_tag
+        end
+
+        attr_reader :client, :container_tag
+
+        def search_memory(query:, limit: 5, search_mode: "hybrid")
+          result = @client.search.memories(
+            q: query,
+            container_tag: @container_tag,
+            search_mode: search_mode,
+            limit: limit
+          )
+
+          memories = (result["results"] || []).map do |r|
+            r["memory"] || r["chunk"] || ""
+          end.compact
+
+          tool_response(content: memories.any? ? memories.join("\n---\n") : "No relevant memories found.")
+        end
+
+        def add_memory(content:, metadata: nil)
+          result = @client.add(
+            content: content,
+            container_tag: @container_tag,
+            metadata: metadata
+          )
+          tool_response(content: "Memory saved (ID: #{result["id"]})")
+        end
+
+        def get_profile(query: nil)
+          result = @client.profile(container_tag: @container_tag, q: query)
+
+          static = result.dig("profile", "static") || []
+          dynamic = result.dig("profile", "dynamic") || []
+
+          parts = []
+          parts << "Background: #{static.join("; ")}" if static.any?
+          parts << "Recent context: #{dynamic.join("; ")}" if dynamic.any?
+
+          if query && result["searchResults"]
+            search_results = result.dig("searchResults", "results") || []
+            memories = search_results.map { |r| r["memory"] || r["chunk"] }.compact
+            parts << "Related: #{memories.join("; ")}" if memories.any?
+          end
+
+          tool_response(content: parts.any? ? parts.join("\n") : "No profile information available yet.")
+        end
+
+        def forget_memory(content:, reason: nil)
+          result = @client.memories.forget(
+            container_tag: @container_tag,
+            content: content,
+            reason: reason
+          )
+          tool_response(content: result["forgotten"] ? "Memory forgotten." : "Memory not found.")
+        end
+
+        private
+
+        def tool_response(content: nil, image_url: nil)
+          ::Langchain::ToolResponse.new(content: content, image_url: image_url)
+        end
+      end
+
+      # Helper class for injecting memory context into Langchain conversations.
+      # Use this for automatic memory retrieval/storage without explicit tool calling.
+      #
+      # @example
+      #   memory = Supermemory::Integrations::Langchain::SupermemoryMemory.new(
+      #     api_key: ENV["SUPERMEMORY_API_KEY"],
+      #     container_tag: "user-123"
+      #   )
+      #
+      #   # Get context for injection into system prompt
+      #   context = memory.context(query: "user preferences")
+      #
+      #   # Store a conversation exchange
+      #   memory.store(user_message: "I prefer dark mode", assistant_message: "Noted!")
+      class SupermemoryMemory
+        attr_reader :client, :container_tag
+
+        # @param api_key [String] Supermemory API key
+        # @param container_tag [String] Container tag for scoping
+        # @param base_url [String, nil] Custom API endpoint
+        def initialize(api_key:, container_tag:, base_url: nil)
+          @client = Supermemory::Client.new(api_key: api_key, base_url: base_url)
+          @container_tag = container_tag
+        end
+
+        # Retrieve memory context for a query
+        # @param query [String, nil] Search query
+        # @return [String] Formatted memory context
+        def context(query: nil)
+          result = @client.profile(container_tag: @container_tag, q: query)
+          format_context(result)
+        rescue => e
+          warn "[Supermemory::Langchain] Failed to fetch context: #{e.message}"
+          ""
+        end
+
+        # Store a conversation exchange
+        # @param user_message [String] User's message
+        # @param assistant_message [String, nil] Assistant's response
+        # @param metadata [Hash, nil] Optional metadata
+        def store(user_message:, assistant_message: nil, metadata: nil)
+          content = if assistant_message
+                      "User: #{user_message}\nAssistant: #{assistant_message}"
+                    else
+                      user_message
+                    end
+
+          @client.add(content: content, container_tag: @container_tag, metadata: metadata)
+        rescue => e
+          warn "[Supermemory::Langchain] Failed to store memory: #{e.message}"
+        end
+
+        # Search memories
+        # @param query [String] Search query
+        # @param limit [Integer] Max results
+        # @return [Array<Hash>]
+        def search(query:, limit: 5)
+          result = @client.search.memories(
+            q: query,
+            container_tag: @container_tag,
+            search_mode: "hybrid",
+            limit: limit
+          )
+          result["results"] || []
+        rescue => e
+          warn "[Supermemory::Langchain] Failed to search: #{e.message}"
+          []
+        end
+
+        private
+
+        def format_context(result)
+          static = result.dig("profile", "static") || []
+          dynamic = result.dig("profile", "dynamic") || []
+          search_results = result.dig("searchResults", "results") || []
+
+          parts = []
+          parts << "User Background:\n#{static.join("\n")}" if static.any?
+          parts << "Recent Context:\n#{dynamic.join("\n")}" if dynamic.any?
+
+          if search_results.any?
+            memories = search_results.map { |r| r["memory"] || r["chunk"] }.compact
+            parts << "Relevant Memories:\n#{memories.join("\n")}" if memories.any?
+          end
+
+          parts.join("\n\n")
+        end
+      end
+    end
+  end
+end
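
Reading the two classes above together: SupermemoryTool is handed to a Langchain::Assistant for explicit function calling (as its @example shows), while SupermemoryMemory is meant to bracket an ordinary LLM call, fetching context before the request and storing the exchange afterwards. A minimal sketch of that second flow follows; the Langchain::LLM::OpenAI call, model behavior, and environment variable names are illustrative assumptions, not part of this gem.

    require "supermemory/integrations/langchain"

    memory = Supermemory::Integrations::Langchain::SupermemoryMemory.new(
      api_key: ENV["SUPERMEMORY_API_KEY"],
      container_tag: "user-123"
    )
    llm = Langchain::LLM::OpenAI.new(api_key: ENV["OPENAI_API_KEY"])

    user_input = "Which editor theme do I prefer?"

    # Fetch remembered context and fold it into the system prompt.
    context = memory.context(query: user_input)
    reply = llm.chat(messages: [
      { role: "system", content: "You are a helpful assistant.\n\n#{context}" },
      { role: "user", content: user_input }
    ]).chat_completion

    # Persist the exchange so future sessions can recall it.
    memory.store(user_message: user_input, assistant_message: reply)
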

data/lib/supermemory/integrations/openai.rb
@@ -0,0 +1,294 @@
+# frozen_string_literal: true
+
+require "json"
+require "supermemory"
+
+module Supermemory
+  module Integrations
+    # Integration with the ruby-openai gem (https://github.com/alexrudall/ruby-openai).
+    #
+    # Provides two approaches:
+    # 1. SupermemoryTools - Function calling tools for explicit memory operations
+    # 2. with_supermemory - Wrapper that auto-injects memories into system prompts
+    module OpenAI
+      # Tool definitions for OpenAI function calling
+      class SupermemoryTools
+        SEARCH_MEMORIES_TOOL = {
+          type: "function",
+          function: {
+            name: "searchMemories",
+            description: "Search through user memories for relevant information. " \
+                         "Use this when the user asks a question that might be answered by their stored memories.",
+            parameters: {
+              type: "object",
+              properties: {
+                information_to_get: {
+                  type: "string",
+                  description: "What information to search for in the user's memories"
+                },
+                limit: {
+                  type: "integer",
+                  description: "Maximum number of memories to return (default: 10)"
+                }
+              },
+              required: ["information_to_get"]
+            }
+          }
+        }.freeze
+
+        ADD_MEMORY_TOOL = {
+          type: "function",
+          function: {
+            name: "addMemory",
+            description: "Save important information to the user's long-term memory. " \
+                         "Use when the user shares preferences, facts, or information worth remembering.",
+            parameters: {
+              type: "object",
+              properties: {
+                memory: {
+                  type: "string",
+                  description: "The information to save as a memory"
+                }
+              },
+              required: ["memory"]
+            }
+          }
+        }.freeze
+
+        attr_reader :api_key, :config
+
+        # @param api_key [String] Supermemory API key
+        # @param config [Hash] Configuration options
+        # @option config [String] :container_tag Container tag for scoping memories
+        # @option config [String] :base_url Custom API endpoint
+        def initialize(api_key:, config: {})
+          @api_key = api_key
+          @config = config
+          @client = Supermemory::Client.new(
+            api_key: api_key,
+            base_url: config[:base_url]
+          )
+        end
+
+        # Get tool definitions for OpenAI function calling
+        # @return [Array<Hash>]
+        def get_tool_definitions
+          [SEARCH_MEMORIES_TOOL, ADD_MEMORY_TOOL]
+        end
+
+        # Search memories
+        # @param information_to_get [String] Query string
+        # @param limit [Integer] Max results
+        # @param include_full_docs [Boolean]
+        # @return [Hash]
+        def search_memories(information_to_get:, limit: 10, include_full_docs: false)
+          params = { q: information_to_get, limit: limit }
+          params[:container_tag] = config[:container_tag] if config[:container_tag]
+
+          if include_full_docs
+            @client.search.documents(q: information_to_get, limit: limit,
+                                     include_full_docs: true)
+          else
+            @client.search.memories(**params)
+          end
+        end
+
+        # Add a memory
+        # @param memory [String] Content to remember
+        # @return [Hash]
+        def add_memory(memory:)
+          options = {}
+          options[:container_tag] = config[:container_tag] if config[:container_tag]
+          @client.add(content: memory, **options)
+        end
+
+        # Execute a single tool call from an OpenAI response
+        # @param tool_call [Hash] Tool call from OpenAI response
+        # @return [Hash] { tool_call_id:, role: "tool", name:, content: }
+        def execute_tool_call(tool_call)
+          function_name = tool_call.dig("function", "name")
+          arguments = JSON.parse(tool_call.dig("function", "arguments"), symbolize_names: true)
+
+          result = case function_name
+                   when "searchMemories"
+                     search_memories(
+                       information_to_get: arguments[:information_to_get],
+                       limit: arguments[:limit] || 10
+                     )
+                   when "addMemory"
+                     add_memory(memory: arguments[:memory])
+                   else
+                     { error: "Unknown tool: #{function_name}" }
+                   end
+
+          {
+            tool_call_id: tool_call["id"],
+            role: "tool",
+            name: function_name,
+            content: result.to_json
+          }
+        end
+      end
+
+      # Execute memory tool calls from an OpenAI response
+      # @param api_key [String] Supermemory API key
+      # @param tool_calls [Array<Hash>] Tool calls from OpenAI response
+      # @param config [Hash] Configuration options
+      # @return [Array<Hash>] Tool result messages
+      def self.execute_memory_tool_calls(api_key:, tool_calls:, config: {})
+        tools = SupermemoryTools.new(api_key: api_key, config: config)
+        memory_tool_names = %w[searchMemories addMemory]
+
+        tool_calls.select { |tc| memory_tool_names.include?(tc.dig("function", "name")) }
+                  .map { |tc| tools.execute_tool_call(tc) }
+      end
+
+      # Wrap an OpenAI client to automatically inject memories into system prompts
+      #
+      # @param openai_client [OpenAI::Client] A ruby-openai client instance
+      # @param user_id [String] User identifier (used as container_tag)
+      # @param options [Hash] Configuration
+      # @option options [String] :mode "profile", "query", or "full" (default: "full")
+      # @option options [String] :add_memory "always" or "never" (default: "always")
+      # @option options [Boolean] :verbose Enable debug logging (default: false)
+      # @option options [String] :base_url Custom API endpoint
+      # @return [WrappedClient]
+      def self.with_supermemory(openai_client, user_id, options = {})
+        WrappedClient.new(openai_client, user_id, options)
+      end
+
+      # A wrapper around OpenAI::Client that auto-injects memories
+      class WrappedClient
+        attr_reader :openai_client
+
+        def initialize(openai_client, user_id, options = {})
+          @openai_client = openai_client
+          @user_id = user_id
+          @mode = options.fetch(:mode, "full")
+          @add_memory = options.fetch(:add_memory, "always")
+          @verbose = options.fetch(:verbose, false)
+          @supermemory = Supermemory::Client.new(
+            api_key: options[:api_key] || ENV.fetch("SUPERMEMORY_API_KEY", nil),
+            base_url: options[:base_url]
+          )
+        end
+
+        # Intercept chat calls to inject memory context
+        # @param parameters [Hash] OpenAI chat parameters
+        # @return [Hash] OpenAI response
+        def chat(parameters:)
+          messages = parameters[:messages] || parameters["messages"] || []
+          user_message = find_last_user_message(messages)
+
+          if user_message
+            context = fetch_memory_context(user_message)
+            parameters = inject_context(parameters, context) if context
+          end
+
+          response = @openai_client.chat(parameters: parameters)
+
+          if @add_memory == "always" && user_message
+            store_conversation(user_message, response)
+          end
+
+          response
+        end
+
+        # Delegate all other methods to the wrapped client
+        def method_missing(method, ...)
+          @openai_client.send(method, ...)
+        end
+
+        def respond_to_missing?(method, include_private = false)
+          @openai_client.respond_to?(method, include_private) || super
+        end
+
+        private
+
+        def find_last_user_message(messages)
+          user_msgs = messages.select { |m| m[:role] == "user" || m["role"] == "user" }
+          msg = user_msgs.last
+          msg[:content] || msg["content"] if msg
+        end
+
+        def fetch_memory_context(query)
+          case @mode
+          when "profile"
+            result = @supermemory.profile(container_tag: @user_id)
+            format_profile(result)
+          when "query"
+            result = @supermemory.search.memories(q: query, container_tag: @user_id, limit: 5)
+            format_search_results(result)
+          when "full"
+            result = @supermemory.profile(container_tag: @user_id, q: query)
+            format_full(result)
+          end
+        rescue => e
+          warn "[Supermemory] Failed to fetch context: #{e.message}" if @verbose
+          nil
+        end
+
+        def inject_context(parameters, context)
+          parameters = parameters.dup
+          messages = (parameters[:messages] || parameters["messages"]).dup
+
+          system_idx = messages.index { |m| (m[:role] || m["role"]) == "system" }
+          if system_idx
+            msg = messages[system_idx].dup
+            content = msg[:content] || msg["content"]
+            msg_key = msg.key?(:content) ? :content : "content"
+            msg[msg_key] = "#{content}\n\n#{context}"
+            messages[system_idx] = msg
+          else
+            messages.unshift({ role: "system", content: context })
+          end
+
+          key = parameters.key?(:messages) ? :messages : "messages"
+          parameters[key] = messages
+          parameters
+        end
+
+        def store_conversation(user_message, response)
+          assistant_content = response.dig("choices", 0, "message", "content")
+          return unless assistant_content
+
+          @supermemory.add(
+            content: "User: #{user_message}\nAssistant: #{assistant_content}",
+            container_tag: @user_id
+          )
+        rescue => e
+          warn "[Supermemory] Failed to store conversation: #{e.message}" if @verbose
+        end
+
+        def format_profile(result)
+          static = result.dig("profile", "static") || []
+          dynamic = result.dig("profile", "dynamic") || []
+          parts = []
+          parts << "User Background:\n#{static.join("\n")}" if static.any?
+          parts << "Recent Context:\n#{dynamic.join("\n")}" if dynamic.any?
+          parts.any? ? "[User Memory Context]\n#{parts.join("\n\n")}" : nil
+        end
+
+        def format_search_results(result)
+          results = result["results"] || []
+          return nil if results.empty?
+
+          memories = results.map { |r| r["memory"] || r["chunk"] }.compact
+          "[Relevant Memories]\n#{memories.join("\n")}"
+        end
+
+        def format_full(result)
+          profile_text = format_profile(result)
+          search = result.dig("searchResults", "results") || []
+          memory_text = if search.any?
+                          memories = search.map { |r| r["memory"] || r["chunk"] }.compact
+                          "Relevant Memories:\n#{memories.join("\n")}" if memories.any?
+                        end
+
+          parts = [profile_text, memory_text].compact
+          parts.any? ? "[User Memory Context]\n#{parts.join("\n\n")}" : nil
+        end
+      end
+    end
+  end
+end
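
Of the two approaches in this file, the with_supermemory wrapper is the lower-friction one: it intercepts chat, injects the memory context selected by :mode, forwards the call to the wrapped client, and (with add_memory: "always") stores the exchange afterwards. A rough usage sketch, assuming a ruby-openai client; the model name and container tag are illustrative:

    require "openai"
    require "supermemory/integrations/openai"

    openai = OpenAI::Client.new(access_token: ENV["OPENAI_API_KEY"])

    # Wrap the client; "user-123" becomes the container_tag for this user's memories.
    client = Supermemory::Integrations::OpenAI.with_supermemory(
      openai,
      "user-123",
      { mode: "full", add_memory: "always", verbose: true }
    )

    response = client.chat(parameters: {
      model: "gpt-4o-mini",
      messages: [{ role: "user", content: "Which Ruby version do I use at work?" }]
    })
    puts response.dig("choices", 0, "message", "content")

For the explicit route, SupermemoryTools#get_tool_definitions supplies the tools: array for the chat call, and execute_memory_tool_calls turns the returned tool_calls into role: "tool" messages to send back on the next request.
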

data/lib/supermemory/resources/base.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+module Supermemory
+  module Resources
+    class Base
+      # @return [Supermemory::Client]
+      attr_reader :client
+
+      # @param client [Supermemory::Client]
+      def initialize(client)
+        @client = client
+      end
+    end
+  end
+end

data/lib/supermemory/resources/connections.rb
@@ -0,0 +1,104 @@
+# frozen_string_literal: true
+
+module Supermemory
+  module Resources
+    class Connections < Base
+      PROVIDERS = %w[notion google-drive onedrive gmail github web-crawler s3].freeze
+
+      # Create a connection (returns OAuth auth URL)
+      # @param provider [String] Provider name
+      # @param container_tags [Array<String>, nil]
+      # @param document_limit [Integer, nil]
+      # @param metadata [Hash, nil]
+      # @param redirect_url [String, nil]
+      # @return [Hash] { "id" => "...", "authLink" => "...", "expiresIn" => "..." }
+      def create(provider, container_tags: nil, document_limit: nil, metadata: nil, redirect_url: nil)
+        body = {}
+        body[:containerTags] = container_tags if container_tags
+        body[:documentLimit] = document_limit if document_limit
+        body[:metadata] = metadata if metadata
+        body[:redirectUrl] = redirect_url if redirect_url
+        client.post("/v3/connections/#{provider}", body)
+      end
+
+      # List connections
+      # @param container_tags [Array<String>, nil]
+      # @return [Array<Hash>]
+      def list(container_tags: nil)
+        body = {}
+        body[:containerTags] = container_tags if container_tags
+        client.post("/v3/connections/list", body)
+      end
+
+      # Configure a connection (e.g., GitHub resources)
+      # @param connection_id [String]
+      # @param resources [Array<Hash>]
+      # @return [Hash] { "message" => "...", "success" => true/false }
+      def configure(connection_id, resources:)
+        client.post("/v3/connections/#{connection_id}/configure", { resources: resources })
+      end
+
+      # Get connection by ID
+      # @param connection_id [String]
+      # @return [Hash]
+      def get_by_id(connection_id)
+        client.get("/v3/connections/#{connection_id}")
+      end
+
+      # Get connection by provider and tags
+      # @param provider [String]
+      # @param container_tags [Array<String>]
+      # @return [Hash]
+      def get_by_tag(provider, container_tags:)
+        client.post("/v3/connections/#{provider}/connection", { containerTags: container_tags })
+      end
+
+      # Delete connection by ID
+      # @param connection_id [String]
+      # @return [Hash]
+      def delete_by_id(connection_id)
+        client.delete("/v3/connections/#{connection_id}")
+      end
+
+      # Delete connection by provider
+      # @param provider [String]
+      # @param container_tags [Array<String>]
+      # @return [Hash]
+      def delete_by_provider(provider, container_tags:)
+        client.delete("/v3/connections/#{provider}", { containerTags: container_tags })
+      end
+
+      # Trigger manual import/sync
+      # @param provider [String]
+      # @param container_tags [Array<String>, nil]
+      # @return [String]
+      def import(provider, container_tags: nil)
+        body = {}
+        body[:containerTags] = container_tags if container_tags
+        client.post("/v3/connections/#{provider}/import", body)
+      end
+
+      # List documents for a connection
+      # @param provider [String]
+      # @param container_tags [Array<String>, nil]
+      # @return [Array<Hash>]
+      def list_documents(provider, container_tags: nil)
+        body = {}
+        body[:containerTags] = container_tags if container_tags
+        client.post("/v3/connections/#{provider}/documents", body)
+      end
+
+      # Get available resources for a connection
+      # @param connection_id [String]
+      # @param page [Integer, nil]
+      # @param per_page [Integer, nil]
+      # @return [Hash] { "resources" => [...], "total_count" => ... }
+      def resources(connection_id, page: nil, per_page: nil)
+        params = {}
+        params[:page] = page if page
+        params[:per_page] = per_page if per_page
+        client.get("/v3/connections/#{connection_id}/resources", params)
+      end
+    end
+  end
+end
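
A short sketch of the connection lifecycle this resource implements: create an OAuth link, hand it to the user, then sync and inspect the documents it ingests. It assumes Supermemory::Client exposes a connections accessor for this resource (analogous to the client.search and client.memories accessors used by the integrations above, but not shown in this hunk); the provider, tags, and URLs are illustrative.

    require "supermemory"

    client = Supermemory::Client.new(api_key: ENV["SUPERMEMORY_API_KEY"])

    # Start an OAuth flow; the returned "authLink" is what the end user visits.
    # NOTE: `client.connections` is an assumed accessor, not shown in this diff.
    connection = client.connections.create(
      "notion",
      container_tags: ["user-123"],
      redirect_url: "https://example.com/supermemory/callback"
    )
    puts connection["authLink"]

    # Once authorized: look up the connection, trigger a manual sync, and list
    # the documents it has ingested.
    client.connections.get_by_tag("notion", container_tags: ["user-123"])
    client.connections.import("notion", container_tags: ["user-123"])
    client.connections.list_documents("notion", container_tags: ["user-123"])
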