@cybermem/mcp 0.1.0 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (68)
  1. package/README.md +2 -46
  2. package/dist/commands/deploy.js +230 -0
  3. package/dist/commands/init.js +65 -0
  4. package/dist/index.js +198 -90
  5. package/dist/templates/ansible/inventory/hosts.ini +3 -0
  6. package/dist/templates/ansible/playbooks/deploy-cybermem.yml +71 -0
  7. package/dist/templates/ansible/playbooks/stop-cybermem.yml +17 -0
  8. package/dist/templates/charts/cybermem/Chart.yaml +6 -0
  9. package/dist/templates/charts/cybermem/templates/dashboard-deployment.yaml +29 -0
  10. package/dist/templates/charts/cybermem/templates/dashboard-service.yaml +20 -0
  11. package/dist/templates/charts/cybermem/templates/openmemory-deployment.yaml +40 -0
  12. package/dist/templates/charts/cybermem/templates/openmemory-pvc.yaml +10 -0
  13. package/dist/templates/charts/cybermem/templates/openmemory-service.yaml +13 -0
  14. package/dist/templates/charts/cybermem/values-vps.yaml +18 -0
  15. package/dist/templates/charts/cybermem/values.yaml +42 -0
  16. package/dist/templates/docker-compose.yml +219 -0
  17. package/dist/templates/envs/local.example +27 -0
  18. package/dist/templates/envs/rpi.example +27 -0
  19. package/dist/templates/envs/vps.example +25 -0
  20. package/dist/templates/monitoring/db_exporter/Dockerfile +19 -0
  21. package/dist/templates/monitoring/db_exporter/exporter.py +313 -0
  22. package/dist/templates/monitoring/db_exporter/requirements.txt +2 -0
  23. package/dist/templates/monitoring/grafana/dashboards/cybermem.json +1088 -0
  24. package/dist/templates/monitoring/grafana/provisioning/dashboards/default.yml +12 -0
  25. package/dist/templates/monitoring/grafana/provisioning/datasources/prometheus.yml +9 -0
  26. package/dist/templates/monitoring/log_exporter/Dockerfile +13 -0
  27. package/dist/templates/monitoring/log_exporter/exporter.py +274 -0
  28. package/dist/templates/monitoring/log_exporter/requirements.txt +1 -0
  29. package/dist/templates/monitoring/postgres_exporter/queries.yml +22 -0
  30. package/dist/templates/monitoring/prometheus/prometheus.yml +22 -0
  31. package/dist/templates/monitoring/traefik/traefik.yml +32 -0
  32. package/dist/templates/monitoring/vector/vector.toml/vector.yaml +77 -0
  33. package/dist/templates/monitoring/vector/vector.yaml +106 -0
  34. package/package.json +33 -10
  35. package/templates/ansible/inventory/hosts.ini +3 -0
  36. package/templates/ansible/playbooks/deploy-cybermem.yml +71 -0
  37. package/templates/ansible/playbooks/stop-cybermem.yml +17 -0
  38. package/templates/charts/cybermem/Chart.yaml +6 -0
  39. package/templates/charts/cybermem/templates/dashboard-deployment.yaml +29 -0
  40. package/templates/charts/cybermem/templates/dashboard-service.yaml +20 -0
  41. package/templates/charts/cybermem/templates/openmemory-deployment.yaml +40 -0
  42. package/templates/charts/cybermem/templates/openmemory-pvc.yaml +10 -0
  43. package/templates/charts/cybermem/templates/openmemory-service.yaml +13 -0
  44. package/templates/charts/cybermem/values-vps.yaml +18 -0
  45. package/templates/charts/cybermem/values.yaml +42 -0
  46. package/templates/docker-compose.yml +219 -0
  47. package/templates/envs/local.example +27 -0
  48. package/templates/envs/rpi.example +27 -0
  49. package/templates/envs/vps.example +25 -0
  50. package/templates/monitoring/db_exporter/Dockerfile +19 -0
  51. package/templates/monitoring/db_exporter/exporter.py +313 -0
  52. package/templates/monitoring/db_exporter/requirements.txt +2 -0
  53. package/templates/monitoring/grafana/dashboards/cybermem.json +1088 -0
  54. package/templates/monitoring/grafana/provisioning/dashboards/default.yml +12 -0
  55. package/templates/monitoring/grafana/provisioning/datasources/prometheus.yml +9 -0
  56. package/templates/monitoring/log_exporter/Dockerfile +13 -0
  57. package/templates/monitoring/log_exporter/exporter.py +274 -0
  58. package/templates/monitoring/log_exporter/requirements.txt +1 -0
  59. package/templates/monitoring/postgres_exporter/queries.yml +22 -0
  60. package/templates/monitoring/prometheus/prometheus.yml +22 -0
  61. package/templates/monitoring/traefik/traefik.yml +32 -0
  62. package/templates/monitoring/vector/vector.toml/vector.yaml +77 -0
  63. package/templates/monitoring/vector/vector.yaml +106 -0
  64. package/requirements.txt +0 -2
  65. package/server.py +0 -347
  66. package/src/index.ts +0 -114
  67. package/test_mcp.py +0 -111
  68. package/tsconfig.json +0 -14
package/templates/monitoring/grafana/provisioning/dashboards/default.yml ADDED
@@ -0,0 +1,12 @@
+ apiVersion: 1
+
+ providers:
+   - name: 'Default'
+     orgId: 1
+     folder: ''
+     type: file
+     disableDeletion: false
+     updateIntervalSeconds: 10
+     allowUiUpdates: true
+     options:
+       path: /etc/grafana/provisioning/dashboards
package/templates/monitoring/grafana/provisioning/datasources/prometheus.yml ADDED
@@ -0,0 +1,9 @@
+ apiVersion: 1
+
+ datasources:
+   - name: Prometheus
+     type: prometheus
+     access: proxy
+     url: http://prometheus:9090
+     isDefault: true
+     editable: false
package/templates/monitoring/log_exporter/Dockerfile ADDED
@@ -0,0 +1,13 @@
+ FROM python:3.11-alpine
+
+ WORKDIR /app
+
+ COPY requirements.txt .
+ RUN pip install --no-cache-dir -r requirements.txt
+
+ COPY exporter.py .
+ RUN chmod +x exporter.py
+
+ EXPOSE 8001
+
+ CMD ["python", "-u", "exporter.py"]
package/templates/monitoring/log_exporter/exporter.py ADDED
@@ -0,0 +1,274 @@
+ #!/usr/bin/env python3
+ """
+ Traefik Access Log Exporter for Prometheus
+ Parses Traefik JSON access logs and exports per-request metrics
+ Writes aggregates to cybermem_stats table for persistence
+ """
+
+ import json
+ import os
+ import sqlite3
+ import time
+ from pathlib import Path
+
+ # Configuration
+ LOG_FILE = os.environ.get("LOG_FILE", "/var/log/traefik/access.log")
+ EXPORTER_PORT = int(os.environ.get("EXPORTER_PORT", "8001"))
+ SCRAPE_INTERVAL = int(os.environ.get("SCRAPE_INTERVAL", "5"))
+ DB_PATH = os.environ.get("DB_PATH", "/data/openmemory.sqlite")
+
+ # No Prometheus metrics here - we write to DB instead
+ # db_exporter will read from cybermem_stats and export as Gauge
+
+
+ def init_db():
+     """Initialize cybermem_stats and cybermem_access_log tables in OpenMemory database"""
+     conn = sqlite3.connect(DB_PATH)
+     cursor = conn.cursor()
+
+     # Create aggregate stats table
+     cursor.execute("""
+         CREATE TABLE IF NOT EXISTS cybermem_stats (
+             id INTEGER PRIMARY KEY AUTOINCREMENT,
+             client_name TEXT NOT NULL,
+             operation TEXT NOT NULL,
+             count INTEGER DEFAULT 0,
+             errors INTEGER DEFAULT 0,
+             last_updated INTEGER NOT NULL,
+             UNIQUE(client_name, operation)
+         )
+     """)
+
+     cursor.execute("""
+         CREATE INDEX IF NOT EXISTS idx_cybermem_stats_client_op
+         ON cybermem_stats(client_name, operation)
+     """)
+
+     # Create access log table for detailed request history
+     cursor.execute("""
+         CREATE TABLE IF NOT EXISTS cybermem_access_log (
+             id INTEGER PRIMARY KEY AUTOINCREMENT,
+             timestamp INTEGER NOT NULL,
+             client_name TEXT NOT NULL,
+             client_version TEXT,
+             method TEXT NOT NULL,
+             endpoint TEXT NOT NULL,
+             operation TEXT NOT NULL,
+             status TEXT NOT NULL,
+             is_error INTEGER DEFAULT 0
+         )
+     """)
+
+     cursor.execute("""
+         CREATE INDEX IF NOT EXISTS idx_cybermem_access_log_timestamp
+         ON cybermem_access_log(timestamp DESC)
+     """)
+
+     cursor.execute("""
+         CREATE INDEX IF NOT EXISTS idx_cybermem_access_log_client
+         ON cybermem_access_log(client_name, timestamp DESC)
+     """)
+
+     conn.commit()
+     conn.close()
+     print(f"[DB] Initialized cybermem tables in {DB_PATH}")
+
+
+ def increment_stat(client_name: str, operation: str, is_error: bool = False):
+     """Increment counter in cybermem_stats table"""
+     try:
+         conn = sqlite3.connect(DB_PATH)
+         cursor = conn.cursor()
+
+         ts = int(time.time() * 1000)
+
+         # Upsert: increment if exists, insert if not
+         if is_error:
+             cursor.execute(
+                 """
+                 INSERT INTO cybermem_stats (client_name, operation, count, errors, last_updated)
+                 VALUES (?, ?, 1, 1, ?)
+                 ON CONFLICT(client_name, operation)
+                 DO UPDATE SET
+                     count = count + 1,
+                     errors = errors + 1,
+                     last_updated = ?
+                 """,
+                 [client_name, operation, ts, ts],
+             )
+         else:
+             cursor.execute(
+                 """
+                 INSERT INTO cybermem_stats (client_name, operation, count, errors, last_updated)
+                 VALUES (?, ?, 1, 0, ?)
+                 ON CONFLICT(client_name, operation)
+                 DO UPDATE SET
+                     count = count + 1,
+                     last_updated = ?
+                 """,
+                 [client_name, operation, ts, ts],
+             )
+
+         conn.commit()
+         conn.close()
+     except Exception as e:
+         print(f"[DB] Error updating stats: {e}")
+
+
+ def log_access(
+     client_name: str,
+     client_version: str,
+     method: str,
+     endpoint: str,
+     operation: str,
+     status: str,
+     is_error: bool,
+ ):
+     """Log individual request to access_log table"""
+     try:
+         conn = sqlite3.connect(DB_PATH)
+         cursor = conn.cursor()
+
+         ts = int(time.time() * 1000)
+
+         cursor.execute(
+             """
+             INSERT INTO cybermem_access_log (timestamp, client_name, client_version, method, endpoint, operation, status, is_error)
+             VALUES (?, ?, ?, ?, ?, ?, ?, ?)
+             """,
+             [
+                 ts,
+                 client_name,
+                 client_version,
+                 method,
+                 endpoint,
+                 operation,
+                 status,
+                 1 if is_error else 0,
+             ],
+         )
+
+         conn.commit()
+         conn.close()
+     except Exception as e:
+         print(f"[DB] Error logging access: {e}")
+
+
+ def tail_log_file(filepath, poll_interval=1):
+     """Tail a log file, yielding new lines as they appear"""
+     path = Path(filepath)
+
+     # Wait for file to exist
+     while not path.exists():
+         print(f"Waiting for log file: {filepath}")
+         time.sleep(poll_interval)
+
+     with open(filepath, "r") as f:
+         # Start from the end of file
+         f.seek(0, 2)
+
+         while True:
+             line = f.readline()
+             if line:
+                 yield line
+             else:
+                 time.sleep(poll_interval)
+
+
+ def parse_and_export():
+     """Parse Traefik access logs and export metrics"""
+     print(f"Starting Traefik Log Exporter on port {EXPORTER_PORT}")
+     print(f"Watching log file: {LOG_FILE}")
+
+     for line in tail_log_file(LOG_FILE, poll_interval=SCRAPE_INTERVAL):
+         try:
+             data = json.loads(line.strip())
+
+             # Extract relevant fields
+             method = data.get("RequestMethod", "UNKNOWN")
+             path = data.get("RequestPath", "/unknown")
+             status = str(data.get("DownstreamStatus", 0))
+
+             # Extract MCP client info from custom headers
+             client_name = data.get("request_X-Client-Name", "unknown")
+             client_version = data.get("request_X-Client-Version", "unknown")
+
+             # Fallback to User-Agent if client_name is unknown
+             if client_name == "unknown":
+                 ua = data.get("request_User-Agent", "")
+                 if ua and ua != "-":
+                     # Simple heuristic: take the first part before '/' or space
+                     # e.g. "curl/7.64.1" -> "curl", "Mozilla/5.0" -> "Mozilla"
+                     parts = ua.split("/")
+                     if len(parts) > 0:
+                         potential_name = parts[0].split(" ")[0].strip()
+                         if potential_name:
+                             client_name = potential_name
+
+             # Remove query params first
+             endpoint = path.split("?")[0]
+
+             # Determine operation type from endpoint BEFORE normalization
+             if endpoint == "/memory/add":
+                 operation = "create"
+             elif endpoint == "/memory/query":
+                 operation = "read"
+             elif endpoint.startswith("/memory/") and method == "PATCH":
+                 operation = "update"
+             elif endpoint.startswith("/memory/") and method == "DELETE":
+                 operation = "delete"
+             elif endpoint.startswith("/mcp"):
+                 operation = "create"  # MCP operations are typically POST
+             else:
+                 operation = "other"
+
+             # NOW normalize endpoint to remove IDs (e.g., /memory/123 -> /memory/:id)
+             if (
+                 endpoint.startswith("/memory/")
+                 and len(endpoint) > 8
+                 and operation in ["update", "delete"]
+             ):
+                 endpoint = "/memory/:id"
+
+             # Only track requests to OpenMemory API (/memory/* and /mcp endpoints)
+             # Exclude /health checks - they pollute Top/Last Reader metrics
+             if endpoint.startswith("/memory") or endpoint.startswith("/mcp"):
+                 # Check if it's an error (4xx or 5xx)
+                 is_error = status.startswith("4") or status.startswith("5")
+
+                 # Write aggregate stats
+                 increment_stat(client_name, operation, is_error)
+
+                 # Log individual request to access_log
+                 log_access(
+                     client_name,
+                     client_version,
+                     method,
+                     endpoint,
+                     operation,
+                     status,
+                     is_error,
+                 )
+
+                 print(
+                     f"[{time.strftime('%H:%M:%S')}] {client_name}/{client_version} {method} {endpoint} ({operation}) -> {status}"
+                 )
+
+         except json.JSONDecodeError:
+             # Skip invalid JSON lines
+             continue
+         except Exception as e:
+             print(f"Error processing log line: {e}")
+             continue
+
+
+ if __name__ == "__main__":
+     # Initialize database table
+     init_db()
+
+     # Note: No Prometheus HTTP server here
+     # Metrics are written to DB and exported by db_exporter
+     print(f"Writing metrics to database: {DB_PATH}")
+
+     # Start parsing logs
+     parse_and_export()
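The cybermem_stats table relies on SQLite's ON CONFLICT clause so each (client_name, operation) pair keeps a single running counter. A minimal sketch of that behavior against an in-memory database, using a slightly condensed one-statement variant of increment_stat's two-branch upsert (the schema is the one from the hunk above):

import sqlite3
import time

conn = sqlite3.connect(":memory:")
conn.execute("""
    CREATE TABLE cybermem_stats (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        client_name TEXT NOT NULL,
        operation TEXT NOT NULL,
        count INTEGER DEFAULT 0,
        errors INTEGER DEFAULT 0,
        last_updated INTEGER NOT NULL,
        UNIQUE(client_name, operation)
    )
""")

def bump(client, op, is_error=False):
    # Insert a fresh row, or add to the existing counters when the
    # (client_name, operation) uniqueness constraint fires.
    ts = int(time.time() * 1000)
    err = 1 if is_error else 0
    conn.execute(
        """
        INSERT INTO cybermem_stats (client_name, operation, count, errors, last_updated)
        VALUES (?, ?, 1, ?, ?)
        ON CONFLICT(client_name, operation)
        DO UPDATE SET count = count + 1, errors = errors + ?, last_updated = ?
        """,
        (client, op, err, ts, err, ts),
    )

bump("Cursor", "read")
bump("Cursor", "read")
bump("Cursor", "read", is_error=True)
print(conn.execute(
    "SELECT client_name, operation, count, errors FROM cybermem_stats"
).fetchall())
# -> [('Cursor', 'read', 3, 1)]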
package/templates/monitoring/log_exporter/requirements.txt ADDED
@@ -0,0 +1 @@
+ prometheus-client==0.19.0
package/templates/monitoring/postgres_exporter/queries.yml ADDED
@@ -0,0 +1,22 @@
+ pg_openmemory_stored_memories:
+   query: "SELECT user_id as client, COUNT(*) as count FROM openmemory_memories GROUP BY user_id"
+   metrics:
+     - client:
+         usage: "LABEL"
+         description: "Client identifier"
+     - count:
+         usage: "GAUGE"
+         description: "Total memories stored per client"
+
+ pg_openmemory_stored_memories_by_sector:
+   query: "SELECT user_id as client, primary_sector as sector, COUNT(*) as count FROM openmemory_memories GROUP BY user_id, primary_sector"
+   metrics:
+     - client:
+         usage: "LABEL"
+         description: "Client identifier"
+     - sector:
+         usage: "LABEL"
+         description: "Memory sector"
+     - count:
+         usage: "GAUGE"
+         description: "Memories stored per client per sector"
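postgres_exporter runs each query against Postgres and turns every result row into a gauge, with the LABEL columns becoming Prometheus labels. A rough sketch of the resulting series, swapping in stdlib sqlite3 for Postgres since the SQL here happens to be portable (table contents are invented for illustration):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE openmemory_memories (user_id TEXT, primary_sector TEXT)")
conn.executemany(
    "INSERT INTO openmemory_memories VALUES (?, ?)",
    [("cursor", "code"), ("cursor", "code"), ("cursor", "notes"), ("claude", "code")],
)

# Same aggregation as pg_openmemory_stored_memories_by_sector; postgres_exporter
# names the metric <query_name>_<value_column> and labels it per row.
rows = conn.execute(
    "SELECT user_id as client, primary_sector as sector, COUNT(*) as count "
    "FROM openmemory_memories GROUP BY user_id, primary_sector"
).fetchall()
for client, sector, count in rows:
    print(f'pg_openmemory_stored_memories_by_sector_count{{client="{client}",sector="{sector}"}} {count}')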
package/templates/monitoring/prometheus/prometheus.yml ADDED
@@ -0,0 +1,22 @@
+ global:
+   scrape_interval: 15s
+
+ scrape_configs:
+   # Main metrics source: Database Exporter (memory state)
+   - job_name: "db-exporter"
+     static_configs:
+       - targets: ["db-exporter:8000"]
+
+   # Request logs from Traefik access log
+   # Request logs from Traefik access log -> written to DB -> exported by db-exporter
+   # - job_name: 'log-exporter'
+   #   static_configs:
+   #     - targets: ['log-exporter:8001']
+
+   - job_name: "postgres"
+     static_configs:
+       - targets: ["postgres_exporter:9187"]
+
+   - job_name: "prometheus"
+     static_configs:
+       - targets: ["postgres_exporter:9187" were "localhost:9090"]
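A quick stdlib check that the configured scrape jobs came up, assuming Prometheus is published on localhost:9090 (the compose port mapping isn't shown in this section); /api/v1/targets is Prometheus' standard targets endpoint:

import json
from urllib.request import urlopen

with urlopen("http://localhost:9090/api/v1/targets") as resp:
    targets = json.load(resp)["data"]["activeTargets"]

for t in targets:
    print(t["labels"]["job"], t["scrapeUrl"], t["health"])
# Expect db-exporter, postgres, and prometheus itself; log-exporter stays
# commented out because its data reaches Prometheus via the db-exporter.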
package/templates/monitoring/traefik/traefik.yml ADDED
@@ -0,0 +1,32 @@
+ log:
+   level: INFO
+
+ accessLog:
+   filePath: "/var/log/traefik/access.log"
+   format: json
+   fields:
+     defaultMode: keep
+     headers:
+       defaultMode: keep
+       names:
+         Authorization: keep
+         Content-Type: keep
+         User-Agent: keep
+         X-Client-Name: keep
+         X-Client-Version: keep
+
+ api:
+   dashboard: true
+   insecure: true
+
+ providers:
+   docker:
+     endpoint: "unix:///var/run/docker.sock"
+     exposedByDefault: false
+   file:
+     directory: /etc/traefik/dynamic
+     watch: true
+
+ entryPoints:
+   web:
+     address: ":8626"
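With format: json and the headers above kept, Traefik writes one JSON object per request, exposing each kept header as a request_<Header-Name> field; these are exactly the keys the log exporter and the Vector configs read. A hypothetical sample line (field values invented, field names taken from the exporter code above):

import json

line = json.dumps({
    "RequestMethod": "POST",
    "RequestPath": "/memory/add",
    "DownstreamStatus": 200,
    "Duration": 42000000,  # Traefik reports duration in nanoseconds
    "request_X-Client-Name": "cursor",
    "request_X-Client-Version": "1.2.0",
    "request_User-Agent": "curl/8.5.0",
})

data = json.loads(line)
print(data["RequestMethod"], data["RequestPath"], data["DownstreamStatus"])
print(data.get("request_X-Client-Name", "unknown"))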
package/templates/monitoring/vector/vector.toml/vector.yaml ADDED
@@ -0,0 +1,77 @@
+ [sources.traefik_logs]
+ type = "file"
+ include = ["/var/log/traefik/access.log"]
+ read_from = "beginning"
+
+ [transforms.parse_logs]
+ type = "remap"
+ inputs = ["traefik_logs"]
+ source = '''
+ . = parse_json!(.message)
+ '''
+
+ [transforms.extract_metrics]
+ type = "remap"
+ inputs = ["parse_logs"]
+ source = '''
+ # Extract client_id from Authorization header (Bearer <client_id>.<secret>)
+ # If no auth header or invalid format, use "anonymous"
+ auth_header = .request_Authorization
+ client_id = "anonymous"
+
+ if !is_null(auth_header) {
+     # Try to capture everything between "Bearer " and the first dot
+     parts = split(auth_header, ".")
+     if length(parts) > 0 {
+         # Remove "Bearer " prefix if present
+         token_start = replace(parts[0], "Bearer ", "")
+         # Basic validation to ensure it looks like a client ID
+         if length(token_start) > 0 {
+             client_id = token_start
+         }
+     }
+ }
+
+ # Standardize endpoint (remove query params, maybe aggregate IDs)
+ endpoint = .RequestPath
+ method = .RequestMethod
+ status = .DownstreamStatus
+ duration_seconds = to_float!(.Duration) / 1000000000.0
+
+ .tags = {
+     "client": client_id,
+     "method": method,
+     "endpoint": endpoint,
+     "status": to_string(status)
+ }
+ .duration = duration_seconds
+ '''
+
+ [transforms.generate_metrics]
+ type = "log_to_metric"
+ inputs = ["extract_metrics"]
+
+ [[transforms.generate_metrics.metrics]]
+ type = "counter"
+ field = "status"
+ name = "requests_total"
+ namespace = "openmemory"
+ tags = ["client", "method", "endpoint", "status"]
+
+ [[transforms.generate_metrics.metrics]]
+ type = "histogram"
+ field = "duration"
+ name = "request_duration_seconds"
+ namespace = "openmemory"
+ tags = ["client", "endpoint"]
+ buckets = [0.005, 0.01, 0.025, 0.05, 0.1, 0.25, 0.5, 1.0, 2.5, 5.0, 10.0]
+
+ [sinks.prometheus]
+ type = "prometheus_exporter"
+ inputs = ["generate_metrics"]
+ address = "0.0.0.0:9091"
+
+ [sinks.console]
+ type = "console"
+ inputs = ["extract_metrics"]
+ encoding.codec = "json"
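The VRL in extract_metrics pulls a client identifier out of an Authorization header shaped like "Bearer <client_id>.<secret>" by splitting on the first dot and stripping the Bearer prefix; it also converts Traefik's nanosecond Duration to seconds by dividing by 1e9. For readers unfamiliar with VRL, a Python rendering of the token-parsing steps (the header value is an assumed example):

def client_id_from_auth(auth_header):
    # Mirrors the VRL: split on ".", drop "Bearer ", fall back to "anonymous".
    if not auth_header:
        return "anonymous"
    parts = auth_header.split(".")
    token_start = parts[0].replace("Bearer ", "")
    return token_start if token_start else "anonymous"

print(client_id_from_auth("Bearer cursor-abc123.s3cr3t"))  # -> cursor-abc123
print(client_id_from_auth(None))                           # -> anonymous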
package/templates/monitoring/vector/vector.yaml ADDED
@@ -0,0 +1,106 @@
+ sources:
+   traefik_logs:
+     type: file
+     include:
+       - /var/log/traefik/access.log
+     read_from: end
+
+ transforms:
+   parse_logs:
+     type: remap
+     inputs:
+       - traefik_logs
+     source: |
+       . = parse_json!(.message)
+
+   extract_metrics:
+     type: remap
+     inputs:
+       - parse_logs
+     source: |
+       auth_header = .request_Authorization
+       client_name = "Anonymous"
+
+       # Extract client from Authorization header and map to human-readable names
+       if !is_null(auth_header) {
+           parts = split!(auth_header, ".")
+           if length(parts) > 0 {
+               token_start = replace!(parts[0], "Bearer ", "")
+
+               # Map client tokens to human-readable names
+               if contains(token_start, "claude") || contains(token_start, "anthropic") {
+                   client_name = "Claude Desktop"
+               } else if contains(token_start, "cursor") {
+                   client_name = "Cursor"
+               } else if contains(token_start, "vscode") || contains(token_start, "code") {
+                   client_name = "Visual Studio Code"
+               } else if contains(token_start, "copilot") || contains(token_start, "github") {
+                   client_name = "GitHub Copilot"
+               } else if contains(token_start, "windsurf") {
+                   client_name = "Windsurf"
+               } else if contains(token_start, "cline") {
+                   client_name = "Cline"
+               } else if contains(token_start, "continue") {
+                   client_name = "Continue"
+               } else if contains(token_start, "zed") {
+                   client_name = "Zed"
+               } else if length(token_start) > 0 {
+                   # For dev/testing, use the token itself
+                   client_name = token_start
+               }
+           }
+       }
+
+       endpoint = .RequestPath
+       method = .RequestMethod
+       status = .DownstreamStatus
+       duration_seconds = to_float!(.Duration) / 1000000000.0
+
+       .tags = {
+           "client": client_name,
+           "method": method,
+           "endpoint": endpoint,
+           "status": to_string!(status)
+       }
+       .duration = duration_seconds
+       .count = 1
+
+   generate_metrics:
+     type: log_to_metric
+     inputs:
+       - extract_metrics
+     metrics:
+       - type: counter
+         field: count
+         name: requests_total
+         namespace: openmemory
+         tags:
+           client: "{{ tags.client }}"
+           method: "{{ tags.method }}"
+           endpoint: "{{ tags.endpoint }}"
+           status: "{{ tags.status }}"
+
+       - type: histogram
+         field: duration
+         name: request_duration_seconds
+         namespace: openmemory
+         tags:
+           client: "{{ tags.client }}"
+           endpoint: "{{ tags.endpoint }}"
+
+ sinks:
+   prometheus:
+     type: prometheus_exporter
+     inputs:
+       - generate_metrics
+     address: 0.0.0.0:9091
+     default_namespace: openmemory
+     flush_period_secs: 5
+     suppress_timestamp: true
+
+   console:
+     type: console
+     inputs:
+       - extract_metrics
+     encoding:
+       codec: json
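The YAML variant extends the same token parsing with a substring-based mapping from tokens to display names. Order matters in that if/else chain: "code" is a broad substring, so the more specific checks come first, and an unmatched non-empty token falls through as its own name. A Python equivalent of the chain, sketched for illustration:

CLIENT_NAMES = [
    # Checked in order, mirroring the VRL if/else chain.
    (("claude", "anthropic"), "Claude Desktop"),
    (("cursor",), "Cursor"),
    (("vscode", "code"), "Visual Studio Code"),
    (("copilot", "github"), "GitHub Copilot"),
    (("windsurf",), "Windsurf"),
    (("cline",), "Cline"),
    (("continue",), "Continue"),
    (("zed",), "Zed"),
]

def display_name(token_start):
    for needles, name in CLIENT_NAMES:
        if any(n in token_start for n in needles):
            return name
    # For dev/testing, the raw token doubles as the client name.
    return token_start or "Anonymous"

print(display_name("cursor-abc123"))  # -> Cursor
print(display_name("mytoken"))        # -> mytoken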
package/requirements.txt DELETED
@@ -1,2 +0,0 @@
- mcp>=0.9.0
- httpx>=0.27.0