@realtimex/folio 0.1.16 → 0.1.17

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. package/api/src/middleware/auth.ts +77 -0
  2. package/api/src/routes/chat.ts +7 -1
  3. package/api/src/routes/index.ts +2 -0
  4. package/api/src/routes/ingestions.ts +45 -5
  5. package/api/src/routes/policies.ts +50 -7
  6. package/api/src/routes/stats.ts +9 -5
  7. package/api/src/routes/workspaces.ts +290 -0
  8. package/api/src/services/ChatService.ts +8 -2
  9. package/api/src/services/IngestionService.ts +38 -26
  10. package/api/src/services/PolicyEngine.ts +4 -1
  11. package/api/src/services/PolicyLearningService.ts +31 -6
  12. package/api/src/services/PolicyLoader.ts +44 -25
  13. package/api/src/services/RAGService.ts +52 -12
  14. package/dist/api/src/middleware/auth.js +59 -0
  15. package/dist/api/src/routes/chat.js +1 -1
  16. package/dist/api/src/routes/index.js +2 -0
  17. package/dist/api/src/routes/ingestions.js +45 -8
  18. package/dist/api/src/routes/policies.js +49 -7
  19. package/dist/api/src/routes/stats.js +9 -5
  20. package/dist/api/src/routes/workspaces.js +220 -0
  21. package/dist/api/src/services/ChatService.js +7 -2
  22. package/dist/api/src/services/IngestionService.js +35 -30
  23. package/dist/api/src/services/PolicyEngine.js +2 -1
  24. package/dist/api/src/services/PolicyLearningService.js +28 -6
  25. package/dist/api/src/services/PolicyLoader.js +29 -25
  26. package/dist/api/src/services/RAGService.js +43 -11
  27. package/dist/assets/index-CTn5FcC4.js +113 -0
  28. package/dist/assets/index-Dq9sxoZK.css +1 -0
  29. package/dist/index.html +2 -2
  30. package/package.json +1 -1
  31. package/supabase/functions/workspace-invite/index.ts +110 -0
  32. package/supabase/migrations/20260223000000_initial_foundation.sql +5 -0
  33. package/supabase/migrations/20260224000004_add_avatars_storage.sql +4 -0
  34. package/supabase/migrations/20260224000006_add_policies_table.sql +5 -0
  35. package/supabase/migrations/20260224000008_add_ingestions_table.sql +2 -0
  36. package/supabase/migrations/20260225000000_setup_compatible_mode.sql +17 -4
  37. package/supabase/migrations/20260225000003_add_baseline_configs.sql +4 -3
  38. package/supabase/migrations/20260226000000_add_processing_events.sql +1 -0
  39. package/supabase/migrations/20260226000002_add_dynamic_rag.sql +1 -0
  40. package/supabase/migrations/20260226000005_add_chat_tables.sql +3 -0
  41. package/supabase/migrations/20260228000001_add_policy_match_feedback.sql +4 -0
  42. package/supabase/migrations/20260302064608_add_ingestion_llm_settings_compat.sql +15 -0
  43. package/supabase/migrations/20260303000000_add_workspaces_phase1.sql +459 -0
  44. package/supabase/migrations/20260303010000_add_workspace_management_rpc.sql +310 -0
  45. package/supabase/migrations/20260303020000_workspace_scope_document_chunks.sql +139 -0
  46. package/dist/assets/index-DzN8-j-e.css +0 -1
  47. package/dist/assets/index-dnBz6SWG.js +0 -113
@@ -182,7 +182,14 @@ function buildFromIngestionRow(ingestion) {
182
182
  }
183
183
  export class PolicyLearningService {
184
184
  static async recordManualMatch(opts) {
185
- const { supabase, userId, ingestion, policyId, policyName } = opts;
185
+ const { supabase, userId, workspaceId, ingestion, policyId, policyName } = opts;
186
+ if (!workspaceId) {
187
+ logger.warn("Skipping policy learning feedback: missing workspace context", {
188
+ ingestionId: ingestion.id,
189
+ policyId,
190
+ });
191
+ return;
192
+ }
186
193
  const features = buildFromIngestionRow(ingestion);
187
194
  if (features.tokens.length === 0) {
188
195
  logger.warn("Skipping policy learning feedback: no usable tokens", {
@@ -192,6 +199,7 @@ export class PolicyLearningService {
192
199
  return;
193
200
  }
194
201
  const row = {
202
+ workspace_id: workspaceId,
195
203
  user_id: userId,
196
204
  ingestion_id: ingestion.id,
197
205
  policy_id: policyId,
@@ -201,7 +209,7 @@ export class PolicyLearningService {
201
209
  };
202
210
  const { error } = await supabase
203
211
  .from("policy_match_feedback")
204
- .upsert(row, { onConflict: "user_id,ingestion_id,policy_id" });
212
+ .upsert(row, { onConflict: "workspace_id,ingestion_id,policy_id" });
205
213
  if (error) {
206
214
  logger.error("Failed to save policy match feedback", {
207
215
  ingestionId: ingestion.id,
@@ -217,12 +225,15 @@ export class PolicyLearningService {
217
225
  });
218
226
  }
219
227
  static async getPolicyLearningStats(opts) {
220
- const { supabase, userId } = opts;
228
+ const { supabase, userId, workspaceId } = opts;
229
+ if (!workspaceId) {
230
+ return {};
231
+ }
221
232
  const normalizedPolicyIds = (opts.policyIds ?? []).map((id) => id.trim()).filter(Boolean);
222
233
  let query = supabase
223
234
  .from("policy_match_feedback")
224
235
  .select("policy_id,created_at")
225
- .eq("user_id", userId)
236
+ .eq("workspace_id", workspaceId)
226
237
  .order("created_at", { ascending: false })
227
238
  .limit(5000);
228
239
  if (normalizedPolicyIds.length > 0) {
@@ -251,7 +262,7 @@ export class PolicyLearningService {
251
262
  return stats;
252
263
  }
253
264
  static async resolveLearnedCandidate(opts) {
254
- const { supabase, userId, policyIds, filePath, baselineEntities, documentText } = opts;
265
+ const { supabase, userId, workspaceId, policyIds, filePath, baselineEntities, documentText } = opts;
255
266
  if (policyIds.length === 0) {
256
267
  return {
257
268
  candidate: null,
@@ -263,6 +274,17 @@ export class PolicyLearningService {
263
274
  },
264
275
  };
265
276
  }
277
+ if (!workspaceId) {
278
+ return {
279
+ candidate: null,
280
+ diagnostics: {
281
+ reason: "no_feedback_samples",
282
+ evaluatedPolicies: policyIds.length,
283
+ evaluatedSamples: 0,
284
+ topCandidates: [],
285
+ },
286
+ };
287
+ }
266
288
  const docFeatures = buildFromDocInput({ filePath, baselineEntities, documentText });
267
289
  if (docFeatures.tokens.length === 0) {
268
290
  return {
@@ -278,7 +300,7 @@ export class PolicyLearningService {
278
300
  const { data, error } = await supabase
279
301
  .from("policy_match_feedback")
280
302
  .select("policy_id,policy_name,features")
281
- .eq("user_id", userId)
303
+ .eq("workspace_id", workspaceId)
282
304
  .in("policy_id", policyIds)
283
305
  .order("created_at", { ascending: false })
284
306
  .limit(400);
@@ -1,7 +1,7 @@
1
1
  import { createLogger } from "../utils/logger.js";
2
2
  const logger = createLogger("PolicyLoader");
3
3
  // ─── Cache ───────────────────────────────────────────────────────────────────
4
- // Keyed by user_id so one user's policies never bleed into another's.
4
+ // Keyed by workspace_id so one workspace's policies never bleed into another's.
5
5
  const _cache = new Map();
6
6
  const CACHE_TTL_MS = 30_000;
7
7
  // ─── Row → Policy ────────────────────────────────────────────────────────────
@@ -22,19 +22,21 @@ function rowToPolicy(row) {
22
22
  // ─── PolicyLoader ────────────────────────────────────────────────────────────
23
23
  export class PolicyLoader {
24
24
  /**
25
- * Load all policies for the authenticated user from Supabase.
25
+ * Load all policies for the active workspace from Supabase.
26
26
  * Returns [] if no Supabase client is provided (unauthenticated state).
27
27
  */
28
- static async load(forceRefresh = false, supabase) {
28
+ static async load(forceRefresh = false, supabase, workspaceId) {
29
29
  if (!supabase) {
30
30
  logger.info("No Supabase client — policies require authentication");
31
31
  return [];
32
32
  }
33
- // Resolve the user ID to scope the cache correctly
34
- const { data: { user } } = await supabase.auth.getUser();
35
- const userId = user?.id ?? "anonymous";
33
+ const resolvedWorkspaceId = (workspaceId ?? "").trim();
34
+ if (!resolvedWorkspaceId) {
35
+ logger.warn("No workspace context returning empty policy set");
36
+ return [];
37
+ }
36
38
  const now = Date.now();
37
- const cached = _cache.get(userId);
39
+ const cached = _cache.get(resolvedWorkspaceId);
38
40
  if (!forceRefresh && cached && now - cached.loadedAt < CACHE_TTL_MS) {
39
41
  return cached.policies;
40
42
  }
@@ -42,13 +44,14 @@ export class PolicyLoader {
42
44
  const { data, error } = await supabase
43
45
  .from("policies")
44
46
  .select("*")
47
+ .eq("workspace_id", resolvedWorkspaceId)
45
48
  .eq("enabled", true)
46
49
  .order("priority", { ascending: false });
47
50
  if (error)
48
51
  throw error;
49
52
  const policies = (data ?? []).map(rowToPolicy);
50
- _cache.set(userId, { policies, loadedAt: Date.now() });
51
- logger.info(`Loaded ${policies.length} policies from DB for user ${userId}`);
53
+ _cache.set(resolvedWorkspaceId, { policies, loadedAt: Date.now() });
54
+ logger.info(`Loaded ${policies.length} policies from DB for workspace ${resolvedWorkspaceId}`);
52
55
  return policies;
53
56
  }
54
57
  catch (err) {
@@ -56,9 +59,9 @@ export class PolicyLoader {
56
59
  return [];
57
60
  }
58
61
  }
59
- static invalidateCache(userId) {
60
- if (userId) {
61
- _cache.delete(userId);
62
+ static invalidateCache(workspaceId) {
63
+ if (workspaceId) {
64
+ _cache.delete(workspaceId);
62
65
  }
63
66
  else {
64
67
  _cache.clear();
@@ -78,11 +81,12 @@ export class PolicyLoader {
78
81
  * Save (upsert) a policy to Supabase.
79
82
  * Throws if no Supabase client is available.
80
83
  */
81
- static async save(policy, supabase, userId) {
82
- if (!supabase || !userId) {
84
+ static async save(policy, supabase, userId, workspaceId) {
85
+ if (!supabase || !userId || !workspaceId) {
83
86
  throw new Error("Authentication required to save policies");
84
87
  }
85
88
  const row = {
89
+ workspace_id: workspaceId,
86
90
  user_id: userId,
87
91
  policy_id: policy.metadata.id,
88
92
  api_version: policy.apiVersion,
@@ -94,25 +98,25 @@ export class PolicyLoader {
94
98
  };
95
99
  const { error } = await supabase
96
100
  .from("policies")
97
- .upsert(row, { onConflict: "user_id,policy_id" });
101
+ .upsert(row, { onConflict: "workspace_id,policy_id" });
98
102
  if (error)
99
103
  throw new Error(`Failed to save policy: ${error.message}`);
100
- this.invalidateCache();
104
+ this.invalidateCache(workspaceId);
101
105
  logger.info(`Saved policy to DB: ${policy.metadata.id}`);
102
106
  return `db:policies/${policy.metadata.id}`;
103
107
  }
104
108
  /**
105
109
  * Partially update a policy (enabled toggle, name, description, tags, priority).
106
110
  */
107
- static async patch(policyId, patch, supabase, userId) {
108
- if (!supabase || !userId) {
111
+ static async patch(policyId, patch, supabase, userId, workspaceId) {
112
+ if (!supabase || !userId || !workspaceId) {
109
113
  throw new Error("Authentication required to update policies");
110
114
  }
111
115
  const { data: existing, error: fetchErr } = await supabase
112
116
  .from("policies")
113
117
  .select("metadata, priority, enabled")
114
118
  .eq("policy_id", policyId)
115
- .eq("user_id", userId)
119
+ .eq("workspace_id", workspaceId)
116
120
  .single();
117
121
  if (fetchErr || !existing)
118
122
  throw new Error("Policy not found");
@@ -131,10 +135,10 @@ export class PolicyLoader {
131
135
  priority: patch.priority ?? existing.priority,
132
136
  })
133
137
  .eq("policy_id", policyId)
134
- .eq("user_id", userId);
138
+ .eq("workspace_id", workspaceId);
135
139
  if (error)
136
140
  throw new Error(`Failed to patch policy: ${error.message}`);
137
- this.invalidateCache();
141
+ this.invalidateCache(workspaceId);
138
142
  logger.info(`Patched policy: ${policyId}`);
139
143
  return true;
140
144
  }
@@ -142,18 +146,18 @@ export class PolicyLoader {
142
146
  * Delete a policy by ID from Supabase.
143
147
  * Throws if no Supabase client is available.
144
148
  */
145
- static async delete(policyId, supabase, userId) {
146
- if (!supabase || !userId) {
149
+ static async delete(policyId, supabase, userId, workspaceId) {
150
+ if (!supabase || !userId || !workspaceId) {
147
151
  throw new Error("Authentication required to delete policies");
148
152
  }
149
153
  const { error, count } = await supabase
150
154
  .from("policies")
151
155
  .delete({ count: "exact" })
152
156
  .eq("policy_id", policyId)
153
- .eq("user_id", userId);
157
+ .eq("workspace_id", workspaceId);
154
158
  if (error)
155
159
  throw new Error(`Failed to delete policy: ${error.message}`);
156
- this.invalidateCache();
160
+ this.invalidateCache(workspaceId);
157
161
  return (count ?? 0) > 0;
158
162
  }
159
163
  }
@@ -89,7 +89,7 @@ export class RAGService {
89
89
  /**
90
90
  * Process an ingested document's raw text: chunk it, embed it, and store in DB.
91
91
  */
92
- static async chunkAndEmbed(ingestionId, userId, rawText, supabase, settings) {
92
+ static async chunkAndEmbed(ingestionId, userId, rawText, supabase, settings, workspaceId) {
93
93
  if (/^\[VLM_(IMAGE|PDF)_DATA:/.test(rawText)) {
94
94
  logger.info(`Skipping chunking and embedding for VLM base64 multimodal data (Ingestion: ${ingestionId})`);
95
95
  return;
@@ -100,6 +100,21 @@ export class RAGService {
100
100
  return;
101
101
  }
102
102
  const resolvedModel = await this.resolveEmbeddingModel(settings || {});
103
+ let resolvedWorkspaceId = (workspaceId ?? "").trim();
104
+ if (!resolvedWorkspaceId) {
105
+ const { data: ingestionRow, error: ingestionLookupError } = await supabase
106
+ .from("ingestions")
107
+ .select("workspace_id")
108
+ .eq("id", ingestionId)
109
+ .maybeSingle();
110
+ if (ingestionLookupError) {
111
+ throw new Error(`Failed to resolve workspace for ingestion ${ingestionId}: ${ingestionLookupError.message}`);
112
+ }
113
+ resolvedWorkspaceId = String(ingestionRow?.workspace_id ?? "").trim();
114
+ if (!resolvedWorkspaceId) {
115
+ throw new Error(`Workspace context is required to index chunks for ingestion ${ingestionId}`);
116
+ }
117
+ }
103
118
  logger.info(`Extracted ${chunks.length} chunks for ingestion ${ingestionId}. Embedding with ${resolvedModel.provider}/${resolvedModel.model}...`);
104
119
  // Global gate: background fire-and-forget jobs are bounded process-wide.
105
120
  await this.acquireEmbedJobSlot();
@@ -113,6 +128,7 @@ export class RAGService {
113
128
  const { data: existing } = await supabase
114
129
  .from("document_chunks")
115
130
  .select("id")
131
+ .eq("workspace_id", resolvedWorkspaceId)
116
132
  .eq("ingestion_id", ingestionId)
117
133
  .eq("content_hash", hash)
118
134
  .eq("embedding_provider", resolvedModel.provider)
@@ -126,6 +142,7 @@ export class RAGService {
126
142
  const embedding = await this.embedTextWithResolvedModel(content, resolvedModel);
127
143
  const vector_dim = embedding.length;
128
144
  const { error } = await supabase.from("document_chunks").insert({
145
+ workspace_id: resolvedWorkspaceId,
129
146
  user_id: userId,
130
147
  ingestion_id: ingestionId,
131
148
  content,
@@ -155,30 +172,44 @@ export class RAGService {
155
172
  * Semantically search the document chunks using dynamic pgvector partial indexing.
156
173
  */
157
174
  static async runSearchForModel(args) {
158
- const { userId, supabase, modelScope, queryEmbedding, queryDim, similarityThreshold, topK } = args;
159
- const { data, error } = await supabase.rpc("search_documents", {
160
- p_user_id: userId,
175
+ const { userId, workspaceId, supabase, modelScope, queryEmbedding, queryDim, similarityThreshold, topK } = args;
176
+ const basePayload = {
161
177
  p_embedding_provider: modelScope.provider,
162
178
  p_embedding_model: modelScope.model,
163
179
  query_embedding: queryEmbedding,
164
180
  match_threshold: similarityThreshold,
165
181
  match_count: topK,
166
182
  query_dim: queryDim
167
- });
183
+ };
184
+ const { data, error } = workspaceId
185
+ ? await supabase.rpc("search_workspace_documents", {
186
+ p_workspace_id: workspaceId,
187
+ ...basePayload
188
+ })
189
+ : await supabase.rpc("search_documents", {
190
+ p_user_id: userId,
191
+ ...basePayload
192
+ });
168
193
  if (error) {
169
194
  throw new Error(`Knowledge base search failed for ${modelScope.provider}/${modelScope.model}: ${error.message}`);
170
195
  }
171
196
  return (data || []);
172
197
  }
173
- static async listUserModelScopes(userId, supabase) {
174
- const { data, error } = await supabase
198
+ static async listModelScopes(userId, supabase, workspaceId) {
199
+ let query = supabase
175
200
  .from("document_chunks")
176
201
  .select("embedding_provider, embedding_model, vector_dim, created_at")
177
- .eq("user_id", userId)
178
202
  .order("created_at", { ascending: false })
179
203
  .limit(2000);
204
+ if (workspaceId) {
205
+ query = query.eq("workspace_id", workspaceId);
206
+ }
207
+ else {
208
+ query = query.eq("user_id", userId);
209
+ }
210
+ const { data, error } = await query;
180
211
  if (error) {
181
- logger.warn("Failed to list user embedding scopes for RAG fallback", { error });
212
+ logger.warn("Failed to list embedding scopes for RAG fallback", { userId, workspaceId, error });
182
213
  return [];
183
214
  }
184
215
  const scopes = new Map();
@@ -203,7 +234,7 @@ export class RAGService {
203
234
  return Array.from(scopes.values());
204
235
  }
205
236
  static async searchDocuments(query, userId, supabase, options = {}) {
206
- const { topK = 5, similarityThreshold = 0.7, settings } = options;
237
+ const { topK = 5, similarityThreshold = 0.7, settings, workspaceId } = options;
207
238
  const minThreshold = Math.max(0.1, Math.min(similarityThreshold, 0.4));
208
239
  const thresholdLevels = Array.from(new Set([similarityThreshold, minThreshold]));
209
240
  const preferred = await this.resolveEmbeddingModel(settings || {});
@@ -232,6 +263,7 @@ export class RAGService {
232
263
  logger.info(`Searching knowledge base (${scope.provider}/${scope.model}, dim=${queryDim}, topK=${topK}, threshold=${threshold})`);
233
264
  const hits = await this.runSearchForModel({
234
265
  userId,
266
+ workspaceId,
235
267
  supabase,
236
268
  modelScope: scope,
237
269
  queryEmbedding,
@@ -268,7 +300,7 @@ export class RAGService {
268
300
  });
269
301
  }
270
302
  if (collected.size === 0) {
271
- const scopes = await this.listUserModelScopes(userId, supabase);
303
+ const scopes = await this.listModelScopes(userId, supabase, workspaceId);
272
304
  const fallbackScopes = scopes.filter((scope) => !(scope.provider === preferredScope.provider && scope.model === preferredScope.model));
273
305
  for (const scope of fallbackScopes) {
274
306
  try {