gitmem-mcp 1.0.11 → 1.0.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -7,6 +7,15 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
7
7
 
8
8
  ## [Unreleased]
9
9
 
10
+ ## [1.0.12] - 2026-02-16
11
+
12
+ ### Fixed
13
+ - **Table prefix for pro tier**: `getTableName()` was resolving to `gitmem_*` tables for pro tier, but those tables don't exist yet. All tiers now default to `orchestra_` prefix until schema migration is complete.
14
+
15
+ ### Changed
16
+ - **Dynamic table names**: Replaced all hardcoded `orchestra_*` table name strings across 22 source files with `getTableName()` calls, making table prefixes configurable via `GITMEM_TABLE_PREFIX` env var.
17
+ - **Release status script**: Added `npm run release-status` to report local commits that have not yet been published to the npm registry.
18
+
10
19
  ## [1.0.11] - 2026-02-16
11
20
 
12
21
  ### Changed
@@ -8,6 +8,7 @@
8
8
  */
9
9
  import { directQuery, directQueryAll, safeInFilter } from "./supabase-client.js";
10
10
  import { getCache } from "./cache.js";
11
+ import { getTableName } from "./tier.js";
11
12
  // --- Query Layer ---
12
13
  /**
13
14
  * Fetch sessions within a date range.
@@ -22,7 +23,7 @@ export async function querySessionsByDateRange(startDate, endDate, project, agen
22
23
  project: `eq.${project}`,
23
24
  "created_at": `gte.${startDate}`,
24
25
  };
25
- return directQueryAll("orchestra_sessions", {
26
+ return directQueryAll(getTableName("sessions"), {
26
27
  select: "id,session_title,session_date,agent,linear_issue,decisions,open_threads,closing_reflection,close_compliance,created_at,project",
27
28
  filters,
28
29
  order: "created_at.desc",
@@ -69,7 +70,7 @@ export async function queryRepeatMistakes(startDate, _endDate, project) {
69
70
  created_at: `gte.${startDate}`,
70
71
  is_active: "eq.true",
71
72
  };
72
- const repeats = await directQuery("orchestra_learnings", {
73
+ const repeats = await directQuery(getTableName("learnings"), {
73
74
  select: "id,title,related_scar_id,repeat_mistake_details,created_at",
74
75
  filters,
75
76
  order: "created_at.desc",
@@ -94,7 +95,7 @@ export async function enrichScarUsageTitles(usages) {
94
95
  return usages;
95
96
  // Fetch titles from orchestra_learnings
96
97
  const ids = Array.from(idsNeedingResolution);
97
- const learnings = await directQuery("orchestra_learnings", {
98
+ const learnings = await directQuery(getTableName("learnings"), {
98
99
  select: "id,title,severity",
99
100
  filters: {
100
101
  id: safeInFilter(ids),
@@ -15,6 +15,7 @@
15
15
  import * as fs from "fs";
16
16
  import * as path from "path";
17
17
  import { isConfigured, loadScarsWithEmbeddings } from "./supabase-client.js";
18
+ import { getTableName } from "./tier.js";
18
19
  import { getGitmemDir } from "./gitmem-dir.js";
19
20
  import { initializeLocalSearch, reinitializeLocalSearch, isLocalSearchReady, getLocalVectorSearch, getCacheMetadata, setCacheTtl, } from "./local-vector-search.js";
20
21
  import { getConfig, shouldUseLocalSearch } from "./config.js";
@@ -101,7 +102,7 @@ async function getRemoteScarStats() {
101
102
  // Quick query to get count and latest timestamp (no embeddings needed)
102
103
  // Cross-project — matches unified cache loading
103
104
  // Filter embedding=not.is.null to match cache indexing (which skips entries without embeddings)
104
- const learnings = await directQuery("orchestra_learnings", {
105
+ const learnings = await directQuery(getTableName("learnings"), {
105
106
  select: "id,updated_at",
106
107
  filters: {
107
108
  learning_type: "in.(scar,pattern,win,anti_pattern)",
@@ -8,6 +8,7 @@
8
8
  * Integrates with CacheService for performance.
9
9
  */
10
10
  import { getCache } from "./cache.js";
11
+ import { getTableName } from "./tier.js";
11
12
  // --- PostgREST Input Sanitization ---
12
13
  const UUID_PATTERN = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i;
13
14
  /**
@@ -390,7 +391,7 @@ export async function loadScarsWithEmbeddings(project, limit = 500) {
390
391
  if (project) {
391
392
  filters.project = project;
392
393
  }
393
- const learnings = await directQuery("orchestra_learnings", {
394
+ const learnings = await directQuery(getTableName("learnings"), {
394
395
  select: "id,title,description,severity,counter_arguments,applies_when,source_linear_issue,project,embedding,updated_at,learning_type,decay_multiplier",
395
396
  filters,
396
397
  order: "updated_at.desc",
@@ -435,7 +436,7 @@ export async function cachedScarSearch(query, matchCount = 5, project = "default
435
436
  export async function cachedListDecisions(project = "default", limit = 5) {
436
437
  const cache = getCache();
437
438
  const { data, cache_hit, cache_age_ms } = await cache.getOrFetchDecisions(project, limit, async () => listRecords({
438
- table: "orchestra_decisions_lite",
439
+ table: getTableName("decisions_lite"),
439
440
  limit,
440
441
  orderBy: { column: "created_at", ascending: false },
441
442
  }));
@@ -447,7 +448,7 @@ export async function cachedListDecisions(project = "default", limit = 5) {
447
448
  export async function cachedListWins(project = "default", limit = 8, columns) {
448
449
  const cache = getCache();
449
450
  const { data, cache_hit, cache_age_ms } = await cache.getOrFetchWins(project, limit, async () => listRecords({
450
- table: "orchestra_learnings_lite",
451
+ table: getTableName("learnings_lite"),
451
452
  columns,
452
453
  filters: {
453
454
  learning_type: "win",
@@ -519,7 +520,7 @@ export async function saveTranscript(sessionId, transcript, metadata = {}) {
519
520
  // Update the session record with transcript_path (direct REST API)
520
521
  let patch_warning;
521
522
  try {
522
- await directPatch("orchestra_sessions", { id: sessionId }, { transcript_path: path });
523
+ await directPatch(getTableName("sessions"), { id: sessionId }, { transcript_path: path });
523
524
  }
524
525
  catch (error) {
525
526
  // File is saved; session record update failed — warn, don't fail
@@ -538,7 +539,7 @@ export async function saveTranscript(sessionId, transcript, metadata = {}) {
538
539
  */
539
540
  export async function getTranscript(sessionId) {
540
541
  // First, get the session to find transcript_path
541
- const session = await getRecord("orchestra_sessions", sessionId);
542
+ const session = await getRecord(getTableName("sessions"), sessionId);
542
543
  if (!session?.transcript_path) {
543
544
  return null;
544
545
  }
@@ -16,7 +16,7 @@ import * as path from "path";
16
16
  import * as crypto from "crypto";
17
17
  import { getGitmemDir } from "./gitmem-dir.js";
18
18
  import { directQuery } from "./supabase-client.js";
19
- import { hasSupabase } from "./tier.js";
19
+ import { hasSupabase, getTableName } from "./tier.js";
20
20
  import { cosineSimilarity } from "./thread-dedup.js";
21
21
  // ---------- Constants ----------
22
22
  export const SESSION_SIMILARITY_THRESHOLD = 0.70;
@@ -203,7 +203,7 @@ export async function loadRecentSessionEmbeddings(project = "default", days = 30
203
203
  const cutoff = new Date();
204
204
  cutoff.setDate(cutoff.getDate() - days);
205
205
  const cutoffStr = cutoff.toISOString().split("T")[0]; // YYYY-MM-DD
206
- const rows = await directQuery("orchestra_sessions", {
206
+ const rows = await directQuery(getTableName("sessions"), {
207
207
  select: "id,session_title,embedding",
208
208
  filters: {
209
209
  project,
@@ -9,7 +9,7 @@
9
9
  * fall back to local file operations.
10
10
  */
11
11
  import * as supabase from "./supabase-client.js";
12
- import { hasSupabase } from "./tier.js";
12
+ import { hasSupabase, getTableName } from "./tier.js";
13
13
  import { computeLifecycleStatus, detectThreadClass } from "./thread-vitality.js";
14
14
  import { normalizeText, deduplicateThreadList } from "./thread-dedup.js";
15
15
  // ---------- Mapping Helpers ----------
@@ -96,7 +96,7 @@ export async function createThreadInSupabase(thread, project = "default", embedd
96
96
  }
97
97
  try {
98
98
  const row = threadObjectToRow(thread, project, embedding);
99
- const result = await supabase.directUpsert("orchestra_threads", row);
99
+ const result = await supabase.directUpsert(getTableName("threads"), row);
100
100
  console.error(`[thread-supabase] Created thread ${thread.id} in Supabase`);
101
101
  return result;
102
102
  }
@@ -116,7 +116,7 @@ export async function resolveThreadInSupabase(threadId, options = {}) {
116
116
  }
117
117
  try {
118
118
  // First, find the UUID primary key for this thread_id
119
- const rows = await supabase.directQuery("orchestra_threads", {
119
+ const rows = await supabase.directQuery(getTableName("threads"), {
120
120
  select: "id,thread_id",
121
121
  filters: { thread_id: threadId },
122
122
  limit: 1,
@@ -136,7 +136,7 @@ export async function resolveThreadInSupabase(threadId, options = {}) {
136
136
  if (options.resolvedBySession) {
137
137
  patchData.resolved_by_session = options.resolvedBySession;
138
138
  }
139
- await supabase.directPatch("orchestra_threads", { id: uuid }, patchData);
139
+ await supabase.directPatch(getTableName("threads"), { id: uuid }, patchData);
140
140
  console.error(`[thread-supabase] Resolved thread ${threadId} in Supabase`);
141
141
  return true;
142
142
  }
@@ -176,7 +176,7 @@ export async function listThreadsFromSupabase(project = "default", options = {})
176
176
  // Default: exclude resolved and archived
177
177
  filters.status = "not.in.(resolved,archived)";
178
178
  }
179
- const rows = await supabase.directQuery("orchestra_threads_lite", {
179
+ const rows = await supabase.directQuery(getTableName("threads_lite"), {
180
180
  select: "*",
181
181
  filters,
182
182
  order: "vitality_score.desc,last_touched_at.desc",
@@ -202,7 +202,7 @@ export async function loadActiveThreadsFromSupabase(project = "default") {
202
202
  }
203
203
  try {
204
204
  // Get only non-resolved, non-archived threads (open/active only)
205
- const rows = await supabase.directQuery("orchestra_threads_lite", {
205
+ const rows = await supabase.directQuery(getTableName("threads_lite"), {
206
206
  select: "*",
207
207
  filters: {
208
208
  project,
@@ -264,7 +264,7 @@ export async function touchThreadsInSupabase(threadIds) {
264
264
  for (const threadId of threadIds) {
265
265
  try {
266
266
  // Fetch current state (need created_at and thread_class for vitality recomputation)
267
- const rows = await supabase.directQuery("orchestra_threads", {
267
+ const rows = await supabase.directQuery(getTableName("threads"), {
268
268
  select: "id,touch_count,created_at,thread_class,status",
269
269
  filters: { thread_id: threadId },
270
270
  limit: 1,
@@ -295,7 +295,7 @@ export async function touchThreadsInSupabase(threadIds) {
295
295
  else if (lifecycle_status !== "dormant") {
296
296
  delete metadata.dormant_since;
297
297
  }
298
- await supabase.directPatch("orchestra_threads", { id: row.id }, {
298
+ await supabase.directPatch(getTableName("threads"), { id: row.id }, {
299
299
  touch_count: newTouchCount,
300
300
  last_touched_at: nowIso,
301
301
  vitality_score: vitality.vitality_score,
@@ -323,7 +323,7 @@ export async function syncThreadsToSupabase(threads, project = "default", sessio
323
323
  // for threads that already exist with the same (or similar) text.
324
324
  let existingOpenThreads = [];
325
325
  try {
326
- existingOpenThreads = await supabase.directQuery("orchestra_threads", {
326
+ existingOpenThreads = await supabase.directQuery(getTableName("threads"), {
327
327
  select: "thread_id,text,status",
328
328
  filters: {
329
329
  project,
@@ -346,7 +346,7 @@ export async function syncThreadsToSupabase(threads, project = "default", sessio
346
346
  for (const thread of threads) {
347
347
  try {
348
348
  // Check if thread exists in Supabase by ID
349
- const existing = await supabase.directQuery("orchestra_threads", {
349
+ const existing = await supabase.directQuery(getTableName("threads"), {
350
350
  select: "id,thread_id,status",
351
351
  filters: { thread_id: thread.id },
352
352
  limit: 1,
@@ -400,7 +400,7 @@ export async function archiveDormantThreads(project = "default", dormantDays = 3
400
400
  }
401
401
  try {
402
402
  // Fetch dormant threads
403
- const rows = await supabase.directQuery("orchestra_threads", {
403
+ const rows = await supabase.directQuery(getTableName("threads"), {
404
404
  select: "id,thread_id,metadata",
405
405
  filters: {
406
406
  project,
@@ -417,7 +417,7 @@ export async function archiveDormantThreads(project = "default", dormantDays = 3
417
417
  const dormantStart = new Date(dormantSince);
418
418
  const daysDormant = (now.getTime() - dormantStart.getTime()) / (1000 * 60 * 60 * 24);
419
419
  if (daysDormant >= dormantDays) {
420
- await supabase.directPatch("orchestra_threads", { id: row.id }, {
420
+ await supabase.directPatch(getTableName("threads"), { id: row.id }, {
421
421
  status: "archived",
422
422
  });
423
423
  archived_ids.push(row.thread_id);
@@ -465,7 +465,7 @@ export async function loadOpenThreadEmbeddings(project = "default") {
465
465
  return null;
466
466
  }
467
467
  try {
468
- const rows = await supabase.directQuery("orchestra_threads", {
468
+ const rows = await supabase.directQuery(getTableName("threads"), {
469
469
  select: "thread_id,text,embedding",
470
470
  filters: {
471
471
  project,
@@ -95,10 +95,9 @@ export function hasEnforcementFields() {
95
95
  * Get the table prefix for the current tier
96
96
  */
97
97
  export function getTablePrefix() {
98
- if (getTier() === "dev") {
99
- return process.env.GITMEM_TABLE_PREFIX || "orchestra_";
100
- }
101
- return process.env.GITMEM_TABLE_PREFIX || "gitmem_";
98
+ // All tiers default to orchestra_ until gitmem_ schema migration is complete.
99
+ // Override with GITMEM_TABLE_PREFIX env var when ready.
100
+ return process.env.GITMEM_TABLE_PREFIX || "orchestra_";
102
101
  }
103
102
  /**
104
103
  * Get the fully-qualified table name for a base table name
@@ -6,6 +6,7 @@
6
6
  *
7
7
  *
8
8
  */
9
+ import { getTableName } from "./tier.js";
9
10
  // OpenRouter API configuration (same as local-vector-search)
10
11
  const OPENROUTER_API_URL = "https://openrouter.ai/api/v1/embeddings";
11
12
  const EMBEDDING_MODEL = "openai/text-embedding-3-small";
@@ -221,7 +222,7 @@ export async function processTranscript(sessionId, transcriptContent, project =
221
222
  if (!SUPABASE_URL || !SUPABASE_KEY) {
222
223
  throw new Error("Supabase configuration missing");
223
224
  }
224
- const restUrl = `${SUPABASE_URL}/rest/v1/orchestra_transcript_chunks?on_conflict=session_id,chunk_index`;
225
+ const restUrl = `${SUPABASE_URL}/rest/v1/${getTableName("transcript_chunks")}?on_conflict=session_id,chunk_index`;
225
226
  const response = await fetch(restUrl, {
226
227
  method: "POST",
227
228
  headers: {
@@ -14,7 +14,7 @@
14
14
  import { v4 as uuidv4 } from "uuid";
15
15
  import { wrapDisplay } from "../services/display-protocol.js";
16
16
  import { addObservations, getObservations, getCurrentSession } from "../services/session-state.js";
17
- import { hasSupabase } from "../services/tier.js";
17
+ import { hasSupabase, getTableName } from "../services/tier.js";
18
18
  import * as supabase from "../services/supabase-client.js";
19
19
  import { Timer, recordMetrics, buildPerformanceData, } from "../services/metrics.js";
20
20
  // --- Scar Candidate Detection ---
@@ -49,7 +49,7 @@ export async function absorbObservations(params) {
49
49
  // 3. Optionally persist to Supabase (fire-and-forget, non-fatal)
50
50
  const session = getCurrentSession();
51
51
  if (hasSupabase() && supabase.isConfigured() && session) {
52
- supabase.directUpsert("orchestra_sessions", {
52
+ supabase.directUpsert(getTableName("sessions"), {
53
53
  id: session.sessionId,
54
54
  task_observations: getObservations(),
55
55
  }).catch((err) => {
@@ -9,7 +9,7 @@
9
9
  * removed from in-memory search results.
10
10
  */
11
11
  import { directPatch, isConfigured } from "../services/supabase-client.js";
12
- import { hasSupabase } from "../services/tier.js";
12
+ import { hasSupabase, getTableName } from "../services/tier.js";
13
13
  import { getStorage } from "../services/storage.js";
14
14
  import { flushCache } from "../services/startup.js";
15
15
  import { Timer } from "../services/metrics.js";
@@ -32,7 +32,7 @@ export async function archiveLearning(params) {
32
32
  let cacheFlushed = false;
33
33
  if (hasSupabase() && isConfigured()) {
34
34
  // Pro/dev: patch in Supabase
35
- await directPatch("orchestra_learnings", { id: `eq.${params.id}` }, {
35
+ await directPatch(getTableName("learnings"), { id: `eq.${params.id}` }, {
36
36
  is_active: false,
37
37
  archived_at: archivedAt,
38
38
  });
@@ -8,7 +8,7 @@
8
8
  */
9
9
  import { v4 as uuidv4 } from "uuid";
10
10
  import * as supabase from "../services/supabase-client.js";
11
- import { hasSupabase } from "../services/tier.js";
11
+ import { hasSupabase, getTableName } from "../services/tier.js";
12
12
  import { getProject } from "../services/session-state.js";
13
13
  import { computeLifecycleStatus } from "../services/thread-vitality.js";
14
14
  import { archiveDormantThreads } from "../services/thread-supabase.js";
@@ -127,7 +127,7 @@ export async function cleanupThreads(params) {
127
127
  archived_ids = archiveResult.archived_ids;
128
128
  }
129
129
  // Step 2: Fetch all non-resolved, non-archived threads
130
- const rows = await supabase.directQuery("orchestra_threads_lite", {
130
+ const rows = await supabase.directQuery(getTableName("threads_lite"), {
131
131
  select: "*",
132
132
  filters: {
133
133
  project,
@@ -180,7 +180,7 @@ export async function cleanupThreads(params) {
180
180
  id: metricsId,
181
181
  tool_name: "cleanup_threads",
182
182
  query_text: `cleanup:${project}:auto_archive=${!!params.auto_archive}`,
183
- tables_searched: ["orchestra_threads_lite"],
183
+ tables_searched: [getTableName("threads_lite")],
184
184
  latency_ms: latencyMs,
185
185
  result_count: totalOpen,
186
186
  phase_tag: "ad_hoc",
@@ -14,7 +14,7 @@ import { wrapDisplay } from "../services/display-protocol.js";
14
14
  import { getAgentIdentity } from "../services/agent-detection.js";
15
15
  import { writeTriplesForDecision } from "../services/triple-writer.js";
16
16
  import { getEffectTracker } from "../services/effect-tracker.js";
17
- import { hasSupabase } from "../services/tier.js";
17
+ import { hasSupabase, getTableName } from "../services/tier.js";
18
18
  import { getStorage } from "../services/storage.js";
19
19
  import { getProject } from "../services/session-state.js";
20
20
  import { Timer, recordMetrics, buildPerformanceData, } from "../services/metrics.js";
@@ -67,7 +67,7 @@ export async function createDecision(params) {
67
67
  }
68
68
  // Write directly to Supabase REST API (bypasses ww-mcp)
69
69
  const upsertStart = Date.now();
70
- await supabase.directUpsert("orchestra_decisions", decisionData);
70
+ await supabase.directUpsert(getTableName("decisions"), decisionData);
71
71
  breakdown.upsert = {
72
72
  latency_ms: Date.now() - upsertStart,
73
73
  source: "supabase",
@@ -108,7 +108,7 @@ export async function createDecision(params) {
108
108
  id: metricsId,
109
109
  session_id: params.session_id,
110
110
  tool_name: "create_decision",
111
- tables_searched: ["orchestra_decisions"],
111
+ tables_searched: [getTableName("decisions")],
112
112
  latency_ms: latencyMs,
113
113
  result_count: 1,
114
114
  phase_tag: "decision_capture",
@@ -16,7 +16,7 @@ import { flushCache } from "../services/startup.js";
16
16
  import { writeTriplesForLearning } from "../services/triple-writer.js";
17
17
  import { generateVariantsForScar } from "../services/variant-generation.js";
18
18
  import { getEffectTracker } from "../services/effect-tracker.js";
19
- import { hasSupabase } from "../services/tier.js";
19
+ import { hasSupabase, getTableName } from "../services/tier.js";
20
20
  import { getStorage } from "../services/storage.js";
21
21
  import { getProject } from "../services/session-state.js";
22
22
  import { Timer, recordMetrics, buildPerformanceData, } from "../services/metrics.js";
@@ -143,7 +143,7 @@ export async function createLearning(params) {
143
143
  console.error(`[create_learning] Learning type: ${params.learning_type}, Project: ${params.project || getProject() || "default"}`);
144
144
  // Write directly to Supabase REST API (bypasses ww-mcp)
145
145
  const upsertStart = Date.now();
146
- const writeResult = await supabase.directUpsert("orchestra_learnings", learningData);
146
+ const writeResult = await supabase.directUpsert(getTableName("learnings"), learningData);
147
147
  const upsertLatency = Date.now() - upsertStart;
148
148
  breakdown.upsert = {
149
149
  latency_ms: upsertLatency,
@@ -210,7 +210,7 @@ export async function createLearning(params) {
210
210
  recordMetrics({
211
211
  id: metricsId,
212
212
  tool_name: "create_learning",
213
- tables_searched: ["orchestra_learnings"],
213
+ tables_searched: [getTableName("learnings")],
214
214
  latency_ms: latencyMs,
215
215
  result_count: 1,
216
216
  phase_tag: "learning_capture",
@@ -14,6 +14,7 @@
14
14
  * Performance target: <500ms (Supabase write + file write)
15
15
  */
16
16
  import { v4 as uuidv4 } from "uuid";
17
+ import { getTableName } from "../services/tier.js";
17
18
  import { getThreads, setThreads, getCurrentSession, getProject } from "../services/session-state.js";
18
19
  import { generateThreadId, loadThreadsFile, saveThreadsFile, } from "../services/thread-manager.js";
19
20
  import { createThreadInSupabase, loadOpenThreadEmbeddings, touchThreadsInSupabase, } from "../services/thread-supabase.js";
@@ -89,7 +90,7 @@ export async function createThread(params) {
89
90
  id: metricsId,
90
91
  tool_name: "create_thread",
91
92
  query_text: `dedup:${dedupResult.matched_thread_id}`,
92
- tables_searched: ["orchestra_threads"],
93
+ tables_searched: [getTableName("threads")],
93
94
  latency_ms: latencyMs,
94
95
  result_count: 0,
95
96
  phase_tag: "ad_hoc",
@@ -147,7 +148,7 @@ export async function createThread(params) {
147
148
  id: metricsId,
148
149
  tool_name: "create_thread",
149
150
  query_text: `create:${thread.id}`,
150
- tables_searched: supabaseSynced ? ["orchestra_threads"] : [],
151
+ tables_searched: supabaseSynced ? [getTableName("threads")] : [],
151
152
  latency_ms: latencyMs,
152
153
  result_count: 1,
153
154
  phase_tag: "ad_hoc",
@@ -11,7 +11,7 @@
11
11
  * Performance target: <500ms (Supabase query with fallback)
12
12
  */
13
13
  import { v4 as uuidv4 } from "uuid";
14
- import { hasSupabase } from "../services/tier.js";
14
+ import { hasSupabase, getTableName } from "../services/tier.js";
15
15
  import { getProject } from "../services/session-state.js";
16
16
  import { aggregateThreads, loadThreadsFile, mergeThreadStates } from "../services/thread-manager.js";
17
17
  import { deduplicateThreadList } from "../services/thread-dedup.js";
@@ -81,7 +81,7 @@ export async function listThreads(params) {
81
81
  if (allThreads === null && hasSupabase()) {
82
82
  try {
83
83
  const sessions = await supabase.listRecords({
84
- table: "orchestra_sessions_lite",
84
+ table: getTableName("sessions_lite"),
85
85
  filters: { project },
86
86
  limit: 10,
87
87
  orderBy: { column: "created_at", ascending: false },
@@ -137,7 +137,7 @@ export async function listThreads(params) {
137
137
  id: metricsId,
138
138
  tool_name: "list_threads",
139
139
  query_text: `list:${statusFilter}:${includeResolved ? "all" : "filtered"}`,
140
- tables_searched: source === "supabase" ? ["orchestra_threads_lite"] : source === "aggregation" ? ["orchestra_sessions_lite"] : [],
140
+ tables_searched: source === "supabase" ? [getTableName("threads_lite")] : source === "aggregation" ? [getTableName("sessions_lite")] : [],
141
141
  latency_ms: latencyMs,
142
142
  result_count: threads.length,
143
143
  phase_tag: "ad_hoc",
package/dist/tools/log.js CHANGED
@@ -10,7 +10,7 @@
10
10
  * Performance target: 500ms
11
11
  */
12
12
  import * as supabase from "../services/supabase-client.js";
13
- import { hasSupabase } from "../services/tier.js";
13
+ import { hasSupabase, getTableName } from "../services/tier.js";
14
14
  import { getProject } from "../services/session-state.js";
15
15
  import { getStorage } from "../services/storage.js";
16
16
  import { Timer, recordMetrics, buildPerformanceData, buildComponentPerformance, } from "../services/metrics.js";
@@ -168,7 +168,7 @@ export async function log(params) {
168
168
  if (sinceDate) {
169
169
  filters.created_at = `gte.${sinceDate}`;
170
170
  }
171
- const records = await supabase.directQuery("orchestra_learnings", {
171
+ const records = await supabase.directQuery(getTableName("learnings"), {
172
172
  select: "id,title,learning_type,severity,created_at,source_linear_issue,project,persona_name",
173
173
  filters,
174
174
  order: "created_at.desc",
@@ -187,7 +187,7 @@ export async function log(params) {
187
187
  recordMetrics({
188
188
  id: metricsId,
189
189
  tool_name: "log",
190
- tables_searched: ["orchestra_learnings"],
190
+ tables_searched: [getTableName("learnings")],
191
191
  latency_ms: latencyMs,
192
192
  result_count: records.length,
193
193
  phase_tag: "ad_hoc",
@@ -18,7 +18,7 @@
18
18
  import * as supabase from "../services/supabase-client.js";
19
19
  import { localScarSearch, isLocalSearchReady } from "../services/local-vector-search.js";
20
20
  import { getProject } from "../services/session-state.js";
21
- import { hasSupabase } from "../services/tier.js";
21
+ import { hasSupabase, getTableName } from "../services/tier.js";
22
22
  import { getStorage } from "../services/storage.js";
23
23
  import { Timer, recordMetrics, buildPerformanceData, buildComponentPerformance, } from "../services/metrics.js";
24
24
  import { v4 as uuidv4 } from "uuid";
@@ -153,7 +153,7 @@ function buildResult(scars, plan, format, maxTokens, timer, metricsId, project,
153
153
  id: metricsId,
154
154
  tool_name: "prepare_context",
155
155
  query_text: `prepare_context:${format}:${plan.slice(0, 80)}`,
156
- tables_searched: search_mode === "local" ? [] : ["orchestra_learnings"],
156
+ tables_searched: search_mode === "local" ? [] : [getTableName("learnings")],
157
157
  latency_ms: latencyMs,
158
158
  result_count: scars_included,
159
159
  phase_tag: "recall",
@@ -14,7 +14,7 @@
14
14
  */
15
15
  import * as supabase from "../services/supabase-client.js";
16
16
  import { localScarSearch, isLocalSearchReady } from "../services/local-vector-search.js";
17
- import { hasSupabase, hasVariants, hasMetrics } from "../services/tier.js";
17
+ import { hasSupabase, hasVariants, hasMetrics, getTableName } from "../services/tier.js";
18
18
  import { getProject } from "../services/session-state.js";
19
19
  import { getStorage } from "../services/storage.js";
20
20
  import { Timer, recordMetrics, buildPerformanceData, buildComponentPerformance, calculateContextBytes, } from "../services/metrics.js";
@@ -423,7 +423,7 @@ export async function recall(params) {
423
423
  id: metricsId,
424
424
  tool_name: "recall",
425
425
  query_text: plan,
426
- tables_searched: search_mode === "local" ? [] : ["orchestra_learnings"],
426
+ tables_searched: search_mode === "local" ? [] : [getTableName("learnings")],
427
427
  latency_ms: latencyMs,
428
428
  result_count: scars.length,
429
429
  similarity_scores: similarityScores,
@@ -4,7 +4,7 @@
4
4
  */
5
5
  import { v4 as uuidv4 } from "uuid";
6
6
  import * as supabase from "../services/supabase-client.js";
7
- import { hasSupabase } from "../services/tier.js";
7
+ import { hasSupabase, getTableName } from "../services/tier.js";
8
8
  import { Timer, recordMetrics, buildPerformanceData } from "../services/metrics.js";
9
9
  const TARGET_LATENCY_MS = 2000; // Target for batch operation
10
10
  /**
@@ -27,7 +27,7 @@ async function resolveScarIdentifier(identifier, project) {
27
27
  }
28
28
  // Try exact title match first
29
29
  const titleResult = await supabase.listRecords({
30
- table: "orchestra_learnings",
30
+ table: getTableName("learnings"),
31
31
  columns: "id,title,description,scar_type,severity",
32
32
  filters: { ...filters, title: identifier },
33
33
  limit: 1,
@@ -37,7 +37,7 @@ async function resolveScarIdentifier(identifier, project) {
37
37
  }
38
38
  // Try partial title match (get more records to search)
39
39
  const partialResult = await supabase.listRecords({
40
- table: "orchestra_learnings",
40
+ table: getTableName("learnings"),
41
41
  columns: "id,title,description,scar_type,severity",
42
42
  filters: { ...filters },
43
43
  limit: 100,
@@ -128,7 +128,7 @@ export async function recordScarUsageBatch(params) {
128
128
  recordMetrics({
129
129
  id: metricsId,
130
130
  tool_name: "record_scar_usage_batch",
131
- tables_searched: ["scar_usage", "orchestra_learnings"],
131
+ tables_searched: ["scar_usage", getTableName("learnings")],
132
132
  latency_ms: latencyMs,
133
133
  result_count: usageIds.length,
134
134
  phase_tag: "scar_tracking",
@@ -12,6 +12,7 @@
12
12
  * Performance target: <500ms (Supabase update + file write)
13
13
  */
14
14
  import { v4 as uuidv4 } from "uuid";
15
+ import { getTableName } from "../services/tier.js";
15
16
  import { getThreads, getCurrentSession } from "../services/session-state.js";
16
17
  import { resolveThread as resolveThreadInList, findThreadById, loadThreadsFile, saveThreadsFile, } from "../services/thread-manager.js";
17
18
  import { resolveThreadInSupabase } from "../services/thread-supabase.js";
@@ -106,7 +107,7 @@ export async function resolveThread(params) {
106
107
  id: metricsId,
107
108
  tool_name: "resolve_thread",
108
109
  query_text: `resolve:${params.thread_id || "text:" + params.text_match}`,
109
- tables_searched: supabaseSynced ? ["orchestra_threads"] : [],
110
+ tables_searched: supabaseSynced ? [getTableName("threads")] : [],
110
111
  latency_ms: latencyMs,
111
112
  result_count: 1 + alsoResolved.length,
112
113
  phase_tag: "ad_hoc",
@@ -12,7 +12,7 @@
12
12
  */
13
13
  import * as supabase from "../services/supabase-client.js";
14
14
  import { localScarSearch, isLocalSearchReady } from "../services/local-vector-search.js";
15
- import { hasSupabase } from "../services/tier.js";
15
+ import { hasSupabase, getTableName } from "../services/tier.js";
16
16
  import { getProject } from "../services/session-state.js";
17
17
  import { getStorage } from "../services/storage.js";
18
18
  import { Timer, recordMetrics, buildPerformanceData, buildComponentPerformance, } from "../services/metrics.js";
@@ -218,7 +218,7 @@ export async function search(params) {
218
218
  id: metricsId,
219
219
  tool_name: "search",
220
220
  query_text: query,
221
- tables_searched: search_mode === "local" ? [] : ["orchestra_learnings"],
221
+ tables_searched: search_mode === "local" ? [] : [getTableName("learnings")],
222
222
  latency_ms: latencyMs,
223
223
  result_count: results.length,
224
224
  similarity_scores: results.map(r => r.similarity),
@@ -10,7 +10,7 @@ import { v4 as uuidv4 } from "uuid";
10
10
  import { detectAgent } from "../services/agent-detection.js";
11
11
  import * as supabase from "../services/supabase-client.js";
12
12
  import { embed, isEmbeddingAvailable } from "../services/embedding.js";
13
- import { hasSupabase } from "../services/tier.js";
13
+ import { hasSupabase, getTableName } from "../services/tier.js";
14
14
  import { getStorage } from "../services/storage.js";
15
15
  import { clearCurrentSession, getSurfacedScars, getConfirmations, getObservations, getChildren, getThreads, getSessionActivity } from "../services/session-state.js";
16
16
  import { normalizeThreads, mergeThreadStates, migrateStringThread, saveThreadsFile } from "../services/thread-manager.js"; //
@@ -832,7 +832,7 @@ export async function sessionClose(params) {
832
832
  const today = new Date().toISOString().split('T')[0]; // YYYY-MM-DD
833
833
  try {
834
834
  const sessions = await supabase.listRecords({
835
- table: "orchestra_sessions_lite",
835
+ table: getTableName("sessions_lite"),
836
836
  filters: { agent },
837
837
  limit: 10,
838
838
  orderBy: { column: "created_at", ascending: false },
@@ -938,7 +938,7 @@ export async function sessionClose(params) {
938
938
  sessionId = params.session_id;
939
939
  // Try Supabase first
940
940
  try {
941
- existingSession = await supabase.getRecord("orchestra_sessions", sessionId);
941
+ existingSession = await supabase.getRecord(getTableName("sessions"), sessionId);
942
942
  }
943
943
  catch {
944
944
  // Supabase might not be configured (free tier) or session not found
@@ -1036,7 +1036,7 @@ export async function sessionClose(params) {
1036
1036
  try {
1037
1037
  // Upsert session WITHOUT embedding (fast path)
1038
1038
  // Embedding + thread detection run fire-and-forget after
1039
- await supabase.directUpsert("orchestra_sessions", sessionData);
1039
+ await supabase.directUpsert(getTableName("sessions"), sessionData);
1040
1040
  // Tracked fire-and-forget embedding generation + session update + thread detection
1041
1041
  if (isEmbeddingAvailable()) {
1042
1042
  getEffectTracker().track("embedding", "session_close", async () => {
@@ -1052,7 +1052,7 @@ export async function sessionClose(params) {
1052
1052
  if (embeddingVector) {
1053
1053
  const embeddingJson = JSON.stringify(embeddingVector);
1054
1054
  // Update session with embedding (PATCH, not upsert — row already exists)
1055
- await supabase.directPatch("orchestra_sessions", { id: sessionId }, { embedding: embeddingJson });
1055
+ await supabase.directPatch(getTableName("sessions"), { id: sessionId }, { embedding: embeddingJson });
1056
1056
  console.error("[session_close] Embedding saved to session");
1057
1057
  // Phase 5: Implicit thread detection (chained after embedding)
1058
1058
  const suggestProject = existingSession?.project || "default";
@@ -1090,7 +1090,7 @@ export async function sessionClose(params) {
1090
1090
  session_id: sessionId,
1091
1091
  agent: agentIdentity,
1092
1092
  tool_name: "session_close",
1093
- tables_searched: ["orchestra_sessions"],
1093
+ tables_searched: [getTableName("sessions")],
1094
1094
  latency_ms: latencyMs,
1095
1095
  result_count: 1,
1096
1096
  phase_tag: "session_close",
@@ -17,7 +17,7 @@ import { detectAgent } from "../services/agent-detection.js";
17
17
  import * as supabase from "../services/supabase-client.js";
18
18
  // Scar search removed from start pipeline (loads on-demand via recall)
19
19
  import { ensureInitialized } from "../services/startup.js";
20
- import { hasSupabase } from "../services/tier.js";
20
+ import { hasSupabase, getTableName } from "../services/tier.js";
21
21
  import { getStorage } from "../services/storage.js";
22
22
  import { Timer, recordMetrics, calculateContextBytes, buildPerformanceData, buildComponentPerformance, } from "../services/metrics.js";
23
23
  import { setCurrentSession, getCurrentSession, addSurfacedScars, getSurfacedScars } from "../services/session-state.js";
@@ -68,7 +68,7 @@ async function loadLastSession(agent, project) {
68
68
  // Use _lite view for performance (excludes embedding)
69
69
  // View now includes decisions/open_threads arrays
70
70
  const sessions = await supabase.listRecords({
71
- table: "orchestra_sessions_lite",
71
+ table: getTableName("sessions_lite"),
72
72
  filters: { agent, project },
73
73
  limit: 10, // Get several to find a closed one + aggregate threads
74
74
  orderBy: { column: "created_at", ascending: false },
@@ -149,7 +149,7 @@ async function loadRecentRapport(project) {
149
149
  return [];
150
150
  try {
151
151
  const sessions = await supabase.listRecords({
152
- table: "orchestra_sessions_lite",
152
+ table: getTableName("sessions_lite"),
153
153
  columns: "agent,rapport_summary,created_at",
154
154
  filters: { project },
155
155
  limit: 20, // Fetch more to find ones with rapport
@@ -233,7 +233,7 @@ async function createSessionRecord(agent, project, linearIssue, preGeneratedId /
233
233
  try {
234
234
  // Capture asciinema recording path from Docker entrypoint
235
235
  const recordingPath = process.env.GITMEM_RECORDING_PATH || null;
236
- await supabase.directUpsert("orchestra_sessions", {
236
+ await supabase.directUpsert(getTableName("sessions"), {
237
237
  id: sessionId,
238
238
  session_date: today,
239
239
  session_title: linearIssue ? `Session for ${linearIssue}` : "Interactive Session",
@@ -273,12 +273,12 @@ async function markSessionSuperseded(oldSessionId, newSessionId) {
273
273
  return; // Free tier: no remote session tracking
274
274
  try {
275
275
  // Check if session already has close_compliance (was properly closed)
276
- const existing = await supabase.directQuery("orchestra_sessions", { filters: { id: oldSessionId }, select: "close_compliance" });
276
+ const existing = await supabase.directQuery(getTableName("sessions"), { filters: { id: oldSessionId }, select: "close_compliance" });
277
277
  if (existing.length > 0 && existing[0].close_compliance != null) {
278
278
  // Already closed — don't overwrite
279
279
  return;
280
280
  }
281
- await supabase.directPatch("orchestra_sessions", { id: oldSessionId }, {
281
+ await supabase.directPatch(getTableName("sessions"), { id: oldSessionId }, {
282
282
  close_compliance: {
283
283
  close_type: "superseded",
284
284
  superseded_by: newSessionId,
@@ -819,7 +819,7 @@ export async function sessionStart(params) {
819
819
  agent: agent,
820
820
  tool_name: "session_start",
821
821
  query_text: [params.issue_title, params.issue_description].filter(Boolean).join(" ").slice(0, 500),
822
- tables_searched: ["orchestra_sessions_lite", "orchestra_decisions_lite"],
822
+ tables_searched: [getTableName("sessions_lite"), getTableName("decisions_lite")],
823
823
  latency_ms: latencyMs,
824
824
  result_count: decisions.length + (lastSession ? 1 : 0),
825
825
  context_bytes: calculateContextBytes(result),
@@ -966,7 +966,7 @@ export async function sessionRefresh(params) {
966
966
  agent: agent,
967
967
  tool_name: "session_refresh",
968
968
  query_text: "mid-session context refresh",
969
- tables_searched: ["orchestra_sessions_lite", "orchestra_decisions_lite"],
969
+ tables_searched: [getTableName("sessions_lite"), getTableName("decisions_lite")],
970
970
  latency_ms: latencyMs,
971
971
  result_count: decisions.length + (lastSession ? 1 : 0),
972
972
  context_bytes: calculateContextBytes(result),
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "gitmem-mcp",
3
- "version": "1.0.11",
3
+ "version": "1.0.12",
4
4
  "description": "Institutional memory for AI coding agents. Memory that compounds.",
5
5
  "type": "module",
6
6
  "main": "dist/index.js",
@@ -24,7 +24,8 @@
24
24
  "test:all": "npm run test:unit && npm run test:smoke && npm run test:integration && npm run test:perf && npm run test:e2e",
25
25
  "test:watch": "vitest",
26
26
  "typecheck": "tsc --noEmit",
27
- "prepublishOnly": "tsc"
27
+ "prepublishOnly": "tsc",
28
+ "release-status": "bash scripts/release-status.sh"
28
29
  },
29
30
  "dependencies": {
30
31
  "@huggingface/transformers": "^3.0.0",