optimal-cli 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +175 -0
- package/dist/bin/optimal.d.ts +2 -0
- package/dist/bin/optimal.js +995 -0
- package/dist/lib/budget/projections.d.ts +115 -0
- package/dist/lib/budget/projections.js +384 -0
- package/dist/lib/budget/scenarios.d.ts +93 -0
- package/dist/lib/budget/scenarios.js +214 -0
- package/dist/lib/cms/publish-blog.d.ts +62 -0
- package/dist/lib/cms/publish-blog.js +74 -0
- package/dist/lib/cms/strapi-client.d.ts +123 -0
- package/dist/lib/cms/strapi-client.js +213 -0
- package/dist/lib/config.d.ts +55 -0
- package/dist/lib/config.js +206 -0
- package/dist/lib/infra/deploy.d.ts +29 -0
- package/dist/lib/infra/deploy.js +58 -0
- package/dist/lib/infra/migrate.d.ts +34 -0
- package/dist/lib/infra/migrate.js +103 -0
- package/dist/lib/kanban.d.ts +46 -0
- package/dist/lib/kanban.js +118 -0
- package/dist/lib/newsletter/distribute.d.ts +52 -0
- package/dist/lib/newsletter/distribute.js +193 -0
- package/dist/lib/newsletter/generate-insurance.d.ts +42 -0
- package/dist/lib/newsletter/generate-insurance.js +36 -0
- package/dist/lib/newsletter/generate.d.ts +104 -0
- package/dist/lib/newsletter/generate.js +571 -0
- package/dist/lib/returnpro/anomalies.d.ts +64 -0
- package/dist/lib/returnpro/anomalies.js +166 -0
- package/dist/lib/returnpro/audit.d.ts +32 -0
- package/dist/lib/returnpro/audit.js +147 -0
- package/dist/lib/returnpro/diagnose.d.ts +52 -0
- package/dist/lib/returnpro/diagnose.js +281 -0
- package/dist/lib/returnpro/kpis.d.ts +32 -0
- package/dist/lib/returnpro/kpis.js +192 -0
- package/dist/lib/returnpro/templates.d.ts +48 -0
- package/dist/lib/returnpro/templates.js +229 -0
- package/dist/lib/returnpro/upload-income.d.ts +25 -0
- package/dist/lib/returnpro/upload-income.js +235 -0
- package/dist/lib/returnpro/upload-netsuite.d.ts +37 -0
- package/dist/lib/returnpro/upload-netsuite.js +566 -0
- package/dist/lib/returnpro/upload-r1.d.ts +48 -0
- package/dist/lib/returnpro/upload-r1.js +398 -0
- package/dist/lib/social/post-generator.d.ts +83 -0
- package/dist/lib/social/post-generator.js +333 -0
- package/dist/lib/social/publish.d.ts +66 -0
- package/dist/lib/social/publish.js +226 -0
- package/dist/lib/social/scraper.d.ts +67 -0
- package/dist/lib/social/scraper.js +361 -0
- package/dist/lib/supabase.d.ts +4 -0
- package/dist/lib/supabase.js +20 -0
- package/dist/lib/transactions/delete-batch.d.ts +60 -0
- package/dist/lib/transactions/delete-batch.js +203 -0
- package/dist/lib/transactions/ingest.d.ts +43 -0
- package/dist/lib/transactions/ingest.js +555 -0
- package/dist/lib/transactions/stamp.d.ts +51 -0
- package/dist/lib/transactions/stamp.js +524 -0
- package/package.json +50 -0
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
/** Row shape of the `agent_configs` table as returned by the cloud API. */
interface ConfigRecord {
    id: string;
    agent_name: string;
    /** The full config payload (contents of openclaw.json). */
    config_json: Record<string, unknown>;
    /** Version identifier; the implementation writes an ISO timestamp here. */
    version: string;
    created_at: string;
    updated_at: string;
}
/**
 * Initialize local config directory
 */
export declare function initConfigDir(): void;
/**
 * Load local openclaw.json
 *
 * Returns null when the file is missing or cannot be parsed.
 */
export declare function loadLocalConfig(): Record<string, unknown> | null;
/**
 * Save config to local openclaw.json
 */
export declare function saveLocalConfig(config: Record<string, unknown>): void;
/**
 * Push current openclaw.json to Supabase
 *
 * Throws when no local config exists or the cloud write fails.
 */
export declare function pushConfig(agentName: string): Promise<{
    id: string;
    version: string;
}>;
/**
 * Pull config from Supabase and save to local openclaw.json
 *
 * Throws when no cloud config exists for the agent.
 */
export declare function pullConfig(agentName: string): Promise<ConfigRecord>;
/**
 * List all saved agent configs
 */
export declare function listConfigs(): Promise<Array<{
    agent_name: string;
    version: string;
    updated_at: string;
}>>;
/**
 * Compare local config with cloud version
 *
 * `differences` is a list of human-readable discrepancy descriptions
 * (empty when local and cloud agree).
 */
export declare function diffConfig(agentName: string): Promise<{
    local: Record<string, unknown> | null;
    cloud: ConfigRecord | null;
    differences: string[];
}>;
/**
 * Sync config (two-way merge)
 *
 * Chooses push/pull based on which side is newer; `action` reports what
 * was done and `message` is a human-readable summary.
 */
export declare function syncConfig(agentName: string): Promise<{
    action: 'pushed' | 'pulled' | 'merged' | 'none';
    message: string;
}>;
export {};
|
|
@@ -0,0 +1,206 @@
|
|
|
1
|
+
import { createClient } from '@supabase/supabase-js';
|
|
2
|
+
import { readFileSync, writeFileSync, existsSync, mkdirSync } from 'node:fs';
|
|
3
|
+
import { homedir } from 'node:os';
|
|
4
|
+
import { join } from 'node:path';
|
|
5
|
+
// Local CLI state directory (~/.optimal), created on demand by initConfigDir().
const CONFIG_DIR = join(homedir(), '.optimal');
// NOTE(review): LOCAL_CONFIG_PATH is declared but not referenced anywhere in
// this module — confirm whether it is still needed.
const LOCAL_CONFIG_PATH = join(CONFIG_DIR, 'config.json');
// The openclaw.json file that push/pull/diff/sync operate on.
const OPENCLAW_CONFIG_PATH = join(homedir(), '.openclaw', 'openclaw.json');
// Get Supabase client for OptimalOS instance (stores CLI configs)
function getOptimalSupabase() {
    const url = process.env.OPTIMAL_SUPABASE_URL;
    const key = process.env.OPTIMAL_SUPABASE_SERVICE_KEY;
    // Fail fast with a clear message rather than letting createClient error later.
    if (!url || !key) {
        throw new Error('OPTIMAL_SUPABASE_URL and OPTIMAL_SUPABASE_SERVICE_KEY must be set');
    }
    return createClient(url, key);
}
|
|
17
|
+
/**
 * Initialize local config directory (~/.optimal).
 *
 * Creates CONFIG_DIR if it does not already exist. Idempotent: calling it
 * when the directory exists is a no-op.
 *
 * BUG FIX: the original used `import('node:fs').then(fs => fs.mkdirSync(...))`,
 * which is asynchronous and un-awaited — initConfigDir() returned before the
 * directory was actually created, so a caller writing into CONFIG_DIR right
 * away could fail. The directory is now created synchronously.
 */
export function initConfigDir() {
    if (!existsSync(CONFIG_DIR)) {
        mkdirSync(CONFIG_DIR, { recursive: true });
    }
}
|
|
25
|
+
/**
 * Load local openclaw.json.
 *
 * @returns The parsed config object, or null when the file is absent,
 *          unreadable, or not valid JSON.
 */
export function loadLocalConfig() {
    try {
        if (existsSync(OPENCLAW_CONFIG_PATH)) {
            return JSON.parse(readFileSync(OPENCLAW_CONFIG_PATH, 'utf-8'));
        }
    }
    catch {
        // A corrupt or unreadable file is treated the same as a missing one.
    }
    return null;
}
/**
 * Save config to local openclaw.json as pretty-printed (2-space) JSON.
 */
export function saveLocalConfig(config) {
    const serialized = JSON.stringify(config, null, 2);
    writeFileSync(OPENCLAW_CONFIG_PATH, serialized);
}
|
|
46
|
+
/**
 * Push current openclaw.json to Supabase.
 *
 * Upserts the local config into the `agent_configs` table: updates the
 * existing row for `agentName` if one exists, otherwise inserts a new row.
 * The version string is the push-time ISO timestamp.
 *
 * Throws when no local config file exists or when the cloud write fails.
 *
 * NOTE(review): the select-then-update/insert sequence is not atomic — two
 * concurrent pushes for the same agent could both take the insert path.
 * Confirm whether a unique constraint or an upsert should guard this.
 *
 * @returns {{id: string, version: string}} the row id and new version.
 */
export async function pushConfig(agentName) {
    const supabase = getOptimalSupabase();
    const config = loadLocalConfig();
    if (!config) {
        throw new Error(`No config found at ${OPENCLAW_CONFIG_PATH}`);
    }
    // Generate version timestamp
    const version = new Date().toISOString();
    // Check if config exists for this agent
    // (.single() errors when no row matches; the error is intentionally
    // ignored here — `existing` is simply undefined in that case.)
    const { data: existing } = await supabase
        .from('agent_configs')
        .select('id')
        .eq('agent_name', agentName)
        .single();
    let result;
    if (existing) {
        // Update existing row; updated_at mirrors the version timestamp.
        const { data, error } = await supabase
            .from('agent_configs')
            .update({
            config_json: config,
            version,
            updated_at: version,
        })
            .eq('id', existing.id)
            .select()
            .single();
        if (error)
            throw error;
        result = data;
    }
    else {
        // Insert new row for this agent.
        const { data, error } = await supabase
            .from('agent_configs')
            .insert({
            agent_name: agentName,
            config_json: config,
            version,
        })
            .select()
            .single();
        if (error)
            throw error;
        result = data;
    }
    return { id: result.id, version };
}
|
|
97
|
+
/**
 * Pull config from Supabase and save to local openclaw.json.
 *
 * Fetches the most recently updated `agent_configs` row for `agentName`,
 * writes its config_json to disk, and returns the full record.
 * Throws when no row exists for the agent.
 */
export async function pullConfig(agentName) {
    const supabase = getOptimalSupabase();
    const query = supabase
        .from('agent_configs')
        .select('*')
        .eq('agent_name', agentName)
        .order('updated_at', { ascending: false })
        .limit(1);
    const { data, error } = await query.single();
    if (error) {
        throw new Error(`No config found for agent: ${agentName}`);
    }
    // Persist the cloud copy locally before handing it back.
    saveLocalConfig(data.config_json);
    return data;
}
/**
 * List all saved agent configs, most recently updated first.
 * Throws when the query fails.
 */
export async function listConfigs() {
    const { data, error } = await getOptimalSupabase()
        .from('agent_configs')
        .select('agent_name, version, updated_at')
        .order('updated_at', { ascending: false });
    if (error) {
        throw new Error(`Failed to list configs: ${error.message}`);
    }
    return data ?? [];
}
|
|
130
|
+
/**
 * Compare local config with cloud version.
 *
 * Returns the local config (or null), the cloud record (or null), and a
 * list of human-readable discrepancy descriptions. When both exist, only
 * a shallow comparison is performed: top-level key sets plus a version
 * check against local `meta.lastTouchedVersion`.
 */
export async function diffConfig(agentName) {
    const local = loadLocalConfig();
    let cloud = null;
    try {
        const supabase = getOptimalSupabase();
        // .single() rejects when no row matches; `cloud` then stays null.
        const { data } = await supabase
            .from('agent_configs')
            .select('*')
            .eq('agent_name', agentName)
            .single();
        cloud = data;
    }
    catch {
        // Cloud config doesn't exist
    }
    const differences = [];
    if (!local && !cloud) {
        differences.push('No local or cloud config found');
    }
    else if (!local) {
        differences.push('No local config (cloud exists)');
    }
    else if (!cloud) {
        differences.push('No cloud config (local exists)');
    }
    else {
        // Simple diff on top-level keys (order-insensitive: both sides sorted).
        const localKeys = Object.keys(local).sort();
        const cloudKeys = Object.keys(cloud.config_json).sort();
        if (JSON.stringify(localKeys) !== JSON.stringify(cloudKeys)) {
            differences.push('Top-level keys differ');
        }
        // Check version
        // NOTE(review): assumes local config carries meta.lastTouchedVersion —
        // when `meta` is absent this always reports a mismatch with
        // local=undefined. Confirm that is the intended behavior.
        const localMeta = local.meta;
        if (localMeta?.lastTouchedVersion !== cloud.version) {
            differences.push(`Version mismatch: local=${localMeta?.lastTouchedVersion}, cloud=${cloud.version}`);
        }
    }
    return { local, cloud, differences };
}
|
|
173
|
+
/**
 * Sync config (two-way "merge").
 *
 * Decision table:
 *  - neither side exists      -> 'none'
 *  - only local exists        -> push
 *  - only cloud exists        -> pull
 *  - both exist               -> compare timestamps; newer side wins,
 *                                equal timestamps -> 'none'
 *
 * Timestamps are compared lexicographically, which is valid because both
 * sides are ISO-8601 strings (cloud `updated_at`, local `meta.lastTouchedAt`).
 *
 * FIX: the original destructured `differences` and declared `localVersion`
 * but never used either; both dead bindings are removed.
 *
 * @returns {{action: 'pushed'|'pulled'|'merged'|'none', message: string}}
 */
export async function syncConfig(agentName) {
    const { local, cloud } = await diffConfig(agentName);
    if (!local && !cloud) {
        return { action: 'none', message: 'No configs to sync' };
    }
    if (!cloud) {
        // Only local exists - push
        const result = await pushConfig(agentName);
        return { action: 'pushed', message: `Pushed to cloud (version ${result.version})` };
    }
    if (!local) {
        // Only cloud exists - pull
        await pullConfig(agentName);
        return { action: 'pulled', message: `Pulled from cloud (version ${cloud.version})` };
    }
    // Both exist - compare timestamps. A missing local timestamp falls back
    // to the epoch so the cloud copy wins ('||' kept deliberately: it also
    // covers an empty-string timestamp).
    const localTime = local.meta?.lastTouchedAt || '1970-01-01';
    const cloudTime = cloud.updated_at;
    if (localTime > cloudTime) {
        const result = await pushConfig(agentName);
        return { action: 'pushed', message: `Local is newer - pushed to cloud (version ${result.version})` };
    }
    else if (cloudTime > localTime) {
        await pullConfig(agentName);
        return { action: 'pulled', message: `Cloud is newer - pulled from cloud (version ${cloud.version})` };
    }
    else {
        return { action: 'none', message: 'Configs are in sync' };
    }
}
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
/**
 * List all available app names that can be deployed.
 * (Keys of the internal APP_PATHS map in the implementation.)
 */
export declare function listApps(): string[];
/**
 * Resolve an app name to its absolute filesystem path.
 * Throws if the app name is unknown.
 */
export declare function getAppPath(appName: string): string;
/**
 * Deploy an app to Vercel using the `vercel` CLI.
 *
 * Uses `execFile` (not `exec`) to avoid shell injection.
 * The `--cwd` flag tells Vercel which project directory to deploy.
 *
 * Rejects when the app name is unknown or the CLI invocation fails.
 *
 * @param appName - Short name from APP_PATHS (e.g. 'portfolio', 'dashboard-returnpro')
 * @param prod - If true, deploys to production (--prod flag). Otherwise preview.
 * @returns The deployment URL printed by Vercel CLI.
 */
export declare function deploy(appName: string, prod?: boolean): Promise<string>;
/**
 * Run the Optimal workstation health check script.
 *
 * Checks: n8n, Affine (Docker + HTTP), Strapi CMS (systemd + HTTP),
 * Git repo sync status, Docker containers, and OptimalOS dev server.
 *
 * @returns The full text output of the health check script.
 */
export declare function healthCheck(): Promise<string>;
|
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
import { execFile } from 'node:child_process';
|
|
2
|
+
import { promisify } from 'node:util';
|
|
3
|
+
// Promisified execFile: no shell involved, so arguments cannot be injected.
const run = promisify(execFile);
/** Map of short app names to absolute filesystem paths. */
const APP_PATHS = {
    'dashboard-returnpro': '/home/optimal/dashboard-returnpro',
    'optimalos': '/home/optimal/optimalos',
    'portfolio': '/home/optimal/portfolio-2026',
    'newsletter-preview': '/home/optimal/projects/newsletter-preview',
    'wes': '/home/optimal/wes-dashboard',
};
/**
 * List all available app names that can be deployed (the APP_PATHS keys).
 */
export function listApps() {
    return Object.keys(APP_PATHS);
}
/**
 * Resolve an app name to its absolute filesystem path.
 * Throws an Error naming the valid choices when the app is unknown.
 */
export function getAppPath(appName) {
    if (!(appName in APP_PATHS)) {
        throw new Error(`Unknown app: ${appName}. Available: ${Object.keys(APP_PATHS).join(', ')}`);
    }
    return APP_PATHS[appName];
}
|
|
29
|
+
/**
 * Deploy an app to Vercel using the `vercel` CLI.
 *
 * Runs via `execFile` (no shell), passing `--cwd` so Vercel deploys the
 * resolved project directory. Rejects on unknown app names (from
 * getAppPath) or when the CLI exits non-zero / times out (120s).
 *
 * @param appName - Short name from APP_PATHS (e.g. 'portfolio', 'dashboard-returnpro')
 * @param prod - If true, deploys to production (--prod flag). Otherwise preview.
 * @returns The deployment URL printed by Vercel CLI.
 */
export async function deploy(appName, prod = false) {
    const projectDir = getAppPath(appName);
    const args = ['--cwd', projectDir];
    if (prod) {
        args.unshift('--prod');
    }
    const { stdout } = await run('vercel', args, { timeout: 120_000 });
    return stdout.trim();
}
|
|
47
|
+
/**
 * Run the Optimal workstation health check script via bash (30s timeout).
 *
 * The script reports on n8n, Affine (Docker + HTTP), Strapi CMS
 * (systemd + HTTP), Git repo sync status, Docker containers, and the
 * OptimalOS dev server.
 *
 * @returns The full text output of the health check script, trimmed.
 */
export async function healthCheck() {
    const result = await run('bash', ['/home/optimal/scripts/health-check.sh'], { timeout: 30_000 });
    return result.stdout.trim();
}
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
import 'dotenv/config';
/** Options for migrateDb. */
export interface MigrateOptions {
    /** Which project's Supabase instance to migrate. */
    target: 'returnpro' | 'optimalos';
    /** When true, passes --dry-run to the Supabase CLI (no changes applied). */
    dryRun?: boolean;
}
/** Outcome of a migrateDb run; never thrown — failures set success: false. */
export interface MigrateResult {
    success: boolean;
    target: string;
    /** Trimmed stdout of the Supabase CLI. */
    output: string;
    /** Trimmed stderr (or error message when the CLI failed to run). */
    errors: string;
}
/**
 * Run `supabase db push --linked` (or `--dry-run` if requested) against the
 * given target project directory.
 *
 * Uses `execFile` (not `exec`) to avoid shell injection.
 * The `cwd` option switches the Supabase CLI into the correct project.
 */
export declare function migrateDb(opts: MigrateOptions): Promise<MigrateResult>;
/**
 * List migration `.sql` files in the target's `supabase/migrations/` directory,
 * sorted chronologically (by filename, which starts with a YYYYMMDDHHMMSS prefix).
 *
 * Returns only filenames, not full paths.
 */
export declare function listPendingMigrations(target: 'returnpro' | 'optimalos'): Promise<string[]>;
/**
 * Create a new empty migration file in the target's `supabase/migrations/`
 * directory.
 *
 * The filename format is `{YYYYMMDDHHMMSS}_{name}.sql` (UTC timestamp).
 * Returns the full absolute path of the created file.
 */
export declare function createMigration(target: 'returnpro' | 'optimalos', name: string): Promise<string>;
|
|
@@ -0,0 +1,103 @@
|
|
|
1
|
+
import 'dotenv/config';
|
|
2
|
+
import { execFile } from 'node:child_process';
|
|
3
|
+
import { promisify } from 'node:util';
|
|
4
|
+
import { readdir, writeFile } from 'node:fs/promises';
|
|
5
|
+
import { join } from 'node:path';
|
|
6
|
+
// Promisified execFile: no shell involved, so arguments cannot be injected.
const run = promisify(execFile);
// ---------------------------------------------------------------------------
// Constants
// ---------------------------------------------------------------------------
/** Hardcoded project directories — these live on Carlos's machine. */
const PROJECT_DIRS = {
    returnpro: '/home/optimal/dashboard-returnpro',
    optimalos: '/home/optimal/optimalos',
};
// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------
/** Root directory of the given target project. */
function getProjectDir(target) {
    return PROJECT_DIRS[target];
}
/** Absolute path of the target's supabase/migrations directory. */
function migrationsDir(target) {
    return join(PROJECT_DIRS[target], 'supabase', 'migrations');
}
/**
 * Generate a timestamp string in YYYYMMDDHHMMSS format (UTC).
 * Derived from the ISO string: strip separators, keep the first 14 digits.
 */
function timestamp() {
    return new Date().toISOString().replace(/[-:T]/g, '').slice(0, 14);
}
|
|
39
|
+
// ---------------------------------------------------------------------------
// Public API
// ---------------------------------------------------------------------------
/**
 * Run `supabase db push --linked` (plus `--dry-run` if requested) inside the
 * target project's directory.
 *
 * Uses `execFile` (not `exec`) so no shell is involved and arguments cannot
 * be injected; the `cwd` option points the Supabase CLI at the right project.
 * Never throws: CLI failures are reported via `success: false` with whatever
 * stdout/stderr the process produced.
 */
export async function migrateDb(opts) {
    const { target, dryRun = false } = opts;
    const cliArgs = dryRun
        ? ['db', 'push', '--linked', '--dry-run']
        : ['db', 'push', '--linked'];
    try {
        const { stdout, stderr } = await run('supabase', cliArgs, {
            cwd: getProjectDir(target),
            timeout: 120_000,
        });
        return {
            success: true,
            target,
            output: stdout.trim(),
            errors: stderr.trim(),
        };
    }
    catch (err) {
        const failure = err;
        return {
            success: false,
            target,
            output: (failure.stdout ?? '').trim(),
            errors: (failure.stderr ?? failure.message ?? String(err)).trim(),
        };
    }
}
|
|
77
|
+
/**
 * List migration `.sql` files in the target's `supabase/migrations/`
 * directory, sorted chronologically (lexicographic sort is chronological
 * thanks to the YYYYMMDDHHMMSS filename prefix).
 *
 * Returns only filenames, not full paths.
 */
export async function listPendingMigrations(target) {
    const entries = await readdir(migrationsDir(target));
    const sqlFiles = entries.filter((name) => name.endsWith('.sql'));
    sqlFiles.sort();
    return sqlFiles;
}
/**
 * Create a new migration file (header comment only) in the target's
 * `supabase/migrations/` directory.
 *
 * The name is sanitized (whitespace -> '_', then everything outside
 * [a-zA-Z0-9_] stripped) and prefixed with a UTC YYYYMMDDHHMMSS timestamp.
 *
 * @returns The full absolute path of the created file.
 */
export async function createMigration(target, name) {
    const safeName = name.replace(/\s+/g, '_').replace(/[^a-zA-Z0-9_]/g, '');
    const targetPath = join(migrationsDir(target), `${timestamp()}_${safeName}.sql`);
    const header = `-- Migration: ${safeName}\n-- Target: ${target}\n-- Created: ${new Date().toISOString()}\n\n`;
    await writeFile(targetPath, header, { encoding: 'utf8' });
    return targetPath;
}
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
/** Row shape of the `cli_tasks` table. */
export interface CliTask {
    id: string;
    project_id: string;
    /** Parent task for subtasks, or null for top-level tasks. */
    parent_id: string | null;
    title: string;
    description: string | null;
    status: 'backlog' | 'ready' | 'in_progress' | 'blocked' | 'review' | 'done' | 'canceled';
    /** 1 = highest priority, 4 = lowest. */
    priority: 1 | 2 | 3 | 4;
    assigned_agent: string | null;
    skill_ref: string | null;
    source_repo: string | null;
    /** Task ids that must be done/canceled before this one is eligible. */
    blocked_by: string[] | null;
    labels: string[];
    metadata: Record<string, unknown>;
    started_at: string | null;
    completed_at: string | null;
    created_at: string;
    updated_at: string;
}
/** Input for createTask; defaults are applied by the implementation. */
export interface CreateTaskInput {
    project_slug: string;
    title: string;
    description?: string;
    priority?: 1 | 2 | 3 | 4;
    skill_ref?: string;
    source_repo?: string;
    labels?: string[];
    parent_id?: string;
    blocked_by?: string[];
}
/**
 * Look up a project row by slug; throws when not found.
 * NOTE(review): returns `any` — consider declaring the cli_projects row shape.
 */
export declare function getProjectBySlug(slug: string): Promise<any>;
/** Insert a new task into the project identified by input.project_slug. */
export declare function createTask(input: CreateTaskInput): Promise<CliTask>;
/** Apply a partial update to a task and return the updated row. */
export declare function updateTask(taskId: string, updates: Partial<Pick<CliTask, 'status' | 'assigned_agent' | 'priority' | 'metadata' | 'labels'>>): Promise<CliTask>;
/** Highest-priority unassigned, unblocked task for the project, or null. */
export declare function getNextTask(projectSlug: string, agentName: string): Promise<CliTask | null>;
/** All non-canceled tasks for the project, ordered by priority then age. */
export declare function getBoard(projectSlug: string): Promise<CliTask[]>;
/** Append an activity log entry for a task; throws on insert failure. */
export declare function logActivity(taskId: string, entry: {
    agent: string;
    action: string;
    message?: string;
    metadata?: Record<string, unknown>;
}): Promise<void>;
/** Log a skill run against the in-progress task referencing that skill (if any). */
export declare function logSkillExecution(skillName: string, agent: string, result: {
    success: boolean;
    message: string;
    metadata?: Record<string, unknown>;
}): Promise<void>;
|
|
@@ -0,0 +1,118 @@
|
|
|
1
|
+
import { getSupabase } from './supabase.js';
|
|
2
|
+
// Shared handle to the 'optimal' Supabase project.
const sb = () => getSupabase('optimal');
// --- Projects ---
/**
 * Look up a project row in `cli_projects` by slug.
 * Throws when no matching row exists (or the query fails).
 */
export async function getProjectBySlug(slug) {
    const result = await sb()
        .from('cli_projects')
        .select('*')
        .eq('slug', slug)
        .single();
    if (result.error) {
        throw new Error(`Project not found: ${slug} — ${result.error.message}`);
    }
    return result.data;
}
|
|
14
|
+
// --- Tasks ---
/**
 * Insert a new task into the project identified by input.project_slug.
 * Defaults: priority 3, empty labels, nulls for the optional fields.
 * Throws when the project lookup or the insert fails.
 */
export async function createTask(input) {
    const project = await getProjectBySlug(input.project_slug);
    const row = {
        project_id: project.id,
        title: input.title,
        description: input.description ?? null,
        priority: input.priority ?? 3,
        skill_ref: input.skill_ref ?? null,
        source_repo: input.source_repo ?? null,
        labels: input.labels ?? [],
        parent_id: input.parent_id ?? null,
        blocked_by: input.blocked_by ?? null,
    };
    const result = await sb()
        .from('cli_tasks')
        .insert(row)
        .select()
        .single();
    if (result.error) {
        throw new Error(`Failed to create task: ${result.error.message}`);
    }
    return result.data;
}
/**
 * Apply a partial update to a task and return the updated row.
 * Throws when the update fails (including unknown task ids).
 */
export async function updateTask(taskId, updates) {
    const result = await sb()
        .from('cli_tasks')
        .update(updates)
        .eq('id', taskId)
        .select()
        .single();
    if (result.error) {
        throw new Error(`Failed to update task ${taskId}: ${result.error.message}`);
    }
    return result.data;
}
|
|
47
|
+
// Returns the first unassigned ready/backlog task (by priority, then age)
// whose blockers are all done or canceled; null when none qualifies among
// the top 10 candidates.
// NOTE(review): `agentName` is accepted but never used here — confirm
// whether assignment filtering was intended.
export async function getNextTask(projectSlug, agentName) {
    const project = await getProjectBySlug(projectSlug);
    const { data, error } = await sb()
        .from('cli_tasks')
        .select('*')
        .eq('project_id', project.id)
        .in('status', ['ready', 'backlog'])
        .is('assigned_agent', null)
        .order('priority', { ascending: true })
        .order('created_at', { ascending: true })
        .limit(10);
    if (error)
        throw new Error(`Failed to fetch tasks: ${error.message}`);
    if (!data || data.length === 0)
        return null;
    for (const task of data) {
        // Unblocked tasks are eligible immediately.
        if (!task.blocked_by || task.blocked_by.length === 0)
            return task;
        // One extra query per blocked candidate (up to 10 round-trips).
        const { data: blockers } = await sb()
            .from('cli_tasks')
            .select('id, status')
            .in('id', task.blocked_by);
        // If the blockers query errors, `blockers` is undefined and allDone is
        // undefined (falsy) — the task is skipped rather than surfaced.
        // NOTE(review): blocker ids that match no row are silently ignored by
        // `every` over the returned rows — confirm that is intended.
        const allDone = blockers?.every(b => b.status === 'done' || b.status === 'canceled');
        if (allDone)
            return task;
    }
    return null;
}
|
|
75
|
+
/**
 * Fetch all non-canceled tasks for a project, ordered by priority (1 first)
 * then creation time. Throws when the project lookup or the query fails.
 */
export async function getBoard(projectSlug) {
    const project = await getProjectBySlug(projectSlug);
    const result = await sb()
        .from('cli_tasks')
        .select('*')
        .eq('project_id', project.id)
        .not('status', 'eq', 'canceled')
        .order('priority', { ascending: true })
        .order('created_at', { ascending: true });
    if (result.error) {
        throw new Error(`Failed to fetch board: ${result.error.message}`);
    }
    return result.data ?? [];
}
// --- Logging ---
/**
 * Append an entry to `cli_task_logs` for a task.
 * Throws when the insert fails.
 */
export async function logActivity(taskId, entry) {
    const payload = {
        task_id: taskId,
        agent: entry.agent,
        action: entry.action,
        message: entry.message ?? null,
        metadata: entry.metadata ?? {},
    };
    const { error } = await sb().from('cli_task_logs').insert(payload);
    if (error) {
        throw new Error(`Failed to log activity: ${error.message}`);
    }
}
/**
 * Record a skill run against the first in-progress task whose skill_ref
 * matches `skillName`. A no-op when no such task exists.
 */
export async function logSkillExecution(skillName, agent, result) {
    const { data: tasks } = await sb()
        .from('cli_tasks')
        .select('id')
        .eq('skill_ref', skillName)
        .eq('status', 'in_progress')
        .limit(1);
    const taskId = tasks?.[0]?.id;
    if (!taskId) {
        return;
    }
    await logActivity(taskId, {
        agent,
        action: result.success ? 'skill_success' : 'skill_error',
        message: result.message,
        metadata: result.metadata,
    });
}
|