newo 1.3.0 → 1.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env.example +2 -2
- package/CHANGELOG.md +99 -2
- package/README.md +59 -10
- package/dist/akb.d.ts +10 -0
- package/dist/akb.js +84 -0
- package/dist/api.d.ts +13 -0
- package/dist/api.js +100 -0
- package/dist/auth.d.ts +6 -0
- package/dist/auth.js +104 -0
- package/dist/cli.d.ts +3 -0
- package/dist/cli.js +111 -0
- package/dist/fsutil.d.ts +12 -0
- package/dist/fsutil.js +28 -0
- package/dist/hash.d.ts +5 -0
- package/dist/hash.js +17 -0
- package/dist/sync.d.ts +7 -0
- package/dist/sync.js +337 -0
- package/dist/types.d.ts +206 -0
- package/dist/types.js +5 -0
- package/package.json +32 -9
- package/src/{akb.js → akb.ts} +16 -25
- package/src/api.ts +127 -0
- package/src/auth.ts +142 -0
- package/src/{cli.js → cli.ts} +29 -15
- package/src/fsutil.ts +41 -0
- package/src/hash.ts +20 -0
- package/src/sync.ts +396 -0
- package/src/types.ts +248 -0
- package/src/api.js +0 -98
- package/src/auth.js +0 -92
- package/src/fsutil.js +0 -26
- package/src/hash.js +0 -17
- package/src/sync.js +0 -284
package/dist/cli.js
ADDED
|
@@ -0,0 +1,111 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import minimist from 'minimist';
|
|
3
|
+
import dotenv from 'dotenv';
|
|
4
|
+
import { makeClient, getProjectMeta, importAkbArticle } from './api.js';
|
|
5
|
+
import { pullAll, pushChanged, status } from './sync.js';
|
|
6
|
+
import { parseAkbFile, prepareArticlesForImport } from './akb.js';
|
|
7
|
+
import path from 'path';
|
|
8
|
+
dotenv.config();
|
|
9
|
+
const { NEWO_PROJECT_ID } = process.env;
|
|
10
|
+
/**
 * CLI entry point: parses argv, then dispatches to pull/push/status/meta/import-akb.
 * Exits the process with code 1 on usage errors or unknown commands.
 */
async function main() {
    const args = minimist(process.argv.slice(2));
    const cmd = args._[0];
    const verbose = Boolean(args.verbose || args.v);
    if (!cmd || ['help', '-h', '--help'].includes(cmd)) {
        printHelp();
        return;
    }
    const client = await makeClient(verbose);
    if (cmd === 'pull') {
        // If NEWO_PROJECT_ID is set, pull that single project; otherwise pull all projects.
        await pullAll(client, NEWO_PROJECT_ID || null, verbose);
    }
    else if (cmd === 'push') {
        await pushChanged(client, verbose);
    }
    else if (cmd === 'status') {
        await status(verbose);
    }
    else if (cmd === 'meta') {
        if (!NEWO_PROJECT_ID) {
            throw new Error('NEWO_PROJECT_ID is not set in env');
        }
        const meta = await getProjectMeta(client, NEWO_PROJECT_ID);
        console.log(JSON.stringify(meta, null, 2));
    }
    else if (cmd === 'import-akb') {
        await runImportAkb(client, args._[1], args._[2], verbose);
    }
    else {
        console.error('Unknown command:', cmd);
        process.exit(1);
    }
}

/** Print CLI usage, flags, expected env vars and behavioral notes. */
function printHelp() {
    console.log(`NEWO CLI
Usage:
  newo pull      # download all projects -> ./projects/ OR specific project if NEWO_PROJECT_ID set
  newo push      # upload modified *.guidance/*.jinja back to NEWO
  newo status    # show modified files
  newo meta      # get project metadata (debug, requires NEWO_PROJECT_ID)
  newo import-akb <file> <persona_id> # import AKB articles from file

Flags:
  --verbose, -v  # enable detailed logging

Env:
  NEWO_BASE_URL, NEWO_PROJECT_ID (optional), NEWO_API_KEY, NEWO_REFRESH_URL (optional)

Notes:
  - multi-project support: pull downloads all accessible projects or single project based on NEWO_PROJECT_ID
  - If NEWO_PROJECT_ID is set, pull downloads only that project
  - If NEWO_PROJECT_ID is not set, pull downloads all projects accessible with your API key
  - Projects are stored in ./projects/{project-idn}/ folders
  - Each project folder contains metadata.json and flows.yaml
`);
}

/**
 * Parse an AKB file and import each article for the given persona.
 * Per-article failures are counted and logged but do not abort the run;
 * a failure to parse/prepare the file exits the process with code 1.
 *
 * @param client   authenticated API client (from makeClient)
 * @param akbFile  path to the AKB source file (CLI positional arg)
 * @param personaId target persona UUID (CLI positional arg)
 * @param verbose  when true, log per-article progress instead of dots
 */
async function runImportAkb(client, akbFile, personaId, verbose) {
    if (!akbFile || !personaId) {
        console.error('Usage: newo import-akb <file> <persona_id>');
        console.error('Example: newo import-akb akb.txt da4550db-2b95-4500-91ff-fb4b60fe7be9');
        process.exit(1);
    }
    const filePath = path.resolve(akbFile);
    try {
        if (verbose)
            console.log(`📖 Parsing AKB file: ${filePath}`);
        const articles = parseAkbFile(filePath);
        console.log(`✓ Parsed ${articles.length} articles from ${akbFile}`);
        if (verbose)
            console.log(`🔧 Preparing articles for persona: ${personaId}`);
        const preparedArticles = prepareArticlesForImport(articles, personaId);
        let successCount = 0;
        let errorCount = 0;
        console.log(`📤 Importing ${preparedArticles.length} articles...`);
        for (const [index, article] of preparedArticles.entries()) {
            try {
                if (verbose) {
                    console.log(` [${index + 1}/${preparedArticles.length}] Importing ${article.topic_name}...`);
                }
                await importAkbArticle(client, article);
                successCount++;
                if (!verbose)
                    process.stdout.write('.');
            }
            catch (error) {
                // Keep going on individual article failures; prefer the HTTP
                // response body when the error came from the API.
                errorCount++;
                const errorMessage = error?.response?.data || error.message;
                console.error(`\n❌ Failed to import ${article.topic_name}:`, errorMessage);
            }
        }
        if (!verbose)
            console.log(''); // new line after dots
        console.log(`✅ Import complete: ${successCount} successful, ${errorCount} failed`);
    }
    catch (error) {
        console.error('❌ AKB import failed:', error.message);
        process.exit(1);
    }
}
|
|
106
|
+
// Top-level error handler: prefer the API response body when the failure
// came from an HTTP call, otherwise print the raw error.
main().catch((error) => {
    // Guard the `in` check: `'response' in error` throws a TypeError when a
    // non-object (e.g. a thrown string) reaches this handler.
    const errorData = error && typeof error === 'object' && 'response' in error
        ? error?.response?.data
        : error;
    console.error(errorData || error);
    process.exit(1);
});
|
|
111
|
+
//# sourceMappingURL=cli.js.map
|
package/dist/fsutil.d.ts
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
import type { RunnerType } from './types.js';
|
|
2
|
+
/** Root folder (<cwd>/projects) that holds one sub-folder per pulled project. */
export declare const ROOT_DIR: string;
/** Hidden state folder (<cwd>/.newo) for the CLI's bookkeeping files. */
export declare const STATE_DIR: string;
/** Path of the id-mapping file (.newo/map.json). */
export declare const MAP_PATH: string;
/** Path of the content-hash file (.newo/hashes.json). */
export declare const HASHES_PATH: string;
/** Ensure both the state and projects directories exist. */
export declare function ensureState(): Promise<void>;
/** Absolute folder for the project identified by `projectIdn`. */
export declare function projectDir(projectIdn: string): string;
/**
 * Absolute path of a skill file; `nsl` runners map to `.jinja`,
 * any other runner type to `.guidance`.
 */
export declare function skillPath(projectIdn: string, agentIdn: string, flowIdn: string, skillIdn: string, runnerType?: RunnerType): string;
/** Absolute path of a project's metadata.json. */
export declare function metadataPath(projectIdn: string): string;
/** Write `content` to `filepath`, creating parent directories first. */
export declare function writeFileAtomic(filepath: string, content: string): Promise<void>;
/** Read a UTF-8 file, or resolve to null when it does not exist. */
export declare function readIfExists(filepath: string): Promise<string | null>;
|
|
12
|
+
//# sourceMappingURL=fsutil.d.ts.map
|
package/dist/fsutil.js
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
import fs from 'fs-extra';
|
|
2
|
+
import path from 'path';
|
|
3
|
+
// Root folder that holds one sub-folder per pulled project.
export const ROOT_DIR = path.join(process.cwd(), 'projects');
// Hidden state folder for the CLI's bookkeeping files.
export const STATE_DIR = path.join(process.cwd(), '.newo');
// Mapping of project/agent/flow/skill identifiers to server ids.
export const MAP_PATH = path.join(STATE_DIR, 'map.json');
// Content hashes used to detect locally modified skill files.
export const HASHES_PATH = path.join(STATE_DIR, 'hashes.json');

/** Make sure both the state and projects directories exist. */
export async function ensureState() {
    for (const dir of [STATE_DIR, ROOT_DIR]) {
        await fs.ensureDir(dir);
    }
}

/** Absolute folder for the project identified by `projectIdn`. */
export function projectDir(projectIdn) {
    return path.join(ROOT_DIR, projectIdn);
}

/**
 * Absolute path of a skill file. `nsl` runners are stored as `.jinja`,
 * every other runner type as `.guidance`.
 */
export function skillPath(projectIdn, agentIdn, flowIdn, skillIdn, runnerType = 'guidance') {
    let extension;
    if (runnerType === 'nsl') {
        extension = '.jinja';
    } else {
        extension = '.guidance';
    }
    return path.join(projectDir(projectIdn), agentIdn, flowIdn, `${skillIdn}${extension}`);
}

/** Absolute path of a project's metadata.json. */
export function metadataPath(projectIdn) {
    return path.join(projectDir(projectIdn), 'metadata.json');
}

/** Write `content` to `filepath`, creating parent directories first. */
export async function writeFileAtomic(filepath, content) {
    const parent = path.dirname(filepath);
    await fs.ensureDir(parent);
    await fs.writeFile(filepath, content, 'utf8');
}

/** Read a UTF-8 file, or return null when it does not exist. */
export async function readIfExists(filepath) {
    const exists = await fs.pathExists(filepath);
    if (!exists) {
        return null;
    }
    return fs.readFile(filepath, 'utf8');
}
|
|
28
|
+
//# sourceMappingURL=fsutil.js.map
|
package/dist/hash.d.ts
ADDED
package/dist/hash.js
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
import crypto from 'crypto';
|
|
2
|
+
import fs from 'fs-extra';
|
|
3
|
+
import { ensureState, HASHES_PATH } from './fsutil.js';
|
|
4
|
+
/** Hex-encoded SHA-256 digest of a UTF-8 string. */
export function sha256(str) {
    const hasher = crypto.createHash('sha256');
    hasher.update(str, 'utf8');
    return hasher.digest('hex');
}

/** Load the persisted hash map, or an empty object when none exists yet. */
export async function loadHashes() {
    await ensureState();
    const exists = await fs.pathExists(HASHES_PATH);
    return exists ? fs.readJson(HASHES_PATH) : {};
}

/** Persist the hash map as pretty-printed JSON. */
export async function saveHashes(hashes) {
    await fs.writeJson(HASHES_PATH, hashes, { spaces: 2 });
}
|
|
17
|
+
//# sourceMappingURL=hash.js.map
|
package/dist/sync.d.ts
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
import type { AxiosInstance } from 'axios';
|
|
2
|
+
import type { ProjectData } from './types.js';
|
|
3
|
+
/** Pull one project's skills, metadata.json and flows.yaml; returns its id-map. */
export declare function pullSingleProject(client: AxiosInstance, projectId: string, projectIdn: string, verbose?: boolean): Promise<ProjectData>;
/** Pull one project (when projectId given) or all accessible projects; writes map + hash state. */
export declare function pullAll(client: AxiosInstance, projectId?: string | null, verbose?: boolean): Promise<void>;
/** Push every locally modified skill file back to the NEWO API and update hashes. */
export declare function pushChanged(client: AxiosInstance, verbose?: boolean): Promise<void>;
/** Print M/D markers for modified/deleted skill files compared to stored hashes. */
export declare function status(verbose?: boolean): Promise<void>;
|
|
7
|
+
//# sourceMappingURL=sync.d.ts.map
|
package/dist/sync.js
ADDED
|
@@ -0,0 +1,337 @@
|
|
|
1
|
+
import { listProjects, listAgents, listFlowSkills, updateSkill, listFlowEvents, listFlowStates, getProjectMeta } from './api.js';
|
|
2
|
+
import { ensureState, skillPath, writeFileAtomic, readIfExists, MAP_PATH, projectDir, metadataPath } from './fsutil.js';
|
|
3
|
+
import fs from 'fs-extra';
|
|
4
|
+
import { sha256, loadHashes, saveHashes } from './hash.js';
|
|
5
|
+
import yaml from 'js-yaml';
|
|
6
|
+
import path from 'path';
|
|
7
|
+
/**
 * Pull one project: write every skill file, the project's metadata.json and
 * its flows.yaml manifest. Returns the id-map for the project so callers can
 * persist it into .newo/map.json.
 */
export async function pullSingleProject(client, projectId, projectIdn, verbose = false) {
    if (verbose)
        console.log(`🔍 Fetching agents for project ${projectId} (${projectIdn})...`);
    const agents = await listAgents(client, projectId);
    if (verbose)
        console.log(`📦 Found ${agents.length} agents`);
    // Persist the raw project metadata next to the pulled skills.
    const projectMeta = await getProjectMeta(client, projectId);
    await writeFileAtomic(metadataPath(projectIdn), JSON.stringify(projectMeta, null, 2));
    if (verbose)
        console.log(`✓ Saved metadata for ${projectIdn}`);
    const projectMap = { projectId, projectIdn, agents: {} };
    for (const agent of agents) {
        const agentEntry = { id: agent.id, flows: {} };
        projectMap.agents[agent.idn] = agentEntry;
        for (const flow of agent.flows ?? []) {
            const flowEntry = { id: flow.id, skills: {} };
            agentEntry.flows[flow.idn] = flowEntry;
            const skills = await listFlowSkills(client, flow.id);
            for (const skill of skills) {
                const file = skillPath(projectIdn, agent.idn, flow.idn, skill.idn, skill.runner_type);
                await writeFileAtomic(file, skill.prompt_script || '');
                // Keep the complete skill metadata so pushChanged can send a
                // full skill object back to the API later.
                flowEntry.skills[skill.idn] = {
                    id: skill.id,
                    title: skill.title,
                    idn: skill.idn,
                    runner_type: skill.runner_type,
                    model: skill.model,
                    parameters: skill.parameters,
                    path: skill.path || undefined
                };
                console.log(`✓ Pulled ${file}`);
            }
        }
    }
    // Emit the human-readable flow manifest for this project.
    if (verbose)
        console.log(`📄 Generating flows.yaml for ${projectIdn}...`);
    await generateFlowsYaml(client, agents, projectIdn, verbose);
    return projectMap;
}
|
|
48
|
+
/**
 * Compute content hashes for every skill file recorded in a project map.
 * Shared by both pull modes (the original duplicated this triple-nested
 * loop verbatim in each branch).
 *
 * @returns object mapping absolute skill-file path -> sha256 hex digest
 */
async function collectProjectHashes(projectIdn, projectMap) {
    const hashes = {};
    for (const [agentIdn, agentObj] of Object.entries(projectMap.agents)) {
        for (const [flowIdn, flowObj] of Object.entries(agentObj.flows)) {
            for (const [skillIdn, skillMeta] of Object.entries(flowObj.skills)) {
                const p = skillPath(projectIdn, agentIdn, flowIdn, skillIdn, skillMeta.runner_type);
                const content = await fs.readFile(p, 'utf8');
                hashes[p] = sha256(content);
            }
        }
    }
    return hashes;
}

/**
 * Pull either a single project (when projectId is given) or every project
 * accessible with the API key. Writes .newo/map.json plus the baseline
 * content hashes that push/status use to detect local edits.
 */
export async function pullAll(client, projectId = null, verbose = false) {
    await ensureState();
    if (projectId) {
        // Single project mode
        const projectMeta = await getProjectMeta(client, projectId);
        const projectMap = await pullSingleProject(client, projectId, projectMeta.idn, verbose);
        const idMap = { projects: { [projectMeta.idn]: projectMap } };
        await fs.writeJson(MAP_PATH, idMap, { spaces: 2 });
        // Generate hash tracking for this project
        const hashes = await collectProjectHashes(projectMeta.idn, projectMap);
        await saveHashes(hashes);
        return;
    }
    // Multi-project mode
    if (verbose)
        console.log('🔍 Fetching all projects...');
    const projects = await listProjects(client);
    if (verbose)
        console.log(`📦 Found ${projects.length} projects`);
    const idMap = { projects: {} };
    const allHashes = {};
    for (const project of projects) {
        if (verbose)
            console.log(`\n📁 Processing project: ${project.idn} (${project.title})`);
        const projectMap = await pullSingleProject(client, project.id, project.idn, verbose);
        idMap.projects[project.idn] = projectMap;
        // Collect hashes for this project
        Object.assign(allHashes, await collectProjectHashes(project.idn, projectMap));
    }
    await fs.writeJson(MAP_PATH, idMap, { spaces: 2 });
    await saveHashes(allHashes);
}
|
|
97
|
+
/**
 * Push every locally modified skill file back to the NEWO API.
 *
 * Compares each tracked file's sha256 against the hashes stored at pull
 * time; only files whose hash differs are uploaded. Successfully pushed
 * hashes are persisted so the next run sees them as clean.
 *
 * @param client  authenticated API client (Axios instance from makeClient)
 * @param verbose when true, log every scanned file and hash comparison
 * @throws Error when .newo/map.json does not exist (pull was never run)
 */
export async function pushChanged(client, verbose = false) {
    await ensureState();
    if (!(await fs.pathExists(MAP_PATH))) {
        throw new Error('Missing .newo/map.json. Run `newo pull` first.');
    }
    if (verbose)
        console.log('📋 Loading project mapping...');
    const idMap = await fs.readJson(MAP_PATH);
    if (verbose)
        console.log('🔍 Loading file hashes...');
    const oldHashes = await loadHashes();
    // Start from the old hashes so unchanged/missing files keep their entries.
    const newHashes = { ...oldHashes };
    if (verbose)
        console.log('🔄 Scanning for changes...');
    let pushed = 0;
    let scanned = 0;
    // Handle both old single-project format and new multi-project format.
    // Legacy maps have agents at the top level; they are wrapped under the
    // '' key so projectIdn is falsy and skillPath gets an empty project.
    const projects = 'projects' in idMap && idMap.projects ? idMap.projects : { '': idMap };
    for (const [projectIdn, projectData] of Object.entries(projects)) {
        if (verbose && projectIdn)
            console.log(`📁 Scanning project: ${projectIdn}`);
        for (const [agentIdn, agentObj] of Object.entries(projectData.agents)) {
            if (verbose)
                console.log(` 📁 Scanning agent: ${agentIdn}`);
            for (const [flowIdn, flowObj] of Object.entries(agentObj.flows)) {
                if (verbose)
                    console.log(` 📁 Scanning flow: ${flowIdn}`);
                for (const [skillIdn, skillMeta] of Object.entries(flowObj.skills)) {
                    const p = projectIdn ?
                        skillPath(projectIdn, agentIdn, flowIdn, skillIdn, skillMeta.runner_type) :
                        skillPath('', agentIdn, flowIdn, skillIdn, skillMeta.runner_type);
                    scanned++;
                    if (verbose)
                        console.log(` 📄 Checking: ${p}`);
                    const content = await readIfExists(p);
                    // A locally deleted file is skipped, not pushed as empty.
                    if (content === null) {
                        if (verbose)
                            console.log(` ⚠️ File not found: ${p}`);
                        continue;
                    }
                    const h = sha256(content);
                    const oldHash = oldHashes[p];
                    if (verbose) {
                        console.log(` 🔍 Hash comparison:`);
                        console.log(` Old: ${oldHash || 'none'}`);
                        console.log(` New: ${h}`);
                    }
                    if (oldHash !== h) {
                        if (verbose)
                            console.log(` 🔄 File changed, preparing to push...`);
                        // Create complete skill object with updated prompt_script;
                        // the API expects the full skill, not just the script.
                        const skillObject = {
                            id: skillMeta.id,
                            title: skillMeta.title,
                            idn: skillMeta.idn,
                            prompt_script: content,
                            runner_type: skillMeta.runner_type,
                            model: skillMeta.model,
                            parameters: skillMeta.parameters,
                            path: skillMeta.path || undefined
                        };
                        if (verbose) {
                            console.log(` 📤 Pushing skill object:`);
                            console.log(` ID: ${skillObject.id}`);
                            console.log(` Title: ${skillObject.title}`);
                            console.log(` IDN: ${skillObject.idn}`);
                            console.log(` Content length: ${content.length} chars`);
                            console.log(` Content preview: ${content.substring(0, 100).replace(/\n/g, '\\n')}...`);
                        }
                        await updateSkill(client, skillObject);
                        console.log(`↑ Pushed ${p}`);
                        // Only record the new hash after a successful upload,
                        // so a failed push is retried next time.
                        newHashes[p] = h;
                        pushed++;
                    }
                    else if (verbose) {
                        console.log(` ✓ No changes`);
                    }
                }
            }
        }
    }
    if (verbose)
        console.log(`🔄 Scanned ${scanned} files, found ${pushed} changes`);
    await saveHashes(newHashes);
    console.log(pushed ? `✅ Push complete. ${pushed} file(s) updated.` : '✅ Nothing to push.');
}
|
|
183
|
+
/**
 * Print a git-style status of tracked skill files:
 *   `M <path>` for files whose content hash differs from the stored one,
 *   `D <path>` for tracked files that no longer exist locally.
 * Read-only: never touches the API or the stored hashes.
 *
 * @param verbose when true, log every checked file and hash comparison
 */
export async function status(verbose = false) {
    await ensureState();
    if (!(await fs.pathExists(MAP_PATH))) {
        console.log('No map. Run `newo pull` first.');
        return;
    }
    if (verbose)
        console.log('📋 Loading project mapping and hashes...');
    const idMap = await fs.readJson(MAP_PATH);
    const hashes = await loadHashes();
    let dirty = 0;
    // Handle both old single-project format and new multi-project format.
    // Legacy maps are wrapped under the '' key (same scheme as pushChanged).
    const projects = 'projects' in idMap && idMap.projects ? idMap.projects : { '': idMap };
    for (const [projectIdn, projectData] of Object.entries(projects)) {
        if (verbose && projectIdn)
            console.log(`📁 Checking project: ${projectIdn}`);
        for (const [agentIdn, agentObj] of Object.entries(projectData.agents)) {
            if (verbose)
                console.log(` 📁 Checking agent: ${agentIdn}`);
            for (const [flowIdn, flowObj] of Object.entries(agentObj.flows)) {
                if (verbose)
                    console.log(` 📁 Checking flow: ${flowIdn}`);
                for (const [skillIdn, skillMeta] of Object.entries(flowObj.skills)) {
                    const p = projectIdn ?
                        skillPath(projectIdn, agentIdn, flowIdn, skillIdn, skillMeta.runner_type) :
                        skillPath('', agentIdn, flowIdn, skillIdn, skillMeta.runner_type);
                    const exists = await fs.pathExists(p);
                    // Missing file -> tracked but deleted locally.
                    if (!exists) {
                        console.log(`D ${p}`);
                        dirty++;
                        if (verbose)
                            console.log(` ❌ Deleted: ${p}`);
                        continue;
                    }
                    const content = await fs.readFile(p, 'utf8');
                    const h = sha256(content);
                    const oldHash = hashes[p];
                    if (verbose) {
                        console.log(` 📄 ${p}`);
                        console.log(` Old hash: ${oldHash || 'none'}`);
                        console.log(` New hash: ${h}`);
                    }
                    if (oldHash !== h) {
                        console.log(`M ${p}`);
                        dirty++;
                        if (verbose)
                            console.log(` 🔄 Modified: ${p}`);
                    }
                    else if (verbose) {
                        console.log(` ✓ Unchanged: ${p}`);
                    }
                }
            }
        }
    }
    console.log(dirty ? `${dirty} changed file(s).` : 'Clean.');
}
|
|
240
|
+
/**
 * Build and write the flows.yaml manifest for one project.
 *
 * For each agent/flow it gathers skills, events and state fields from the
 * API, dumps everything to YAML, then post-processes the output so
 * `!enum "..."` values appear as YAML tags rather than quoted strings.
 *
 * @param client     authenticated API client
 * @param agents     agents of the project (as returned by listAgents)
 * @param projectIdn project identifier used for the output folder
 * @param verbose    when true, log per-agent/flow progress
 */
async function generateFlowsYaml(client, agents, projectIdn, verbose = false) {
    const flowsData = { flows: [] };
    for (const agent of agents) {
        if (verbose)
            console.log(` 📁 Processing agent: ${agent.idn}`);
        const agentFlows = [];
        for (const flow of agent.flows ?? []) {
            if (verbose)
                console.log(` 📄 Processing flow: ${flow.idn}`);
            // Get skills for this flow
            const skills = await listFlowSkills(client, flow.id);
            const skillsData = skills.map(skill => ({
                idn: skill.idn,
                title: skill.title || "",
                // NOTE(review): non-nsl runners get a `.nsl` extension here,
                // while skillPath writes them as `.guidance` — presumably the
                // manifest uses its own path scheme; confirm against the
                // flows.yaml consumer before changing either side.
                prompt_script: `flows/${flow.idn}/${skill.idn}.${skill.runner_type === 'nsl' ? 'jinja' : 'nsl'}`,
                runner_type: `!enum "RunnerType.${skill.runner_type}"`,
                model: {
                    model_idn: skill.model.model_idn,
                    provider_idn: skill.model.provider_idn
                },
                parameters: skill.parameters.map(param => ({
                    name: param.name,
                    // Single space keeps the key present in the YAML output.
                    default_value: param.default_value || " "
                }))
            }));
            // Get events for this flow; treat an API error as "no events".
            let eventsData = [];
            try {
                const events = await listFlowEvents(client, flow.id);
                eventsData = events.map(event => ({
                    title: event.description,
                    idn: event.idn,
                    skill_selector: `!enum "SkillSelector.${event.skill_selector}"`,
                    // `undefined` values are dropped by yaml.dump, so optional
                    // fields simply disappear from the manifest.
                    skill_idn: event.skill_idn || undefined,
                    state_idn: event.state_idn || undefined,
                    integration_idn: event.integration_idn || undefined,
                    connector_idn: event.connector_idn || undefined,
                    interrupt_mode: `!enum "InterruptMode.${event.interrupt_mode}"`
                }));
                if (verbose)
                    console.log(` 📋 Found ${events.length} events`);
            }
            catch (error) {
                if (verbose)
                    console.log(` ⚠️ No events found for flow ${flow.idn}`);
            }
            // Get state fields for this flow; same best-effort handling.
            let stateFieldsData = [];
            try {
                const states = await listFlowStates(client, flow.id);
                stateFieldsData = states.map(state => ({
                    title: state.title,
                    idn: state.idn,
                    default_value: state.default_value || undefined,
                    scope: `!enum "StateFieldScope.${state.scope}"`
                }));
                if (verbose)
                    console.log(` 📊 Found ${states.length} state fields`);
            }
            catch (error) {
                if (verbose)
                    console.log(` ⚠️ No state fields found for flow ${flow.idn}`);
            }
            agentFlows.push({
                idn: flow.idn,
                title: flow.title,
                description: flow.description || null,
                default_runner_type: `!enum "RunnerType.${flow.default_runner_type}"`,
                default_provider_idn: flow.default_model.provider_idn,
                default_model_idn: flow.default_model.model_idn,
                skills: skillsData,
                events: eventsData,
                state_fields: stateFieldsData
            });
        }
        const agentData = {
            agent_idn: agent.idn,
            agent_description: agent.description || undefined,
            agent_flows: agentFlows
        };
        flowsData.flows.push(agentData);
    }
    // Convert to YAML and write to file with custom enum handling
    let yamlContent = yaml.dump(flowsData, {
        indent: 2,
        lineWidth: -1, // never wrap long lines
        noRefs: true,
        sortKeys: false, // preserve insertion order
        quotingType: '"',
        forceQuotes: false
    });
    // Post-process to fix enum formatting: yaml.dump quotes the whole
    // `!enum "..."` string; strip the outer quotes so it becomes a YAML tag.
    yamlContent = yamlContent.replace(/"(!enum "[^"]+")"/g, '$1');
    const yamlPath = path.join(projectDir(projectIdn), 'flows.yaml');
    await writeFileAtomic(yamlPath, yamlContent);
    console.log(`✓ Generated flows.yaml for ${projectIdn}`);
}
|
|
337
|
+
//# sourceMappingURL=sync.js.map
|