@rglabs/butterfly 2.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CLAUDE.md +201 -0
- package/README.md +371 -0
- package/dist/commands/add.d.ts +23 -0
- package/dist/commands/add.js +303 -0
- package/dist/commands/code.d.ts +11 -0
- package/dist/commands/code.js +72 -0
- package/dist/commands/create-object.d.ts +6 -0
- package/dist/commands/create-object.js +293 -0
- package/dist/commands/create-report.d.ts +6 -0
- package/dist/commands/create-report.js +154 -0
- package/dist/commands/diff.d.ts +4 -0
- package/dist/commands/diff.js +238 -0
- package/dist/commands/download.d.ts +4 -0
- package/dist/commands/download.js +374 -0
- package/dist/commands/layout.d.ts +12 -0
- package/dist/commands/layout.js +83 -0
- package/dist/commands/record.d.ts +21 -0
- package/dist/commands/record.js +483 -0
- package/dist/commands/run-poc.d.ts +3 -0
- package/dist/commands/run-poc.js +18 -0
- package/dist/commands/setup.d.ts +3 -0
- package/dist/commands/setup.js +66 -0
- package/dist/commands/start-poc.d.ts +3 -0
- package/dist/commands/start-poc.js +55 -0
- package/dist/commands/sync-docs.d.ts +3 -0
- package/dist/commands/sync-docs.js +27 -0
- package/dist/commands/translate.d.ts +13 -0
- package/dist/commands/translate.js +401 -0
- package/dist/commands/upload.d.ts +3 -0
- package/dist/commands/upload.js +150 -0
- package/dist/commands/workflow-info.d.ts +13 -0
- package/dist/commands/workflow-info.js +161 -0
- package/dist/components/ConflictResolver.d.ts +12 -0
- package/dist/components/ConflictResolver.js +77 -0
- package/dist/components/DiffView.d.ts +11 -0
- package/dist/components/DiffView.js +101 -0
- package/dist/components/DownloadProgress.d.ts +11 -0
- package/dist/components/DownloadProgress.js +29 -0
- package/dist/components/RecordPreview.d.ts +11 -0
- package/dist/components/RecordPreview.js +91 -0
- package/dist/components/SetupForm.d.ts +8 -0
- package/dist/components/SetupForm.js +56 -0
- package/dist/components/UploadProgress.d.ts +13 -0
- package/dist/components/UploadProgress.js +42 -0
- package/dist/diff/adapters/index.d.ts +8 -0
- package/dist/diff/adapters/index.js +18 -0
- package/dist/diff/adapters/objectsAdapter.d.ts +13 -0
- package/dist/diff/adapters/objectsAdapter.js +177 -0
- package/dist/diff/adapters/reportsAdapter.d.ts +14 -0
- package/dist/diff/adapters/reportsAdapter.js +212 -0
- package/dist/diff/adapters/types.d.ts +19 -0
- package/dist/diff/adapters/types.js +2 -0
- package/dist/diff/engine.d.ts +19 -0
- package/dist/diff/engine.js +57 -0
- package/dist/diff/types.d.ts +34 -0
- package/dist/diff/types.js +110 -0
- package/dist/index.d.ts +3 -0
- package/dist/index.js +117 -0
- package/dist/types/index.d.ts +18 -0
- package/dist/types/index.js +2 -0
- package/dist/utils/api.d.ts +85 -0
- package/dist/utils/api.js +1031 -0
- package/dist/utils/auth.d.ts +4 -0
- package/dist/utils/auth.js +22 -0
- package/dist/utils/bfySplitter.d.ts +12 -0
- package/dist/utils/bfySplitter.js +151 -0
- package/dist/utils/docs.d.ts +16 -0
- package/dist/utils/docs.js +186 -0
- package/dist/utils/errorLogger.d.ts +6 -0
- package/dist/utils/errorLogger.js +29 -0
- package/dist/utils/files.d.ts +14 -0
- package/dist/utils/files.js +772 -0
- package/dist/utils/lockManager.d.ts +15 -0
- package/dist/utils/lockManager.js +126 -0
- package/dist/utils/resourceHandlers.d.ts +50 -0
- package/dist/utils/resourceHandlers.js +684 -0
- package/dist/utils/resourceMapping.d.ts +32 -0
- package/dist/utils/resourceMapping.js +210 -0
- package/dist/utils/singleResourceDownload.d.ts +14 -0
- package/dist/utils/singleResourceDownload.js +261 -0
- package/dist/utils/summaryGenerator.d.ts +2 -0
- package/dist/utils/summaryGenerator.js +183 -0
- package/dist/utils/uploadHandler.d.ts +31 -0
- package/dist/utils/uploadHandler.js +263 -0
- package/docs/AI_API.md +93 -0
- package/docs/CLAUDE.md +216 -0
- package/docs/PROJECT_SPECIFIC.md +1 -0
- package/docs/RECORD_COMMAND.md +262 -0
- package/docs/WORKFLOW_API.md +480 -0
- package/docs/bfy-splitting.md +126 -0
- package/docs/cli-commands.md +333 -0
- package/docs/examples/README.md +95 -0
- package/docs/examples/order-system.md +147 -0
- package/docs/examples/product-catalog.md +195 -0
- package/docs/examples/reports.md +187 -0
- package/docs/excel-export.md +216 -0
- package/docs/field-types/README.md +29 -0
- package/docs/field-types/calculated.md +147 -0
- package/docs/field-types/code-mappings.md +84 -0
- package/docs/field-types/custom.md +340 -0
- package/docs/object-specs/README.md +136 -0
- package/docs/object-specs/code-parameters.md +151 -0
- package/docs/object-specs/creating.md +203 -0
- package/docs/object-specs/js-code-examples.md +208 -0
- package/docs/object-specs/js-field-updates.md +168 -0
- package/docs/objects/README.md +89 -0
- package/docs/objects/creating.md +127 -0
- package/docs/page-layout.md +361 -0
- package/docs/permissions.md +260 -0
- package/docs/reports.md +197 -0
- package/docs/state-machines.md +544 -0
- package/docs/tasks/create-object.md +81 -0
- package/docs/translations.md +346 -0
- package/docs/twig-helpers.md +283 -0
- package/docs/webservices.md +159 -0
- package/docs/workspaces.md +176 -0
- package/package.json +59 -0
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
import { join } from 'path';
|
|
2
|
+
import { promises as fs } from 'fs';
|
|
3
|
+
// Per-project auth config lives at <cwd>/.butterfly/config.json.
const CONFIG_DIR = '.butterfly';
const CONFIG_FILE = 'config.json';
// Absolute path of the config file, resolved against the current working directory.
function getConfigPath() {
    const cwd = process.cwd();
    return join(cwd, CONFIG_DIR, CONFIG_FILE);
}
|
|
8
|
+
// Persist the auth config object as pretty-printed JSON, creating the
// .butterfly/ directory in the working directory when missing.
export async function saveAuthConfig(config) {
    await fs.mkdir(join(process.cwd(), CONFIG_DIR), { recursive: true });
    const serialized = JSON.stringify(config, null, 2);
    await fs.writeFile(getConfigPath(), serialized);
}
|
|
13
|
+
// Read and parse the saved auth config. Resolves to null when the file is
// missing, unreadable, or not valid JSON (e.g. before `setup` has run).
export async function loadAuthConfig() {
    try {
        const raw = await fs.readFile(getConfigPath(), 'utf-8');
        return JSON.parse(raw);
    }
    catch {
        return null;
    }
}
|
|
22
|
+
//# sourceMappingURL=auth.js.map
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
/** True when `content` contains at least one part-start AND one part-end marker. */
export declare function hasSplitMarkers(content: string): boolean;
/** True when `content` contains at least one `{# --- include: name --- #}` marker. */
export declare function hasIncludeMarkers(content: string): boolean;
/** True when `folderPath` is a directory containing at least one `.bfy` file. */
export declare function isSplitFolder(folderPath: string): Promise<boolean>;
/**
 * Split a marker-annotated `.bfy` file in place: each part is written to a
 * sibling folder named after the file, and the container is rewritten with
 * include markers. `wasSplit` is false (with an optional `error`) when the
 * file has no markers, the markers are unbalanced/duplicated, or I/O fails.
 */
export declare function splitBfyFile(filePath: string): Promise<{
    wasSplit: boolean;
    error?: string;
}>;
/** Reconstruct the full content of a split file in memory; the file on disk is not modified. */
export declare function mergeBfyFile(filePath: string): Promise<string>;
/** Read a `.bfy` file, transparently expanding include markers when present. */
export declare function smartReadBfyContent(bfyPath: string): Promise<string>;
/** Name-based heuristic: does this path look like a part file inside a parts folder? */
export declare function isInsidePartsFolder(filePath: string): boolean;
/** Map a part file (`…/<container>/<part>.bfy`) to its container path (`…/<container>.bfy`). */
export declare function getContainerPathFromPartFile(partFilePath: string): string | null;
//# sourceMappingURL=bfySplitter.d.ts.map
|
|
@@ -0,0 +1,151 @@
|
|
|
1
|
+
import { promises as fs } from 'fs';
|
|
2
|
+
import { join, dirname, basename } from 'path';
|
|
3
|
+
// Twig-comment markers used to split a .bfy file into named parts:
//   {# --- part: <name> --- #} ... {# --- endpart --- #}
// and to reference an extracted part from the container file:
//   {# --- include: <name> --- #}
const PART_START_REGEX = /\{#\s*---\s*part:\s*([a-zA-Z0-9_-]+)\s*---\s*#\}/g;
const PART_END_REGEX = /\{#\s*---\s*endpart\s*---\s*#\}/g;
const INCLUDE_MARKER_REGEX = /\{#\s*---\s*include:\s*([a-zA-Z0-9_-]+)\s*---\s*#\}/g;
// True when content has at least one part-start AND one part-end marker.
// Fresh RegExp instances avoid the stateful lastIndex of the shared
// 'g'-flagged constants above.
export function hasSplitMarkers(content) {
    const startMatcher = new RegExp(PART_START_REGEX.source);
    const endMatcher = new RegExp(PART_END_REGEX.source);
    return startMatcher.test(content) && endMatcher.test(content);
}
// True when content has at least one include marker.
export function hasIncludeMarkers(content) {
    return new RegExp(INCLUDE_MARKER_REGEX.source).test(content);
}
|
|
14
|
+
// A "split folder" is a directory holding at least one .bfy part file.
// Resolves to false for missing paths, plain files, and folders without
// any .bfy entries — never rejects.
export async function isSplitFolder(folderPath) {
    try {
        const stats = await fs.stat(folderPath);
        if (!stats.isDirectory()) {
            return false;
        }
        const entries = await fs.readdir(folderPath);
        return entries.some(entry => entry.endsWith('.bfy'));
    }
    catch {
        return false;
    }
}
|
|
26
|
+
// Pair part-start and part-end markers in document order, extract the text
// between each pair, and rewrite the container so every extracted span is
// replaced by an include marker. Returns null when markers are absent,
// mismatched in count, or out of order (end before its paired start).
// NOTE(review): pairing is positional (i-th start with i-th end), so nested
// part markers are not supported — presumably by design.
function parseParts(content) {
    const startMatches = [...content.matchAll(new RegExp(PART_START_REGEX.source, 'g'))];
    const endMatches = [...content.matchAll(new RegExp(PART_END_REGEX.source, 'g'))];
    if (startMatches.length === 0 || startMatches.length !== endMatches.length) {
        return null;
    }
    const parts = [];
    const spans = [];
    for (let i = 0; i < startMatches.length; i++) {
        const startMatch = startMatches[i];
        const endMatch = endMatches[i];
        if (endMatch.index <= startMatch.index) {
            return null;
        }
        const innerStart = startMatch.index + startMatch[0].length;
        parts.push({
            name: startMatch[1],
            content: content.substring(innerStart, endMatch.index).trim()
        });
        spans.push({
            name: startMatch[1],
            from: startMatch.index,
            to: endMatch.index + endMatch[0].length
        });
    }
    // Splice include markers in from the back so earlier offsets stay valid.
    let containerContent = content;
    for (const span of spans.reverse()) {
        containerContent =
            containerContent.slice(0, span.from) +
            `{# --- include: ${span.name} --- #}` +
            containerContent.slice(span.to);
    }
    return { parts, containerContent };
}
|
|
64
|
+
// Split a marker-annotated .bfy file in place: extracted parts are written
// as <name>.bfy files into a sibling folder named after the container, and
// the container is rewritten with include markers. Returns { wasSplit }
// plus an error message when markers are invalid/duplicated or I/O fails.
// Never throws — all failures are reported through the result object.
export async function splitBfyFile(filePath) {
    try {
        const content = await fs.readFile(filePath, 'utf-8');
        if (!hasSplitMarkers(content)) {
            return { wasSplit: false };
        }
        const parsed = parseParts(content);
        if (!parsed) {
            return { wasSplit: false, error: 'Unbalanced or invalid part markers' };
        }
        const { parts, containerContent } = parsed;
        // Reject repeated part names: every repeat occurrence is reported.
        const seen = new Set();
        const duplicates = [];
        for (const part of parts) {
            if (seen.has(part.name)) {
                duplicates.push(part.name);
            }
            seen.add(part.name);
        }
        if (duplicates.length > 0) {
            return { wasSplit: false, error: `Duplicate part names: ${duplicates.join(', ')}` };
        }
        const partsFolder = join(dirname(filePath), basename(filePath, '.bfy'));
        await fs.mkdir(partsFolder, { recursive: true });
        for (const part of parts) {
            await fs.writeFile(join(partsFolder, `${part.name}.bfy`), part.content);
        }
        await fs.writeFile(filePath, containerContent);
        return { wasSplit: true };
    }
    catch (error) {
        return { wasSplit: false, error: String(error) };
    }
}
|
|
95
|
+
// Reconstruct the full content of a split .bfy file in memory: each include
// marker is expanded with the corresponding part file, re-wrapped in
// part/endpart markers. The file on disk is NOT modified. A missing part
// file leaves its include marker in place and warns on stderr.
export async function mergeBfyFile(filePath) {
    const content = await fs.readFile(filePath, 'utf-8');
    if (!hasIncludeMarkers(content)) {
        return content;
    }
    const partsFolder = join(dirname(filePath), basename(filePath, '.bfy'));
    if (!(await isSplitFolder(partsFolder))) {
        // No parts folder on disk — return the container untouched.
        return content;
    }
    const includeMatches = [...content.matchAll(new RegExp(INCLUDE_MARKER_REGEX.source, 'g'))];
    let mergedContent = content;
    for (const match of includeMatches) {
        const partName = match[1];
        const partFile = join(partsFolder, `${partName}.bfy`);
        try {
            const partContent = await fs.readFile(partFile, 'utf-8');
            const wrapped = `{# --- part: ${partName} --- #}\n${partContent}\n{# --- endpart --- #}`;
            mergedContent = mergedContent.replace(match[0], wrapped);
        }
        catch {
            console.error(`Warning: Part file not found: ${partFile}`);
        }
    }
    return mergedContent;
}
|
|
126
|
+
// Read a .bfy file, transparently expanding include markers via
// mergeBfyFile; files without markers are returned as-is.
export async function smartReadBfyContent(bfyPath) {
    const content = await fs.readFile(bfyPath, 'utf-8');
    return hasIncludeMarkers(content) ? mergeBfyFile(bfyPath) : content;
}
|
|
133
|
+
// Heuristic: a .bfy file is treated as a part file when it sits at least two
// path segments deep and some intermediate folder (other than the first
// segment) has no '.' in its name — i.e. it looks like a generated parts
// folder named after a container file.
// NOTE(review): purely name-based; it does not verify that a matching
// <folder>.bfy container actually exists on disk.
export function isInsidePartsFolder(filePath) {
    if (!filePath.endsWith('.bfy')) {
        return false;
    }
    // Split on both '/' and '\' so Windows paths (as produced by path.join)
    // are recognized too; the original split only on '/' and therefore
    // always returned false on Windows.
    const segments = filePath.split(/[\\/]/);
    // Skip segment 0 (may be a drive/root/top-level dir) and the file name.
    for (let i = 1; i < segments.length - 1; i++) {
        if (!segments[i].includes('.')) {
            return true;
        }
    }
    return false;
}
|
|
145
|
+
// Map a part file (…/<container>/<part>.bfy) to its container path
// (…/<container>.bfy). Assumes the argument is inside a parts folder;
// no existence check is performed (declared return is string | null, but
// this implementation always returns a string).
export function getContainerPathFromPartFile(partFilePath) {
    const partsFolder = dirname(partFilePath);
    return join(dirname(partsFolder), `${basename(partsFolder)}.bfy`);
}
|
|
151
|
+
//# sourceMappingURL=bfySplitter.js.map
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
/**
 * Compare the docs bundled with the package against the copies in the
 * project (cwd) and report which files are missing or stale (by content
 * hash). Never rejects — missing directories yield a "no update" result.
 */
export declare function checkDocsStatus(): Promise<{
    needsUpdate: boolean;
    sourceExists: boolean;
    targetExists: boolean;
    filesToUpdate: string[];
}>;
/** Run a full docs sync and report only success/failure. */
export declare function updateDocs(): Promise<boolean>;
/** If any bundled doc is stale in the project, sync all docs, logging progress to the console. */
export declare function checkAndPromptDocsUpdate(): Promise<void>;
/** Back-compat alias: delegates to checkAndPromptDocsUpdate. */
export declare function checkAndPromptClaudeMdUpdate(): Promise<void>;
/**
 * Copy bundled docs into the project: docs/CLAUDE.md → <cwd>/CLAUDE.md,
 * docs/** → <cwd>/docs/**, and .claude/commands/** when shipped.
 * PROJECT_SPECIFIC.md is only seeded when absent in the target.
 */
export declare function syncDocs(): Promise<{
    success: boolean;
    syncedFiles: string[];
    error?: string;
}>;
/** Legacy entry point: runs syncDocs and discards the result. */
export declare function copyDocs(): Promise<void>;
//# sourceMappingURL=docs.d.ts.map
|
|
@@ -0,0 +1,186 @@
|
|
|
1
|
+
import { join, dirname } from 'path';
|
|
2
|
+
import { promises as fs } from 'fs';
|
|
3
|
+
import { fileURLToPath } from 'url';
|
|
4
|
+
import { createHash } from 'crypto';
|
|
5
|
+
// Recreate CommonJS-style __filename/__dirname in this ES module so the
// functions below can locate package-relative resources (docs/, .claude/).
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
|
|
7
|
+
/**
 * Compare the docs bundled with this package against the copies in the
 * user's project (cwd) and report which files are missing or stale.
 * Files are compared by MD5 content hash. Never throws: missing
 * directories simply short-circuit to a "no update" result.
 */
export async function checkDocsStatus() {
    // Package-side sources: dist/utils/ → package root → docs / .claude.
    const packageDocsDir = join(__dirname, '..', '..', 'docs');
    const packageCommandsDir = join(__dirname, '..', '..', '.claude', 'commands');
    // Project-side targets, resolved against the current working directory.
    const targetDocsDir = join(process.cwd(), 'docs');
    const targetCommandsDir = join(process.cwd(), '.claude', 'commands');
    const targetClaudeMd = join(process.cwd(), 'CLAUDE.md');
    let sourceExists = false;
    let targetExists = false;
    const filesToUpdate = [];
    try {
        await fs.access(packageDocsDir);
        sourceExists = true;
    }
    catch {
        // Package ships no docs at all — nothing can need updating.
        return { needsUpdate: false, sourceExists: false, targetExists: false, filesToUpdate: [] };
    }
    try {
        await fs.access(targetDocsDir);
        targetExists = true;
    }
    catch {
        // Target docs/ missing; individual files are flagged below.
    }
    // CLAUDE.md is special-cased: it is installed at the project root,
    // not under docs/.
    const packageClaudeMd = join(packageDocsDir, 'CLAUDE.md');
    try {
        const sourceContent = await fs.readFile(packageClaudeMd, 'utf-8');
        const sourceHash = createHash('md5').update(sourceContent).digest('hex');
        try {
            const targetContent = await fs.readFile(targetClaudeMd, 'utf-8');
            const targetHash = createHash('md5').update(targetContent).digest('hex');
            if (sourceHash !== targetHash) {
                filesToUpdate.push('CLAUDE.md');
            }
        }
        catch {
            // Target CLAUDE.md missing/unreadable → needs (re)install.
            filesToUpdate.push('CLAUDE.md');
        }
    }
    catch {
        // Package has no CLAUDE.md — skip silently.
    }
    await checkDirectoryForUpdates(packageDocsDir, targetDocsDir, filesToUpdate, 'docs');
    try {
        await fs.access(packageCommandsDir);
        await checkDirectoryForUpdates(packageCommandsDir, targetCommandsDir, filesToUpdate, '.claude/commands');
    }
    catch {
        // .claude/commands is optional in the package.
    }
    return {
        needsUpdate: filesToUpdate.length > 0,
        sourceExists,
        targetExists,
        filesToUpdate
    };
}
|
|
60
|
+
async function checkDirectoryForUpdates(sourceDir, targetDir, filesToUpdate, relativePath) {
|
|
61
|
+
try {
|
|
62
|
+
const files = await fs.readdir(sourceDir, { withFileTypes: true });
|
|
63
|
+
for (const file of files) {
|
|
64
|
+
const sourcePath = join(sourceDir, file.name);
|
|
65
|
+
const targetPath = join(targetDir, file.name);
|
|
66
|
+
const relPath = join(relativePath, file.name);
|
|
67
|
+
if (file.isFile()) {
|
|
68
|
+
if (file.name === 'PROJECT_SPECIFIC.md') {
|
|
69
|
+
try {
|
|
70
|
+
await fs.access(targetPath);
|
|
71
|
+
continue;
|
|
72
|
+
}
|
|
73
|
+
catch {
|
|
74
|
+
}
|
|
75
|
+
}
|
|
76
|
+
try {
|
|
77
|
+
const sourceContent = await fs.readFile(sourcePath, 'utf-8');
|
|
78
|
+
const sourceHash = createHash('md5').update(sourceContent).digest('hex');
|
|
79
|
+
try {
|
|
80
|
+
const targetContent = await fs.readFile(targetPath, 'utf-8');
|
|
81
|
+
const targetHash = createHash('md5').update(targetContent).digest('hex');
|
|
82
|
+
if (sourceHash !== targetHash) {
|
|
83
|
+
filesToUpdate.push(relPath);
|
|
84
|
+
}
|
|
85
|
+
}
|
|
86
|
+
catch {
|
|
87
|
+
filesToUpdate.push(relPath);
|
|
88
|
+
}
|
|
89
|
+
}
|
|
90
|
+
catch {
|
|
91
|
+
}
|
|
92
|
+
}
|
|
93
|
+
else if (file.isDirectory()) {
|
|
94
|
+
await checkDirectoryForUpdates(sourcePath, targetPath, filesToUpdate, relPath);
|
|
95
|
+
}
|
|
96
|
+
}
|
|
97
|
+
}
|
|
98
|
+
catch {
|
|
99
|
+
}
|
|
100
|
+
}
|
|
101
|
+
// Convenience wrapper: run a full sync and report only success/failure.
export async function updateDocs() {
    const { success } = await syncDocs();
    return success;
}
|
|
105
|
+
// If any bundled doc file is newer than the project copy, sync them all,
// announcing progress and outcome with ANSI colors (yellow/green/red).
export async function checkAndPromptDocsUpdate() {
    const { needsUpdate, filesToUpdate } = await checkDocsStatus();
    if (!needsUpdate) {
        return;
    }
    const fileCount = filesToUpdate.length;
    const fileList = filesToUpdate.join(', ');
    console.log(`\x1b[33mUpdating ${fileCount} doc file(s): ${fileList}...\x1b[0m`);
    if (await updateDocs()) {
        console.log(`\x1b[32m✓ Documentation updated successfully.\x1b[0m\n`);
    }
    else {
        console.log(`\x1b[31m✗ Failed to update documentation.\x1b[0m\n`);
    }
}
|
|
121
|
+
// Back-compat alias: CLAUDE.md updates are handled by the general
// docs-update flow; resolves to undefined either way.
export async function checkAndPromptClaudeMdUpdate() {
    await checkAndPromptDocsUpdate();
}
|
|
124
|
+
/**
 * Copy the docs bundled with this package into the user's project:
 *   docs/CLAUDE.md      → <cwd>/CLAUDE.md        (best-effort)
 *   docs/**             → <cwd>/docs/**
 *   .claude/commands/** → <cwd>/.claude/commands/** (when shipped)
 * PROJECT_SPECIFIC.md is only seeded when absent (see syncDirectory).
 * Returns { success, syncedFiles } and, on failure, an error message.
 */
export async function syncDocs() {
    const packageDocsDir = join(__dirname, '..', '..', 'docs');
    const packageCommandsDir = join(__dirname, '..', '..', '.claude', 'commands');
    const targetDocsDir = join(process.cwd(), 'docs');
    const targetCommandsDir = join(process.cwd(), '.claude', 'commands');
    const packageClaudeMd = join(packageDocsDir, 'CLAUDE.md');
    const targetClaudeMd = join(process.cwd(), 'CLAUDE.md');
    const syncedFiles = [];
    try {
        // Bail out into the catch (success: false) if the package has no docs.
        await fs.access(packageDocsDir);
        try {
            await fs.copyFile(packageClaudeMd, targetClaudeMd);
            syncedFiles.push('CLAUDE.md');
        }
        catch {
            // CLAUDE.md copy is best-effort; a missing source is not fatal.
        }
        await fs.mkdir(targetDocsDir, { recursive: true });
        await syncDirectory(packageDocsDir, targetDocsDir, syncedFiles, 'docs');
        try {
            await fs.access(packageCommandsDir);
            await fs.mkdir(targetCommandsDir, { recursive: true });
            await syncDirectory(packageCommandsDir, targetCommandsDir, syncedFiles, '.claude/commands');
        }
        catch {
            // .claude/commands is optional in the package.
        }
        return { success: true, syncedFiles };
    }
    catch (error) {
        // syncedFiles may be partially populated when a later copy failed.
        return {
            success: false,
            syncedFiles,
            error: error instanceof Error ? error.message : 'Unknown error'
        };
    }
}
|
|
159
|
+
// Recursively copy source into target (created as needed), recording each
// copied file's project-relative path in syncedFiles. PROJECT_SPECIFIC.md
// is only copied when no target copy exists yet (user-owned file).
// Propagates I/O errors to the caller (syncDocs handles them).
async function syncDirectory(source, target, syncedFiles, relativePath) {
    await fs.mkdir(target, { recursive: true });
    for (const entry of await fs.readdir(source, { withFileTypes: true })) {
        const sourcePath = join(source, entry.name);
        const targetPath = join(target, entry.name);
        const relPath = join(relativePath, entry.name);
        if (entry.isDirectory()) {
            await syncDirectory(sourcePath, targetPath, syncedFiles, relPath);
            continue;
        }
        if (!entry.isFile()) {
            continue;
        }
        if (entry.name === 'PROJECT_SPECIFIC.md') {
            const alreadyThere = await fs.access(targetPath).then(() => true, () => false);
            if (alreadyThere) {
                continue;
            }
        }
        await fs.copyFile(sourcePath, targetPath);
        syncedFiles.push(relPath);
    }
}
|
|
183
|
+
// Legacy entry point kept for older callers; the sync result is
// intentionally discarded.
export async function copyDocs() {
    await syncDocs();
}
|
|
186
|
+
//# sourceMappingURL=docs.js.map
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
import { promises as fs } from 'fs';
|
|
2
|
+
// Errors are appended here, relative to the current working directory.
const ERROR_LOG_PATH = '.butterfly/error_log.txt';
// Best-effort persistent error log. Logging must never crash the CLI, so
// each method swallows its own failures (falling back to the console).
export class ErrorLogger {
    // Append a timestamped entry, creating .butterfly/ on demand.
    static async logError(error) {
        try {
            const entry = `[${new Date().toISOString()}] ${error}\n`;
            await fs.mkdir('.butterfly', { recursive: true });
            await fs.appendFile(ERROR_LOG_PATH, entry, 'utf-8');
        }
        catch (logErr) {
            console.error('Failed to log error:', logErr);
            console.error('Original error:', error);
        }
    }
    // Truncate the log file (created empty when missing).
    static async clearErrorLog() {
        try {
            await fs.mkdir('.butterfly', { recursive: true });
            await fs.writeFile(ERROR_LOG_PATH, '', 'utf-8');
        }
        catch (err) {
            console.warn('Failed to clear error log:', err);
        }
    }
    // Relative path of the log file (async for interface stability).
    static async getErrorLogPath() {
        return ERROR_LOG_PATH;
    }
}
|
|
29
|
+
//# sourceMappingURL=errorLogger.js.map
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
// Type declarations for dist/utils/files.js. The implementation is not in
// this diff chunk, so comments below state only what the signatures show;
// the pervasive `any` mirrors the untyped compiled JS output.
export declare function cleanupDirectory(dirPath: string): Promise<void>;
export declare function saveResource(outputPath: string, resourceType: string, resourceName: string, data: any): Promise<void>;
export declare function saveAiTask(outputPath: string, aiTask: any): Promise<void>;
// The restore* functions take a directory plus previously saved data and
// resolve to a reconstructed payload — presumably the inverse of the
// corresponding save* functions; confirm against files.js.
export declare function restoreTableRecordFromFiles(recordDir: string, tableName: string, recordData: any): Promise<any>;
export declare function restorePageFromFiles(pageDir: string, pageData: any): Promise<any>;
export declare function restoreAiTaskFromFiles(taskDir: string, taskData: any): Promise<any>;
export declare function restoreFieldsFromFiles(baseDir: string, data: any): Promise<any>;
export declare function saveObjectWithSpecs(outputPath: string, tableName: string, objectData: any, specs: any[]): Promise<void>;
export declare function saveTableRecord(outputPath: string, tableName: string, recordData: any, specs?: any[]): Promise<void>;
export declare function savePageData(outputPath: string, pageData: any): Promise<void>;
export declare function saveReportData(outputPath: string, reportData: any, specs: any[], queries: any[], categories: any[]): Promise<void>;
export declare function saveStateMachineData(outputPath: string, stateMachine: any, states: any[], roles: any[], transitions: any[], transitionSpecs: any[], actions: any[], actionSpecs: any[], transitionActions: any[]): Promise<void>;
export declare function saveWorkflowData(outputPath: string, workflow: any, versions: any[], nodes: any[], connections: any[]): Promise<void>;
//# sourceMappingURL=files.d.ts.map
|