@prompd/cli 0.3.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +162 -0
- package/bin/prompd.js +23 -0
- package/dist/commands/cache.d.ts +3 -0
- package/dist/commands/cache.d.ts.map +1 -0
- package/dist/commands/cache.js +199 -0
- package/dist/commands/cache.js.map +1 -0
- package/dist/commands/compile.d.ts +9 -0
- package/dist/commands/compile.d.ts.map +1 -0
- package/dist/commands/compile.js +104 -0
- package/dist/commands/compile.js.map +1 -0
- package/dist/commands/config.d.ts +7 -0
- package/dist/commands/config.d.ts.map +1 -0
- package/dist/commands/config.js +212 -0
- package/dist/commands/config.js.map +1 -0
- package/dist/commands/create.d.ts +3 -0
- package/dist/commands/create.d.ts.map +1 -0
- package/dist/commands/create.js +183 -0
- package/dist/commands/create.js.map +1 -0
- package/dist/commands/deps.d.ts +3 -0
- package/dist/commands/deps.d.ts.map +1 -0
- package/dist/commands/deps.js +192 -0
- package/dist/commands/deps.js.map +1 -0
- package/dist/commands/explain.d.ts +3 -0
- package/dist/commands/explain.d.ts.map +1 -0
- package/dist/commands/explain.js +227 -0
- package/dist/commands/explain.js.map +1 -0
- package/dist/commands/git.d.ts +3 -0
- package/dist/commands/git.d.ts.map +1 -0
- package/dist/commands/git.js +306 -0
- package/dist/commands/git.js.map +1 -0
- package/dist/commands/init.d.ts +3 -0
- package/dist/commands/init.d.ts.map +1 -0
- package/dist/commands/init.js +177 -0
- package/dist/commands/init.js.map +1 -0
- package/dist/commands/list.d.ts +3 -0
- package/dist/commands/list.d.ts.map +1 -0
- package/dist/commands/list.js +126 -0
- package/dist/commands/list.js.map +1 -0
- package/dist/commands/mcp.d.ts +3 -0
- package/dist/commands/mcp.d.ts.map +1 -0
- package/dist/commands/mcp.js +326 -0
- package/dist/commands/mcp.js.map +1 -0
- package/dist/commands/namespace.d.ts +3 -0
- package/dist/commands/namespace.d.ts.map +1 -0
- package/dist/commands/namespace.js +113 -0
- package/dist/commands/namespace.js.map +1 -0
- package/dist/commands/package.d.ts +23 -0
- package/dist/commands/package.d.ts.map +1 -0
- package/dist/commands/package.js +746 -0
- package/dist/commands/package.js.map +1 -0
- package/dist/commands/provider.d.ts +3 -0
- package/dist/commands/provider.d.ts.map +1 -0
- package/dist/commands/provider.js +285 -0
- package/dist/commands/provider.js.map +1 -0
- package/dist/commands/registry.d.ts +9 -0
- package/dist/commands/registry.d.ts.map +1 -0
- package/dist/commands/registry.js +361 -0
- package/dist/commands/registry.js.map +1 -0
- package/dist/commands/run.d.ts +3 -0
- package/dist/commands/run.d.ts.map +1 -0
- package/dist/commands/run.js +157 -0
- package/dist/commands/run.js.map +1 -0
- package/dist/commands/show.d.ts +3 -0
- package/dist/commands/show.d.ts.map +1 -0
- package/dist/commands/show.js +90 -0
- package/dist/commands/show.js.map +1 -0
- package/dist/commands/uninstall.d.ts +3 -0
- package/dist/commands/uninstall.d.ts.map +1 -0
- package/dist/commands/uninstall.js +95 -0
- package/dist/commands/uninstall.js.map +1 -0
- package/dist/commands/validate.d.ts +3 -0
- package/dist/commands/validate.d.ts.map +1 -0
- package/dist/commands/validate.js +57 -0
- package/dist/commands/validate.js.map +1 -0
- package/dist/commands/version.d.ts +3 -0
- package/dist/commands/version.d.ts.map +1 -0
- package/dist/commands/version.js +166 -0
- package/dist/commands/version.js.map +1 -0
- package/dist/index.d.ts +5 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +388 -0
- package/dist/index.js.map +1 -0
- package/dist/lib/auth.d.ts +164 -0
- package/dist/lib/auth.d.ts.map +1 -0
- package/dist/lib/auth.js +388 -0
- package/dist/lib/auth.js.map +1 -0
- package/dist/lib/compiler/file-system.d.ts +178 -0
- package/dist/lib/compiler/file-system.d.ts.map +1 -0
- package/dist/lib/compiler/file-system.js +440 -0
- package/dist/lib/compiler/file-system.js.map +1 -0
- package/dist/lib/compiler/formatters/anthropic.d.ts +21 -0
- package/dist/lib/compiler/formatters/anthropic.d.ts.map +1 -0
- package/dist/lib/compiler/formatters/anthropic.js +95 -0
- package/dist/lib/compiler/formatters/anthropic.js.map +1 -0
- package/dist/lib/compiler/formatters/markdown.d.ts +17 -0
- package/dist/lib/compiler/formatters/markdown.d.ts.map +1 -0
- package/dist/lib/compiler/formatters/markdown.js +114 -0
- package/dist/lib/compiler/formatters/markdown.js.map +1 -0
- package/dist/lib/compiler/formatters/openai.d.ts +21 -0
- package/dist/lib/compiler/formatters/openai.d.ts.map +1 -0
- package/dist/lib/compiler/formatters/openai.js +98 -0
- package/dist/lib/compiler/formatters/openai.js.map +1 -0
- package/dist/lib/compiler/index.d.ts +56 -0
- package/dist/lib/compiler/index.d.ts.map +1 -0
- package/dist/lib/compiler/index.js +165 -0
- package/dist/lib/compiler/index.js.map +1 -0
- package/dist/lib/compiler/language-map.d.ts +31 -0
- package/dist/lib/compiler/language-map.d.ts.map +1 -0
- package/dist/lib/compiler/language-map.js +156 -0
- package/dist/lib/compiler/language-map.js.map +1 -0
- package/dist/lib/compiler/package-resolver.d.ts +68 -0
- package/dist/lib/compiler/package-resolver.d.ts.map +1 -0
- package/dist/lib/compiler/package-resolver.js +254 -0
- package/dist/lib/compiler/package-resolver.js.map +1 -0
- package/dist/lib/compiler/pipeline.d.ts +53 -0
- package/dist/lib/compiler/pipeline.d.ts.map +1 -0
- package/dist/lib/compiler/pipeline.js +209 -0
- package/dist/lib/compiler/pipeline.js.map +1 -0
- package/dist/lib/compiler/prompd-loader.d.ts +108 -0
- package/dist/lib/compiler/prompd-loader.d.ts.map +1 -0
- package/dist/lib/compiler/prompd-loader.js +270 -0
- package/dist/lib/compiler/prompd-loader.js.map +1 -0
- package/dist/lib/compiler/section-override.d.ts +40 -0
- package/dist/lib/compiler/section-override.d.ts.map +1 -0
- package/dist/lib/compiler/section-override.js +296 -0
- package/dist/lib/compiler/section-override.js.map +1 -0
- package/dist/lib/compiler/stages/assets.d.ts +71 -0
- package/dist/lib/compiler/stages/assets.d.ts.map +1 -0
- package/dist/lib/compiler/stages/assets.js +456 -0
- package/dist/lib/compiler/stages/assets.js.map +1 -0
- package/dist/lib/compiler/stages/codegen.d.ts +17 -0
- package/dist/lib/compiler/stages/codegen.d.ts.map +1 -0
- package/dist/lib/compiler/stages/codegen.js +64 -0
- package/dist/lib/compiler/stages/codegen.js.map +1 -0
- package/dist/lib/compiler/stages/dependency.d.ts +38 -0
- package/dist/lib/compiler/stages/dependency.d.ts.map +1 -0
- package/dist/lib/compiler/stages/dependency.js +307 -0
- package/dist/lib/compiler/stages/dependency.js.map +1 -0
- package/dist/lib/compiler/stages/lexical.d.ts +19 -0
- package/dist/lib/compiler/stages/lexical.d.ts.map +1 -0
- package/dist/lib/compiler/stages/lexical.js +92 -0
- package/dist/lib/compiler/stages/lexical.js.map +1 -0
- package/dist/lib/compiler/stages/semantic.d.ts +20 -0
- package/dist/lib/compiler/stages/semantic.d.ts.map +1 -0
- package/dist/lib/compiler/stages/semantic.js +166 -0
- package/dist/lib/compiler/stages/semantic.js.map +1 -0
- package/dist/lib/compiler/stages/template.d.ts +94 -0
- package/dist/lib/compiler/stages/template.d.ts.map +1 -0
- package/dist/lib/compiler/stages/template.js +1044 -0
- package/dist/lib/compiler/stages/template.js.map +1 -0
- package/dist/lib/compiler/types.d.ts +200 -0
- package/dist/lib/compiler/types.d.ts.map +1 -0
- package/dist/lib/compiler/types.js +137 -0
- package/dist/lib/compiler/types.js.map +1 -0
- package/dist/lib/config.d.ts +29 -0
- package/dist/lib/config.d.ts.map +1 -0
- package/dist/lib/config.js +375 -0
- package/dist/lib/config.js.map +1 -0
- package/dist/lib/errors.d.ts +19 -0
- package/dist/lib/errors.d.ts.map +1 -0
- package/dist/lib/errors.js +47 -0
- package/dist/lib/errors.js.map +1 -0
- package/dist/lib/executor.d.ts +18 -0
- package/dist/lib/executor.d.ts.map +1 -0
- package/dist/lib/executor.js +372 -0
- package/dist/lib/executor.js.map +1 -0
- package/dist/lib/git.d.ts +74 -0
- package/dist/lib/git.d.ts.map +1 -0
- package/dist/lib/git.js +254 -0
- package/dist/lib/git.js.map +1 -0
- package/dist/lib/index.d.ts +43 -0
- package/dist/lib/index.d.ts.map +1 -0
- package/dist/lib/index.js +108 -0
- package/dist/lib/index.js.map +1 -0
- package/dist/lib/mcp.d.ts +42 -0
- package/dist/lib/mcp.d.ts.map +1 -0
- package/dist/lib/mcp.js +477 -0
- package/dist/lib/mcp.js.map +1 -0
- package/dist/lib/model-updater.d.ts +51 -0
- package/dist/lib/model-updater.d.ts.map +1 -0
- package/dist/lib/model-updater.js +275 -0
- package/dist/lib/model-updater.js.map +1 -0
- package/dist/lib/parser.d.ts +9 -0
- package/dist/lib/parser.d.ts.map +1 -0
- package/dist/lib/parser.js +197 -0
- package/dist/lib/parser.js.map +1 -0
- package/dist/lib/registry.d.ts +183 -0
- package/dist/lib/registry.d.ts.map +1 -0
- package/dist/lib/registry.js +786 -0
- package/dist/lib/registry.js.map +1 -0
- package/dist/lib/rpc-server.d.ts +78 -0
- package/dist/lib/rpc-server.d.ts.map +1 -0
- package/dist/lib/rpc-server.js +404 -0
- package/dist/lib/rpc-server.js.map +1 -0
- package/dist/lib/security.d.ts +120 -0
- package/dist/lib/security.d.ts.map +1 -0
- package/dist/lib/security.js +478 -0
- package/dist/lib/security.js.map +1 -0
- package/dist/lib/validation.d.ts +106 -0
- package/dist/lib/validation.d.ts.map +1 -0
- package/dist/lib/validation.js +398 -0
- package/dist/lib/validation.js.map +1 -0
- package/dist/lib/version.d.ts +29 -0
- package/dist/lib/version.d.ts.map +1 -0
- package/dist/lib/version.js +202 -0
- package/dist/lib/version.js.map +1 -0
- package/dist/lib/workflow-engine.d.ts +161 -0
- package/dist/lib/workflow-engine.d.ts.map +1 -0
- package/dist/lib/workflow-engine.js +422 -0
- package/dist/lib/workflow-engine.js.map +1 -0
- package/dist/lib/workflow.d.ts +102 -0
- package/dist/lib/workflow.d.ts.map +1 -0
- package/dist/lib/workflow.js +228 -0
- package/dist/lib/workflow.js.map +1 -0
- package/dist/server.d.ts +8 -0
- package/dist/server.d.ts.map +1 -0
- package/dist/server.js +134 -0
- package/dist/server.js.map +1 -0
- package/dist/types/index.d.ts +116 -0
- package/dist/types/index.d.ts.map +1 -0
- package/dist/types/index.js +144 -0
- package/dist/types/index.js.map +1 -0
- package/package.json +104 -0
|
@@ -0,0 +1,746 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
15
|
+
}) : function(o, v) {
|
|
16
|
+
o["default"] = v;
|
|
17
|
+
});
|
|
18
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
19
|
+
var ownKeys = function(o) {
|
|
20
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
21
|
+
var ar = [];
|
|
22
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
23
|
+
return ar;
|
|
24
|
+
};
|
|
25
|
+
return ownKeys(o);
|
|
26
|
+
};
|
|
27
|
+
return function (mod) {
|
|
28
|
+
if (mod && mod.__esModule) return mod;
|
|
29
|
+
var result = {};
|
|
30
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
31
|
+
__setModuleDefault(result, mod);
|
|
32
|
+
return result;
|
|
33
|
+
};
|
|
34
|
+
})();
|
|
35
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
36
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
37
|
+
};
|
|
38
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
39
|
+
exports.createPackageCommand = createPackageCommand;
|
|
40
|
+
exports.createPackCommand = createPackCommand;
|
|
41
|
+
exports.createPackageFromPrompdJson = createPackageFromPrompdJson;
|
|
42
|
+
const commander_1 = require("commander");
|
|
43
|
+
const chalk_1 = __importDefault(require("chalk"));
|
|
44
|
+
const fs = __importStar(require("fs-extra"));
|
|
45
|
+
const path = __importStar(require("path"));
|
|
46
|
+
const archiver_1 = __importDefault(require("archiver"));
|
|
47
|
+
const crypto_1 = require("crypto");
|
|
48
|
+
const security_1 = require("../lib/security");
|
|
49
|
+
const compiler_1 = require("../lib/compiler");
|
|
50
|
+
const index_js_1 = require("../types/index.js");
|
|
51
|
+
/**
 * Shared package creation logic (used by both 'package create' and 'pack').
 *
 * Validates that `source` resolves to an existing directory and that the
 * required metadata options are present, then delegates to
 * packageFromDirectory(). On any validation failure it prints an error
 * and terminates the process with exit code 1.
 *
 * @param {string} source - Source directory path (relative or absolute).
 * @param {string|undefined} output - Optional output .pdpkg file path.
 * @param {{name?: string, version?: string, description?: string, author?: string}} options
 */
async function handlePackageCreate(source, output, options) {
    const sourcePath = path.resolve(source);
    // Print a red error and abort; process.exit never returns.
    const fail = (message) => {
        console.error(chalk_1.default.red(message));
        process.exit(1);
    };
    // The source must exist...
    if (!await fs.pathExists(sourcePath)) {
        fail(`Source not found: ${sourcePath}`);
    }
    // ...and must be a directory, not a single file.
    const sourceStat = await fs.stat(sourcePath);
    if (!sourceStat.isDirectory()) {
        fail(`Source must be a directory: ${sourcePath}`);
    }
    // Directory mode - requires manual parameters
    if (!options?.name || !options?.version || !options?.description) {
        fail('Package creation requires -n/--name, -v/--pkg-version, and -d/--description options');
    }
    await packageFromDirectory(sourcePath, output, options);
}
|
74
|
+
/**
 * Build the 'package' command with its 'create' and 'validate' subcommands.
 *
 * 'package create' packages a directory into a .pdpkg archive;
 * 'package validate' checks an existing .pdpkg archive's structure.
 *
 * @returns {import('commander').Command} The configured top-level command.
 */
function createPackageCommand() {
    // Action handler for 'package create'.
    const runCreate = async (source, output, options) => {
        try {
            // Map pkgVersion to version for backwards compatibility
            if (options?.pkgVersion) {
                options.version = options.pkgVersion;
            }
            await handlePackageCreate(source, output, options);
        }
        catch (error) {
            console.error(chalk_1.default.red(`❌ Package creation failed: ${error.message}`));
            process.exit(1);
        }
    };
    // Action handler for 'package validate'.
    const runValidate = async (filePath) => {
        try {
            const fullPath = path.resolve(filePath);
            // The file must exist on disk before any format checks.
            if (!await fs.pathExists(fullPath)) {
                console.error(chalk_1.default.red(`❌ File not found: ${fullPath}`));
                process.exit(1);
            }
            // Only accept .pdpkg files - packages are archives, not individual .prmd files
            if (!fullPath.endsWith('.pdpkg')) {
                console.error(chalk_1.default.red('❌ Invalid package format!'));
                console.error(chalk_1.default.gray(` File: ${path.basename(filePath)}`));
                console.error(chalk_1.default.gray(' Expected: .pdpkg archive file'));
                console.error(chalk_1.default.gray(' Note: .prmd files are individual prompts, not packages'));
                console.error(chalk_1.default.gray(' Use \'prompd validate\' to validate individual .prmd files'));
                process.exit(1);
            }
            // Validate .pdpkg file structure (throws on any problem).
            await validatePdpkgFile(fullPath);
            console.log(chalk_1.default.green(`✅ Package validation passed: ${path.basename(filePath)}`));
        }
        catch (error) {
            console.error(chalk_1.default.red(`❌ Package validation failed: ${error.message}`));
            process.exit(1);
        }
    };
    const command = new commander_1.Command('package');
    command.description('Package management commands');
    const createCommand = new commander_1.Command('create')
        .description('Create a .pdpkg package from a directory')
        .argument('<source>', 'Source directory')
        .argument('[output]', 'Output .pdpkg file path (optional)')
        .option('-n, --name <name>', 'Package name')
        .option('-v, --pkg-version <version>', 'Package version')
        .option('-d, --description <description>', 'Package description')
        .option('-a, --author <author>', 'Package author')
        .action(runCreate);
    const validateCommand = new commander_1.Command('validate')
        .description('Validate a .pdpkg package archive')
        .argument('<file>', '.pdpkg package file to validate')
        .action(runValidate);
    command.addCommand(createCommand);
    command.addCommand(validateCommand);
    return command;
}
|
135
|
+
/**
 * Package a directory into a .pdpkg archive using the supplied metadata.
 *
 * Derives an output file name from the package name/version when none is
 * given, enforces the .pdpkg extension, builds the manifest, and delegates
 * archive creation (with default exclusions) to createPackage(). Prints a
 * success summary; the file-size line is best-effort.
 *
 * @param {string} sourceDir - Directory whose contents will be packaged.
 * @param {string|undefined} outputPath - Target .pdpkg path (optional).
 * @param {{name?: string, version?: string, description?: string, author?: string}} options
 */
async function packageFromDirectory(sourceDir, outputPath, options = {}) {
    const { name, version, description, author } = options;
    // Derive "<kebab-name>-v<version>.pdpkg" when no output path was given.
    let target = outputPath;
    if (!target) {
        target = `${name.toLowerCase().replace(/\s+/g, '-')}-v${version}.pdpkg`;
    }
    // Normalize the extension so the artifact is always a .pdpkg file.
    if (!target.endsWith('.pdpkg')) {
        target += '.pdpkg';
    }
    const manifest = { name, version, description, author, type: 'package' };
    // Create package with default exclusions
    const exclusions = {
        directories: ['.git', '.prmd', 'node_modules', '__pycache__'],
        patterns: ['*.log', '*.tmp', '*.cache', '.env*']
    };
    await createPackage(sourceDir, target, manifest, exclusions);
    console.log(chalk_1.default.green('✓ Package created successfully!'));
    console.log(chalk_1.default.cyan(` Package: ${target}`));
    try {
        const stats = await fs.stat(target);
        const sizeKB = (stats.size / 1024).toFixed(1);
        console.log(chalk_1.default.gray(` Size: ${sizeKB} KB`));
    }
    catch (error) {
        // Ignore stat errors
    }
}
|
169
|
+
/**
 * Create a .pdpkg ZIP archive from a directory in three passes:
 *   1. recursively scan text files for secrets and abort if any are found;
 *   2. collect file contents and compute per-file SHA-256 hashes;
 *   3. stream everything into a ZIP via archiver, embedding the manifest
 *      (with an integrity map of file hashes) as prompd.json.
 *
 * @param {string} sourceDir - Directory to package.
 * @param {string} outputPath - Destination .pdpkg (ZIP) file path.
 * @param {object} manifest - Base manifest fields (name, version, ...).
 * @param {{directories?: string[], patterns?: string[]}} exclusions - Entries to skip.
 * @returns {Promise<void>} Resolves when the output stream closes.
 * @throws {Error} If any scanned file contains a potential secret.
 */
async function createPackage(sourceDir, outputPath, manifest, exclusions) {
    // First pass: scan for secrets
    const secretsFound = [];
    const scanDir = async (dir, relativePath = '') => {
        const items = await fs.readdir(dir);
        for (const item of items) {
            const itemPath = path.join(dir, item);
            const itemRelPath = path.join(relativePath, item);
            const stat = await fs.stat(itemPath);
            // Honor the same exclusion rules used when collecting files below.
            if (shouldExclude(itemRelPath, stat.isDirectory(), exclusions)) {
                continue;
            }
            if (stat.isDirectory()) {
                await scanDir(itemPath, itemRelPath);
            }
            else {
                // Scan text files for secrets
                const ext = path.extname(itemPath).toLowerCase();
                const textExtensions = ['.prmd', '.txt', '.json', '.yaml', '.yml', '.md', '.js', '.ts', '.py', '.sh', '.env'];
                if (textExtensions.includes(ext)) {
                    const scanResult = await security_1.SecurityManager.scanFileForSecrets(itemPath);
                    if (scanResult.hasSecrets) {
                        scanResult.secrets.forEach(secret => {
                            secretsFound.push({
                                file: itemRelPath,
                                type: secret.type,
                                line: secret.line
                            });
                        });
                    }
                }
            }
        }
    };
    // Scan for secrets before packaging
    await scanDir(sourceDir);
    if (secretsFound.length > 0) {
        console.error(chalk_1.default.red.bold('\n⚠️ SECURITY WARNING: Potential secrets detected!'));
        console.error(chalk_1.default.yellow('\nThe following files contain potential secrets:'));
        secretsFound.forEach(({ file, type, line }) => {
            console.error(chalk_1.default.yellow(` • ${file}:${line} - ${type}`));
        });
        console.error(chalk_1.default.red('\n❌ Package creation blocked to prevent secret exposure.'));
        console.error(chalk_1.default.gray('Please remove secrets from these files before packaging.'));
        console.error(chalk_1.default.gray('Use environment variables or secure vaults for sensitive data.\n'));
        throw new Error('Secrets detected in package contents');
    }
    // Second pass: collect files, apply frontmatter, and compute hashes
    // NOTE(review): this pass uses the synchronous fs APIs (readdirSync etc.)
    // while the first pass is async — presumably intentional, but worth confirming.
    const fileHashes = {};
    const fileContents = [];
    const collectFiles = (dir, relativePath = '') => {
        const items = fs.readdirSync(dir);
        for (const item of items) {
            const itemPath = path.join(dir, item);
            const itemRelPath = path.join(relativePath, item);
            const stat = fs.statSync(itemPath);
            if (shouldExclude(itemRelPath, stat.isDirectory(), exclusions)) {
                continue;
            }
            if (stat.isDirectory()) {
                collectFiles(itemPath, itemRelPath);
            }
            else {
                // ZIP entries always use forward slashes, regardless of host OS.
                const zipPath = itemRelPath.replace(/\\/g, '/');
                // For code files, add frontmatter protection
                if ((0, index_js_1.needsFrontmatterProtection)(itemPath)) {
                    const content = fs.readFileSync(itemPath, 'utf-8');
                    const filename = path.basename(itemPath);
                    // addContentFrontmatter is defined elsewhere in this file/module;
                    // it wraps the content before hashing so the hash covers the
                    // protected form that actually lands in the archive.
                    const protectedContent = addContentFrontmatter(content, filename);
                    fileHashes[zipPath] = (0, crypto_1.createHash)('sha256').update(protectedContent).digest('hex');
                    fileContents.push({ zipPath, content: protectedContent });
                }
                else {
                    // For non-code files, read content for hashing
                    const content = fs.readFileSync(itemPath);
                    fileHashes[zipPath] = (0, crypto_1.createHash)('sha256').update(content).digest('hex');
                    fileContents.push({ zipPath, content });
                }
            }
        }
    };
    collectFiles(sourceDir);
    // Third pass: create the package with integrity hashes
    // new Promise is appropriate here: archiver/stream expose a callback API.
    return new Promise((resolve, reject) => {
        const output = fs.createWriteStream(outputPath);
        const archive = (0, archiver_1.default)('zip', { zlib: { level: 9 } });
        output.on('close', () => resolve());
        archive.on('error', (err) => reject(err));
        archive.pipe(output);
        // Add prompd.json with integrity hashes
        const fullManifest = {
            ...manifest,
            integrity: {
                algorithm: 'sha256',
                files: fileHashes
            }
        };
        archive.append(JSON.stringify(fullManifest, null, 2), { name: 'prompd.json' });
        // Add all files to archive
        for (const { zipPath, content } of fileContents) {
            archive.append(content, { name: zipPath });
        }
        archive.finalize();
    });
}
|
274
|
+
/**
 * Decide whether a path should be excluded from the package.
 *
 * Rules, in order: .pdproj files are always excluded; directories are
 * excluded on an exact-name match against exclusions.directories; any
 * entry is excluded when its base name matches one of the glob patterns
 * in exclusions.patterns ('*' = any chars, '?' = one char).
 *
 * @param {string} relPath - Path relative to the package root.
 * @param {boolean} isDirectory - Whether the entry is a directory.
 * @param {{directories?: string[], patterns?: string[]}} exclusions
 * @returns {boolean} true if the entry must be skipped.
 */
function shouldExclude(relPath, isDirectory, exclusions) {
    const fileName = path.basename(relPath);
    // Always exclude .pdproj files - they're only for packaging metadata
    if (fileName.endsWith('.pdproj')) {
        return true;
    }
    // Check directory exclusions (exact name match)
    if (isDirectory && exclusions.directories) {
        if (exclusions.directories.includes(fileName)) {
            return true;
        }
    }
    // Check pattern exclusions
    if (exclusions.patterns) {
        for (const pattern of exclusions.patterns) {
            // Convert glob pattern to regex. Escape literal dots FIRST so that
            // '*.log' becomes /^.*\.log$/ and no longer matches names like
            // 'catalog' where the '.' was previously treated as a wildcard.
            const regex = new RegExp('^' + pattern
                .replace(/\./g, '\\.')
                .replace(/\*/g, '.*')
                .replace(/\?/g, '.') + '$');
            if (regex.test(fileName)) {
                return true;
            }
        }
    }
    return false;
}
|
300
|
+
/**
 * Validate the structure of a .pdpkg archive (a ZIP file).
 *
 * Performs three checks and throws an Error on the first failure:
 *   1. the file opens as a valid ZIP archive;
 *   2. no entry performs ZIP-slip / path traversal ('..' or absolute paths);
 *   3. a manifest (prompd.json, or legacy manifest.json) exists, parses as
 *      JSON, and contains name, version, and description.
 *
 * @param {string} filePath - Absolute path to the .pdpkg file.
 * @throws {Error} Describing the first structural problem found.
 */
async function validatePdpkgFile(filePath) {
    // For .pdpkg files (ZIP archives), we need to check the structure
    const AdmZip = require('adm-zip');
    let zip;
    try {
        zip = new AdmZip(filePath);
    }
    catch (error) {
        throw new Error(`Failed to open ZIP file: ${error.message}`);
    }
    const entries = zip.getEntries();
    // SECURITY: Check for ZIP slip/directory traversal attacks
    for (const entry of entries) {
        const entryName = entry.entryName;
        const normalizedPath = path.normalize(entryName);
        // Check for path traversal
        if (normalizedPath.includes('..') || path.isAbsolute(entryName)) {
            throw new Error(`Security violation: Path traversal detected in ${entryName}`);
        }
    }
    // Check for prompd.json (or legacy manifest.json for backwards compatibility)
    let manifestFound = false;
    for (const entry of entries) {
        if (entry.entryName === 'prompd.json' || entry.entryName === 'manifest.json') {
            manifestFound = true;
            // Read and validate manifest
            const manifestContent = entry.getData().toString('utf8');
            let manifest;
            try {
                manifest = JSON.parse(manifestContent);
            }
            catch (error) {
                const message = error instanceof Error ? error.message : 'Unknown error';
                throw new Error(`Invalid ${entry.entryName}: ${message}`);
            }
            // Validate required fields
            if (!manifest.name) {
                throw new Error(`Missing 'name' in ${entry.entryName}`);
            }
            if (!manifest.version) {
                throw new Error(`Missing 'version' in ${entry.entryName}`);
            }
            if (!manifest.description) {
                throw new Error(`Missing 'description' in ${entry.entryName}`);
            }
            // Only the first manifest entry found is validated.
            break;
        }
    }
    if (!manifestFound) {
        throw new Error('Missing prompd.json in package');
    }
}
|
352
|
+
/**
 * Creates the 'pack' command (alias for 'package create')
 * Convenient shorthand like 'npm pack'
 *
 * Each option is registered exactly once with both a short and a long flag.
 * The previous version registered every long flag twice (e.g. '--name' and
 * '-n, --name'), which modern Commander rejects as conflicting flags and
 * which at best created redundant, ambiguous option definitions.
 *
 * @returns {import('commander').Command} The configured 'pack' command.
 */
function createPackCommand() {
    const packCommand = new commander_1.Command('pack');
    packCommand
        .description('Create a .pdpkg package (alias for "package create")')
        .argument('<source>', 'Source .pdproj file or directory')
        .argument('[output]', 'Output .pdpkg file path (optional)')
        .option('-n, --name <name>', 'Package name (overrides .pdproj)')
        // '-V' is used for the package version so '-v' stays free for verbosity
        .option('-V, --version <version>', 'Package version (overrides .pdproj)')
        .option('-d, --description <description>', 'Package description (overrides .pdproj)')
        .option('-a, --author <author>', 'Package author (overrides .pdproj)')
        .action(async (source, output, options) => {
        try {
            await handlePackageCreate(source, output, options);
        }
        catch (error) {
            console.error(chalk_1.default.red(`❌ Package creation failed: ${error.message}`));
            process.exit(1);
        }
    });
    return packCommand;
}
|
381
|
+
/** Default file extensions that are valid for packaging.
 * Consumed by discoverPackableFiles() to filter the workspace walk. */
const PACKABLE_EXTENSIONS = [
    '.prmd', '.prompd', '.pdflow', // Prompd files
    '.md', '.txt', // Documentation
    '.json', '.yaml', '.yml', // Config/data files
    '.js', '.ts', '.mjs', '.cjs', // JavaScript/TypeScript
    '.py', '.sh', '.bash', // Scripts
    '.csv', '.xml', // Data files
];
/** Directories to always exclude (matched by exact name during the walk). */
const DEFAULT_EXCLUDE_DIRS = [
    'node_modules', '.git', '.prompd', '__pycache__', '.venv', 'venv',
    'dist', 'build', 'out', '.next', '.nuxt', 'coverage', '.nyc_output',
    '.idea', '.vscode', '.vs',
];
/** Patterns to always exclude (glob-style, see matchesIgnorePattern). */
const DEFAULT_EXCLUDE_PATTERNS = [
    '.env', '.env.*', '*.log', '*.tmp', '*.cache', '*.lock',
    'package-lock.json', 'yarn.lock', 'pnpm-lock.yaml',
    '.DS_Store', 'Thumbs.db', '*.pdpkg', '*.pdproj',
    'dist/**', '.prompd/**', // Build output and installed dependencies
    'prompd.json', // Excluded - we generate this in the archive with populated files array
];
|
404
|
+
/**
 * Check if a file path matches any of the ignore patterns.
 *
 * Each pattern is compiled to a regex where '*' matches anything except '/',
 * '**' matches across path separators, and '?' matches a single character.
 * A pattern matches if it hits the full normalized path or the base name,
 * either as a regex or as an exact string (including as a path suffix).
 *
 * @param {string} filePath - Path (relative or absolute, any separators).
 * @param {string[]} patterns - Glob-style ignore patterns.
 * @returns {boolean} true if any pattern matches.
 */
function matchesIgnorePattern(filePath, patterns) {
    const fileName = path.basename(filePath);
    const normalizedPath = filePath.replace(/\\/g, '/');
    // Compile one glob pattern into the matching regex.
    const toRegex = (pattern) => {
        const body = pattern
            .replace(/\./g, '\\.') // Escape dots
            .replace(/\*\*/g, '<<<GLOBSTAR>>>') // Placeholder for **
            .replace(/\*/g, '[^/]*') // Single * = any chars except /
            .replace(/<<<GLOBSTAR>>>/g, '.*') // ** = any path including /
            .replace(/\?/g, '.'); // ? = single char
        // Anchor at start/end or at a path-segment boundary.
        return new RegExp(`^${body}$|/${body}$|^${body}/|/${body}/`);
    };
    return patterns.some((pattern) => {
        const regex = toRegex(pattern);
        if (regex.test(normalizedPath) || regex.test(fileName)) {
            return true;
        }
        // Also accept exact literal matches and path-suffix matches.
        return fileName === pattern
            || normalizedPath === pattern
            || normalizedPath.endsWith('/' + pattern);
    });
}
|
429
|
+
/**
 * Discover all packable files in a workspace, applying ignore patterns.
 *
 * Walks the tree recursively, skipping DEFAULT_EXCLUDE_DIRS by name and any
 * entry matching DEFAULT_EXCLUDE_PATTERNS plus caller-supplied patterns.
 * Unreadable directories/files are silently skipped. Only files whose
 * extension is in PACKABLE_EXTENSIONS are kept.
 *
 * @param {string} workspacePath - Root directory to walk.
 * @param {string[]} [ignorePatterns] - Extra glob patterns to ignore.
 * @returns {Promise<string[]>} Sorted forward-slash relative paths.
 */
async function discoverPackableFiles(workspacePath, ignorePatterns = []) {
    const found = [];
    // Merge default exclusions with the caller's extra patterns.
    const effectivePatterns = [...DEFAULT_EXCLUDE_PATTERNS, ...ignorePatterns];
    const visit = async (dir, relPrefix = '') => {
        let entries;
        try {
            entries = await fs.readdir(dir);
        }
        catch (err) {
            return; // Skip unreadable directories
        }
        for (const entry of entries) {
            const absPath = path.join(dir, entry);
            const relPath = relPrefix ? path.join(relPrefix, entry) : entry;
            let info;
            try {
                info = await fs.stat(absPath);
            }
            catch (err) {
                continue; // Skip unreadable files
            }
            if (info.isDirectory()) {
                // Prune excluded directory names and ignored paths before recursing.
                if (DEFAULT_EXCLUDE_DIRS.includes(entry)) {
                    continue;
                }
                if (matchesIgnorePattern(relPath, effectivePatterns)) {
                    continue;
                }
                await visit(absPath, relPath);
                continue;
            }
            // Files: drop ignored paths, then keep only packable extensions.
            if (matchesIgnorePattern(relPath, effectivePatterns)) {
                continue;
            }
            if (PACKABLE_EXTENSIONS.includes(path.extname(entry).toLowerCase())) {
                // Use forward slashes for consistency
                found.push(relPath.replace(/\\/g, '/'));
            }
        }
    };
    await visit(workspacePath);
    return found.sort();
}
|
485
|
+
/**
 * Build a .pdpkg package for the workspace described by its prompd.json.
 *
 * Pipeline: locate/parse prompd.json -> validate required metadata ->
 * resolve the file list (explicit `files` array, or auto-discovery via
 * discoverPackableFiles) -> verify every file exists and scan text files for
 * secrets -> compile-check all .prmd files -> write the archive via
 * createPackageFromFiles into the output directory.
 *
 * Expected failures never throw; they come back as `{ success: false, error }`.
 *
 * @param {string} workspacePath - Workspace root containing prompd.json.
 * @param {string} [outputDir] - Destination directory; defaults to <workspace>/dist.
 * @returns {Promise<object>} On success: `{ success: true, outputPath, fileName,
 *   size, fileCount, message, log }`; on failure: `{ success: false, error }`.
 */
async function createPackageFromPrompdJson(workspacePath, outputDir) {
    const prompdJsonPath = path.join(workspacePath, 'prompd.json');
    // 1. Check prompd.json exists
    if (!await fs.pathExists(prompdJsonPath)) {
        return {
            success: false,
            error: 'No prompd.json found in workspace root. Create a prompd.json file first.'
        };
    }
    // 2. Parse prompd.json
    let prompdJson;
    try {
        prompdJson = await fs.readJson(prompdJsonPath);
    }
    catch (parseErr) {
        return {
            success: false,
            error: `Invalid prompd.json: ${parseErr.message}`
        };
    }
    // 3. Validate required fields (name, version, description >= 10 chars, main)
    if (!prompdJson.name) {
        return { success: false, error: 'prompd.json is missing required field: name' };
    }
    if (!prompdJson.version) {
        return { success: false, error: 'prompd.json is missing required field: version' };
    }
    if (!prompdJson.description || prompdJson.description.length < 10) {
        return { success: false, error: 'prompd.json description must be at least 10 characters' };
    }
    if (!prompdJson.main) {
        return { success: false, error: 'prompd.json is missing required field: main (main .prmd entry point)' };
    }
    // 4. Auto-discover files if files array is empty or missing
    let filesToPackage = prompdJson.files || [];
    let autoDiscovered = false;
    if (!Array.isArray(filesToPackage) || filesToPackage.length === 0) {
        // Auto-discover packable files using ignore patterns
        const ignorePatterns = prompdJson.ignore || [];
        filesToPackage = await discoverPackableFiles(workspacePath, ignorePatterns);
        autoDiscovered = true;
        if (filesToPackage.length === 0) {
            return {
                success: false,
                error: 'No packable files found in workspace. Add .prmd, .md, .json, or other valid files.'
            };
        }
    }
    // 5. Validate main file exists
    const mainFilePath = path.join(workspacePath, prompdJson.main);
    if (!await fs.pathExists(mainFilePath)) {
        return { success: false, error: `Main file not found: ${prompdJson.main}` };
    }
    // Ensure main file is included in the files to package
    // (prepended so it is first in the archive's file listing).
    // NOTE(review): when `files` came from prompd.json this unshift mutates
    // the in-memory prompdJson.files array; the file on disk is not touched.
    const mainFileNormalized = prompdJson.main.replace(/\\/g, '/');
    if (!filesToPackage.includes(mainFileNormalized)) {
        filesToPackage.unshift(mainFileNormalized);
    }
    // 6. Validate all files exist and scan for secrets
    const secretsFound = [];
    const missingFiles = [];
    for (const filePath of filesToPackage) {
        const fullPath = path.join(workspacePath, filePath);
        // Check file exists
        if (!await fs.pathExists(fullPath)) {
            missingFiles.push(filePath);
            continue;
        }
        // Scan for secrets in text files (binary-ish extensions are skipped)
        const ext = path.extname(filePath).toLowerCase();
        const textExtensions = ['.prmd', '.txt', '.json', '.yaml', '.yml', '.md', '.js', '.ts', '.py', '.sh', '.env'];
        if (textExtensions.includes(ext)) {
            try {
                const scanResult = await security_1.SecurityManager.scanFileForSecrets(fullPath);
                if (scanResult.hasSecrets) {
                    scanResult.secrets.forEach((secret) => {
                        secretsFound.push({
                            file: filePath,
                            type: secret.type,
                            line: secret.line
                        });
                    });
                }
            }
            catch (scanErr) {
                // A failed scan is a warning, not a packaging failure.
                console.warn(`[Package] Secret scan failed for ${filePath}: ${scanErr.message}`);
            }
        }
    }
    if (missingFiles.length > 0) {
        return {
            success: false,
            error: `Missing files:\n${missingFiles.map(f => ' - ' + f).join('\n')}`
        };
    }
    if (secretsFound.length > 0) {
        const secretsList = secretsFound.map(s => ` - ${s.file}:${s.line} (${s.type})`).join('\n');
        return {
            success: false,
            error: `Secrets detected! Remove before packaging:\n${secretsList}`
        };
    }
    // 6b. Validate all .prmd files compile without errors (check inherits, parameters, etc.)
    const prmdValidationErrors = [];
    const compiler = new compiler_1.PrompdCompiler();
    const fileSystem = new compiler_1.NodeFileSystem();
    for (const filePath of filesToPackage) {
        if (!filePath.endsWith('.prmd'))
            continue;
        const fullPath = path.join(workspacePath, filePath);
        try {
            const context = await compiler.compileWithContext(fullPath, {
                outputFormat: 'markdown',
                fileSystem,
                workspaceRoot: workspacePath
            });
            // Collect errors from compilation (warnings are ignored here)
            const errors = context.getDiagnostics()
                .filter(d => d.severity === 'error')
                .map(d => {
                const location = d.line ? ` (line ${d.line})` : '';
                return `${d.message}${location}`;
            });
            if (errors.length > 0) {
                prmdValidationErrors.push({ file: filePath, errors });
            }
        }
        catch (compileErr) {
            // Compiler threw outright; report the message as the sole error.
            prmdValidationErrors.push({
                file: filePath,
                errors: [compileErr.message || 'Compilation failed']
            });
        }
    }
    if (prmdValidationErrors.length > 0) {
        const errorList = prmdValidationErrors.map(e => ` ${e.file}:\n${e.errors.map(err => ` - ${err}`).join('\n')}`).join('\n');
        return {
            success: false,
            error: `Validation errors in .prmd files:\n${errorList}`
        };
    }
    // 7. Create output directory
    const distDir = outputDir || path.join(workspacePath, 'dist');
    await fs.ensureDir(distDir);
    // 8. Generate output filename (strip namespace for scoped packages)
    const packageName = prompdJson.name.includes('/')
        ? prompdJson.name.split('/')[1]
        : prompdJson.name;
    const outputFileName = `${packageName}-v${prompdJson.version}.pdpkg`;
    const outputPath = path.join(distDir, outputFileName);
    // 9. Create manifest for package (includes files array for archive only)
    const manifest = {
        name: prompdJson.name,
        version: prompdJson.version,
        description: prompdJson.description,
        author: prompdJson.author
    };
    // 10. Create the package
    try {
        // Instead of createPackage (which walks all files), we only include files from the array
        await createPackageFromFiles(workspacePath, outputPath, manifest, filesToPackage, prompdJson.main, prompdJson.readme, prompdJson.ignore // Pass ignore patterns for archive manifest
        );
        const stats = await fs.stat(outputPath);
        const sizeKB = (stats.size / 1024).toFixed(1);
        const autoNote = autoDiscovered ? ' (auto-discovered)' : '';
        // Build detailed log for raw output display
        const logLines = [
            `Package: ${prompdJson.name}@${prompdJson.version}`,
            `Output: dist/${outputFileName}`,
            `Size: ${sizeKB} KB`,
            '',
            `Files included (${filesToPackage.length}):`,
            ...filesToPackage.map(f => ` - ${f}`)
        ];
        return {
            success: true,
            outputPath: outputPath,
            fileName: outputFileName,
            size: stats.size,
            fileCount: filesToPackage.length,
            message: `Package created: dist/${outputFileName} (${sizeKB} KB, ${filesToPackage.length} files${autoNote})`,
            log: logLines.join('\n')
        };
    }
    catch (err) {
        return {
            success: false,
            error: err.message || 'Package creation failed'
        };
    }
}
|
|
676
|
+
/**
 * Add prompd content frontmatter to a file for security.
 * This makes code files non-executable (frontmatter breaks parsing).
 *
 * @param {string} content - The original file content.
 * @param {string} filename - Base name of the file, used to derive the content
 *   type and recorded as `original_filename` in the frontmatter.
 * @returns {string} The frontmatter block followed by the original content.
 */
function addContentFrontmatter(content, filename) {
    const contentType = (0, index_js_1.getContentType)(filename);
    // Fix: interpolate the actual filename; the previous template emitted the
    // literal text "$(unknown)" instead of the file's name.
    const frontmatter = `---
prompd_content_file: true
original_filename: ${filename}
content_type: ${contentType}
---
`;
    return frontmatter + content;
}
|
|
690
|
+
/**
 * Create package from specific file list (not directory walk)
 * The files array is written to the archive's prompd.json (not the filesystem)
 *
 * All inputs are read and hashed up front with async I/O (the previous
 * implementation used blocking fs.readFileSync inside the Promise executor);
 * this also means a read failure rejects before the output write stream is
 * opened, so no empty/partial .pdpkg file is left behind.
 *
 * @param {string} workspacePath - Workspace root the file paths are relative to.
 * @param {string} outputPath - Destination path of the .pdpkg zip archive.
 * @param {object} manifest - Base manifest fields (name/version/description/author).
 * @param {string[]} files - Workspace-relative paths to include.
 * @param {string} mainFile - Entry-point .prmd path recorded in the manifest.
 * @param {string} [readmeFile] - Optional readme path recorded in the manifest.
 * @param {string[]} [ignorePatterns] - Ignore patterns echoed into the archive manifest.
 * @returns {Promise<void>} Resolves when the archive stream has closed.
 */
async function createPackageFromFiles(workspacePath, outputPath, manifest, files, mainFile, readmeFile, ignorePatterns) {
    // Collect file contents and compute integrity hashes before touching the
    // output path, so failures here cannot produce a truncated archive.
    const fileHashes = {};
    const fileContents = [];
    for (const filePath of files) {
        const fullPath = path.join(workspacePath, filePath);
        const zipPath = filePath.replace(/\\/g, '/');
        if ((0, index_js_1.needsFrontmatterProtection)(filePath)) {
            // For code files, add frontmatter protection
            const content = await fs.readFile(fullPath, 'utf-8');
            const filename = path.basename(filePath);
            const protectedContent = addContentFrontmatter(content, filename);
            // Hash the protected content (with frontmatter) - matches what's in archive
            fileHashes[zipPath] = (0, crypto_1.createHash)('sha256').update(protectedContent).digest('hex');
            fileContents.push({ zipPath, content: protectedContent });
        }
        else {
            // For non-code files, read raw bytes for hashing and archiving
            const content = await fs.readFile(fullPath);
            fileHashes[zipPath] = (0, crypto_1.createHash)('sha256').update(content).digest('hex');
            fileContents.push({ zipPath, content });
        }
    }
    // Files array uses original paths (no transformation needed with frontmatter approach)
    const normalizedFiles = files.map(f => f.replace(/\\/g, '/'));
    // Archive-only prompd.json: base manifest plus files array and integrity hashes.
    // This writes the files array to the archive only, not the filesystem.
    const fullManifest = {
        ...manifest,
        main: mainFile,
        readme: readmeFile,
        files: normalizedFiles,
        integrity: {
            algorithm: 'sha256',
            files: fileHashes
        },
        ...(ignorePatterns && ignorePatterns.length > 0 ? { ignore: ignorePatterns } : {})
    };
    // Adapt archiver's stream/event API to a Promise: resolve on stream close,
    // reject on archive error.
    return new Promise((resolve, reject) => {
        const output = fs.createWriteStream(outputPath);
        const archive = (0, archiver_1.default)('zip', { zlib: { level: 9 } });
        output.on('close', () => resolve());
        archive.on('error', (err) => reject(err));
        archive.pipe(output);
        archive.append(JSON.stringify(fullManifest, null, 2), { name: 'prompd.json' });
        // Add all files to archive
        for (const { zipPath, content } of fileContents) {
            archive.append(content, { name: zipPath });
        }
        archive.finalize();
    });
}
|
|
746
|
+
//# sourceMappingURL=package.js.map
|