@tamyla/clodo-framework 4.0.13 → 4.0.14
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +11 -0
- package/README.md +7 -0
- package/dist/cli/commands/create.js +2 -1
- package/dist/middleware/Composer.js +38 -0
- package/dist/middleware/Registry.js +14 -0
- package/dist/middleware/index.js +3 -0
- package/dist/middleware/shared/basicAuth.js +21 -0
- package/dist/middleware/shared/cors.js +28 -0
- package/dist/middleware/shared/index.js +3 -0
- package/dist/middleware/shared/logging.js +14 -0
- package/dist/service-management/GenerationEngine.js +13 -2
- package/dist/service-management/ServiceOrchestrator.js +6 -2
- package/dist/service-management/generators/code/ServiceMiddlewareGenerator.js +156 -10
- package/dist/service-management/generators/code/WorkerIndexGenerator.js +75 -9
- package/dist/simple-api.js +32 -1
- package/docs/MIDDLEWARE_MIGRATION_SUMMARY.md +121 -0
- package/package.json +4 -1
- package/scripts/DEPLOY_COMMAND_NEW.js +128 -0
- package/scripts/README-automated-testing-suite.md +356 -0
- package/scripts/README-test-clodo-deployment.md +157 -0
- package/scripts/README.md +50 -0
- package/scripts/analyze-imports.ps1 +104 -0
- package/scripts/analyze-mixed-code.js +163 -0
- package/scripts/analyze-mixed-rationale.js +149 -0
- package/scripts/automated-testing-suite.js +776 -0
- package/scripts/deployment/README.md +31 -0
- package/scripts/deployment/deploy-domain.ps1 +449 -0
- package/scripts/deployment/deploy-staging.js +120 -0
- package/scripts/deployment/validate-staging.js +166 -0
- package/scripts/diagnose-imports.js +362 -0
- package/scripts/framework-diagnostic.js +368 -0
- package/scripts/migration/migrate-middleware-legacy-to-contract.js +47 -0
- package/scripts/post-publish-test.js +663 -0
- package/scripts/scan-worker-issues.js +52 -0
- package/scripts/service-management/README.md +27 -0
- package/scripts/service-management/setup-interactive.ps1 +693 -0
- package/scripts/test-clodo-deployment.js +588 -0
- package/scripts/test-downstream-install.js +237 -0
- package/scripts/test-local-package.ps1 +126 -0
- package/scripts/test-local-package.sh +166 -0
- package/scripts/test-package.js +339 -0
- package/scripts/testing/README.md +49 -0
- package/scripts/testing/test-first.ps1 +0 -0
- package/scripts/testing/test-first50.ps1 +0 -0
- package/scripts/testing/test.ps1 +0 -0
- package/scripts/utilities/README.md +61 -0
- package/scripts/utilities/check-bin.js +8 -0
- package/scripts/utilities/check-bundle.js +23 -0
- package/scripts/utilities/check-dist-imports.js +65 -0
- package/scripts/utilities/check-import-paths.js +191 -0
- package/scripts/utilities/cleanup-cli.js +159 -0
- package/scripts/utilities/deployment-helpers.ps1 +199 -0
- package/scripts/utilities/fix-dist-imports.js +135 -0
- package/scripts/utilities/generate-secrets.js +159 -0
- package/scripts/utilities/safe-push.ps1 +51 -0
- package/scripts/utilities/setup-helpers.ps1 +206 -0
- package/scripts/utilities/test-packaged-artifact.js +92 -0
- package/scripts/utilities/validate-dist-imports.js +189 -0
- package/scripts/utilities/validate-schema.js +102 -0
- package/scripts/verify-exports.js +193 -0
- package/scripts/verify-worker-safety.js +73 -0
- package/types/middleware.d.ts +1 -0
|
@@ -0,0 +1,159 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* Secret Generation Utility
|
|
5
|
+
* Generates cryptographically secure secrets for production deployment
|
|
6
|
+
* Now with secret persistence for cross-service sharing
|
|
7
|
+
*/
|
|
8
|
+
|
|
9
|
+
import { randomBytes } from 'crypto';
import { writeFileSync, readFileSync, mkdirSync, existsSync } from 'fs';
import { join, dirname } from 'path';
import { fileURLToPath, pathToFileURL } from 'url';
|
|
13
|
+
|
|
14
|
+
// ESM modules have no __dirname; derive it from this module's URL.
const __dirname = dirname(fileURLToPath(import.meta.url));
// This script lives in scripts/utilities/, so the repo root is two levels up.
const PROJECT_ROOT = join(__dirname, '..', '..');
// Per-domain secret files are persisted here for cross-service reuse.
const SECRETS_DIR = join(PROJECT_ROOT, 'secrets');
|
|
17
|
+
|
|
18
|
+
/**
 * Produce a cryptographically secure random secret.
 *
 * @param {number} [length=32] - Number of random bytes to draw.
 * @returns {string} Hex-encoded secret (2 * length characters).
 */
function generateSecret(length = 32) {
  const entropy = randomBytes(length);
  return entropy.toString('hex');
}
|
|
21
|
+
|
|
22
|
+
/**
 * Generate (or reuse) the full set of inter-service secrets.
 *
 * @param {object} [options]
 * @param {string} [options.domainName] - When set, secrets are persisted to
 *   `secrets/<domainName>-secrets.json` and reloaded on later runs so every
 *   service for that domain shares the same credentials.
 * @param {boolean} [options.reuseSecrets=false] - NOTE(review): currently
 *   unused - existing secrets are always reused whenever the domain file
 *   exists. Kept for interface compatibility with the --reuse CLI flag.
 * @param {boolean} [options.saveToFile=false] - Force persisting to disk
 *   (implied by domainName).
 * @param {string} [options.environment='production'] - Environment name used
 *   in the printed wrangler commands.
 * @returns {Record<string, string>} Map of secret name -> hex value.
 */
function generateSecrets(options = {}) {
  const { domainName, reuseSecrets = false, saveToFile = false, environment = 'production' } = options;

  console.log('š PRODUCTION SECRETS GENERATOR');
  console.log('================================');
  console.log('');
  console.log('ā ļø IMPORTANT: Store these secrets securely and never commit them to git!');
  console.log('');

  let secrets = {};
  let secretsFile = null;

  // Set up file paths if domain name provided
  if (domainName) {
    if (!existsSync(SECRETS_DIR)) {
      mkdirSync(SECRETS_DIR, { recursive: true });
    }
    secretsFile = join(SECRETS_DIR, `${domainName}-secrets.json`);
  }

  // Always try to reuse existing secrets for consistency across services
  if (secretsFile && existsSync(secretsFile)) {
    try {
      const existing = JSON.parse(readFileSync(secretsFile, 'utf8'));
      // Strip the bookkeeping keys; everything else is a secret value.
      const { domain: _domain, environment: _env, generated: _gen, note: _note, ...existingSecrets } = existing;
      secrets = existingSecrets;
      console.log(`š Reusing existing secrets for domain '${domainName}' (consistent across services)`);
      console.log(`š Originally generated: ${existing.generated}`);
      console.log('');
      console.log('š IMPORTANT: Using existing secrets ensures all services can authenticate with each other');
    } catch (error) {
      // Corrupt or unreadable file: fall through and regenerate everything.
      console.log('ā ļø Could not load existing secrets, generating new ones...');
    }
  } else if (domainName) {
    console.log(`š Creating new secrets for domain '${domainName}'`);
  }

  // Generate any missing secrets (values loaded from disk are never overwritten)
  if (!secrets.AUTH_JWT_SECRET) secrets.AUTH_JWT_SECRET = generateSecret(32);
  if (!secrets.X_SERVICE_KEY) secrets.X_SERVICE_KEY = generateSecret(32);
  if (!secrets.AUTH_SERVICE_API_KEY) secrets.AUTH_SERVICE_API_KEY = generateSecret(24);
  if (!secrets.LOGGER_SERVICE_API_KEY) secrets.LOGGER_SERVICE_API_KEY = generateSecret(24);
  if (!secrets.CONTENT_SKIMMER_API_KEY) secrets.CONTENT_SKIMMER_API_KEY = generateSecret(24);

  // Save secrets to file if requested
  if ((saveToFile || domainName) && secretsFile) {
    try {
      const secretsData = {
        ...secrets,
        domain: domainName,
        environment,
        generated: new Date().toISOString(),
        note: 'Generated by generate-secrets.js - Share with other services for authentication'
      };
      // FIX: restrict the secrets file to the owner (0600). The previous call
      // used the default mode, leaving credentials world-readable on POSIX.
      // (The mode option is ignored on Windows.)
      writeFileSync(secretsFile, JSON.stringify(secretsData, null, 2), { mode: 0o600 });
      console.log(`š¾ Secrets saved to: ${secretsFile}`);
      console.log('š Other services can load these secrets for consistent authentication');
    } catch (error) {
      // Best-effort persistence: a failed save still returns usable secrets.
      console.log(`ā ļø Could not save secrets to file: ${error.message}`);
    }
  }

  console.log('');
  console.log('Generated secrets:');
  console.log('');

  for (const [key, value] of Object.entries(secrets)) {
    console.log(`${key}=${value}`);
  }

  console.log('');
  console.log('š To set these in Cloudflare Workers:');
  console.log('');

  for (const [key, value] of Object.entries(secrets)) {
    console.log(`echo "${value}" | wrangler secret put ${key} --env ${environment}`);
  }

  console.log('');
  console.log(`For other environments, replace --env ${environment} with --env staging or --env development`);
  console.log('');
  console.log('š” You can also set these individually when prompted by the deployment script.');

  return secrets;
}
|
|
107
|
+
|
|
108
|
+
// Command line interface
/**
 * Parse CLI flags into an options object for generateSecrets().
 *
 * Generalized (backward-compatibly) to accept an explicit argv array so the
 * parser can be unit-tested without mutating process.argv; calling it with no
 * arguments behaves exactly as before.
 *
 * @param {string[]} [args=process.argv.slice(2)] - Raw CLI arguments.
 * @returns {{domainName?: string, saveToFile?: boolean, reuseSecrets?: boolean, environment?: string}}
 */
function parseArgs(args = process.argv.slice(2)) {
  const options = {};

  for (let i = 0; i < args.length; i++) {
    switch (args[i]) {
      case '--domain':
        // --domain implies persistence so the secrets can be reused later.
        options.domainName = args[++i];
        options.saveToFile = true;
        break;
      case '--reuse':
        options.reuseSecrets = true;
        break;
      case '--environment':
      case '--env':
        options.environment = args[++i];
        break;
      case '--save':
        options.saveToFile = true;
        break;
      case '--help':
        console.log(`
Usage: node generate-secrets.js [options]

Options:
  --domain <name>      Domain name (enables file saving)
  --reuse              Try to reuse existing secrets
  --env <environment>  Environment (production, staging, development)
  --save               Force save to file
  --help               Show this help

Examples:
  node generate-secrets.js                                      # Generate one-time secrets
  node generate-secrets.js --domain newclient --env production  # Create domain secrets (reusable)
  node generate-secrets.js --domain newclient                   # Reuse existing domain secrets
`);
        process.exit(0);
        break;
      // Unknown flags are deliberately ignored (pre-existing behavior).
    }
  }

  return options;
}
|
|
152
|
+
|
|
153
|
+
// Run if called directly
|
|
154
|
+
if (process.argv[1] && import.meta.url.endsWith(process.argv[1].replace(/\\/g, '/'))) {
|
|
155
|
+
const options = parseArgs();
|
|
156
|
+
generateSecrets(options);
|
|
157
|
+
}
|
|
158
|
+
|
|
159
|
+
export { generateSecret, generateSecrets };
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
# Clodo Framework - Safe Push Workflow
# This script prevents the "rejected push" issue when working with semantic-release
#
# Flow: fetch -> rebase onto origin/master when behind -> push only when ahead.
# NOTE(review): the branch 'origin/master' is hard-coded; confirm the default
# branch name before reusing this script in another repository.

Write-Host "`nClodo Framework - Safe Push Workflow`n" -ForegroundColor Cyan

# Step 1: Fetch latest changes
Write-Host "Fetching latest changes from remote..." -ForegroundColor Yellow
git fetch

if ($LASTEXITCODE -ne 0) {
    Write-Host "Failed to fetch from remote" -ForegroundColor Red
    exit 1
}

# Step 2: Check if we're behind
# 2>$null swallows git errors (e.g. origin/master missing); $behind is then
# $null and the rebase branch is skipped.
$behind = git rev-list HEAD..origin/master --count 2>$null
if ($behind -gt 0) {
    Write-Host "Local branch is $behind commit(s) behind remote" -ForegroundColor Yellow
    Write-Host "Rebasing local changes on top of remote..." -ForegroundColor Yellow

    git rebase origin/master

    if ($LASTEXITCODE -ne 0) {
        # Leave the repo mid-rebase for the user to resolve manually.
        Write-Host "Rebase failed - resolve conflicts and run 'git rebase --continue'" -ForegroundColor Red
        exit 1
    }

    Write-Host "Rebase successful" -ForegroundColor Green
} else {
    Write-Host "Local branch is up to date" -ForegroundColor Green
}

# Step 3: Check if we're ahead (have commits to push)
$ahead = git rev-list origin/master..HEAD --count 2>$null
if ($ahead -eq 0) {
    Write-Host "`nNothing to push - already up to date`n" -ForegroundColor Green
    exit 0
}

Write-Host "Pushing $ahead commit(s) to remote..." -ForegroundColor Yellow

# Step 4: Push
git push

if ($LASTEXITCODE -eq 0) {
    Write-Host "`nPush successful!" -ForegroundColor Green
    Write-Host "Semantic-release will run shortly and create a new version`n" -ForegroundColor Cyan
} else {
    Write-Host "`nPush failed" -ForegroundColor Red
    exit 1
}
|
|
@@ -0,0 +1,206 @@
|
|
|
1
|
+
# PowerShell Setup Helper Functions
# Interactive setup utilities for Clodo Framework services

function Get-UserInput {
    <#
    .SYNOPSIS
        Prompts the user for a free-form value, with an optional default and
        optional "must not be empty" enforcement.
    .PARAMETER Prompt
        Text shown to the user. When a default exists it is appended in
        brackets, e.g. "Prompt [default]".
    .PARAMETER DefaultValue
        Value returned when the user just presses Enter.
    .PARAMETER Required
        When set, re-prompts until a non-empty value is entered (a non-empty
        DefaultValue also satisfies the requirement).
    .OUTPUTS
        [string] The entered (or defaulted) value.
    #>
    param(
        [Parameter(Mandatory=$true)]
        [string]$Prompt,

        [Parameter(Mandatory=$false)]
        [string]$DefaultValue = "",

        [Parameter(Mandatory=$false)]
        [switch]$Required
    )

    do {
        if ($DefaultValue) {
            $userInput = Read-Host "$Prompt [$DefaultValue]"
            # Empty input means "accept the default".
            if ([string]::IsNullOrEmpty($userInput)) {
                $userInput = $DefaultValue
            }
        } else {
            $userInput = Read-Host $Prompt
        }

        if ($Required -and [string]::IsNullOrEmpty($userInput)) {
            Write-Host "This field is required. Please provide a value." -ForegroundColor Red
        }
    } while ($Required -and [string]::IsNullOrEmpty($userInput))

    return $userInput
}
|
|
33
|
+
|
|
34
|
+
function Get-UserChoice {
    <#
    .SYNOPSIS
        Presents a numbered menu and returns the chosen option's text.
    .PARAMETER Prompt
        Heading displayed above the menu.
    .PARAMETER Options
        The selectable option strings (rendered 1-based).
    .PARAMETER DefaultIndex
        Zero-based index of the option returned when the user presses Enter.
    .OUTPUTS
        [string] The selected option's value (not its menu number).
    #>
    param(
        [Parameter(Mandatory=$true)]
        [string]$Prompt,

        [Parameter(Mandatory=$true)]
        [string[]]$Options,

        [Parameter(Mandatory=$false)]
        [int]$DefaultIndex = 0
    )

    Write-Host $Prompt -ForegroundColor Yellow

    # Render the 1-based menu, flagging the default entry.
    for ($i = 0; $i -lt $Options.Length; $i++) {
        $marker = if ($i -eq $DefaultIndex) { " (default)" } else { "" }
        Write-Host " $($i + 1). $($Options[$i])$marker" -ForegroundColor White
    }

    # Re-prompt until the user enters a valid 1..N number (or nothing,
    # which selects the default).
    do {
        $choice = Read-Host "Enter choice (1-$($Options.Length))"

        if ([string]::IsNullOrEmpty($choice)) {
            return $Options[$DefaultIndex]
        }

        # TryParse with [ref]$null is only a parseability test; the actual
        # value comes from the [int] cast below.
        if ([int]::TryParse($choice, [ref]$null)) {
            $index = [int]$choice - 1
            if ($index -ge 0 -and $index -lt $Options.Length) {
                return $Options[$index]
            }
        }

        Write-Host "Invalid choice. Please select 1-$($Options.Length)." -ForegroundColor Red
    } while ($true)
}
|
|
70
|
+
|
|
71
|
+
function Test-ServiceName {
    <#
    .SYNOPSIS
        Validates a candidate service name.
    .DESCRIPTION
        A valid name contains only lowercase letters, digits and hyphens and
        is between 3 and 50 characters long. The character-set check runs
        first, so its message wins when both rules are violated.
    .PARAMETER ServiceName
        The name to validate.
    .OUTPUTS
        [hashtable] @{ Valid = [bool]; Error = [string] or $null }
    #>
    param(
        [Parameter(Mandatory=$true)]
        [string]$ServiceName
    )

    $result = @{
        Valid = $true
        Error = $null
    }

    if ($ServiceName -notmatch '^[a-z0-9-]+$') {
        $result.Valid = $false
        $result.Error = "Service name must contain only lowercase letters, numbers, and hyphens"
    } elseif ($ServiceName.Length -lt 3 -or $ServiceName.Length -gt 50) {
        $result.Valid = $false
        $result.Error = "Service name must be between 3 and 50 characters"
    }

    return $result
}
|
|
96
|
+
|
|
97
|
+
function Test-DomainName {
    <#
    .SYNOPSIS
        Validates a fully-qualified domain name.
    .DESCRIPTION
        FIX: the previous pattern
        '^[a-zA-Z0-9][a-zA-Z0-9-]*[a-zA-Z0-9]*\.[a-zA-Z]{2,}$'
        accepted labels ending in a hyphen (e.g. 'a-.com') and rejected every
        multi-label domain (e.g. 'api.example.com') because the label class
        excluded dots. The new pattern accepts one or more labels (each
        starting and ending with an alphanumeric character) followed by an
        alphabetic TLD of at least two characters.
    .PARAMETER DomainName
        The domain to validate.
    .OUTPUTS
        [hashtable] @{ Valid = [bool]; Error = [string] or $null }
    #>
    param(
        [Parameter(Mandatory=$true)]
        [string]$DomainName
    )

    if ($DomainName -notmatch '^([a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{2,}$') {
        return @{
            Valid = $false
            Error = "Domain name format is invalid"
        }
    }

    return @{
        Valid = $true
        Error = $null
    }
}
|
|
115
|
+
|
|
116
|
+
function Show-SetupSummary {
    <#
    .SYNOPSIS
        Prints the collected configuration and asks the user to confirm it.
    .PARAMETER Configuration
        Hashtable of setting name -> value gathered by the setup wizard.
    .OUTPUTS
        [bool] $true when the user answers y/Y/yes, otherwise $false
        (Enter alone therefore means "no").
    #>
    param(
        [Parameter(Mandatory=$true)]
        [hashtable]$Configuration
    )

    Write-Host "`nš Configuration Summary:" -ForegroundColor Cyan
    Write-Host "=========================" -ForegroundColor Cyan

    foreach ($key in $Configuration.Keys) {
        $value = $Configuration[$key]
        # Mask secret-looking entries: 8 asterisks plus the last 4 characters.
        # NOTE(review): assumes such values are strings - .Substring/.Length
        # would fail on non-string values; confirm callers only pass strings.
        if ($key -like "*token*" -or $key -like "*secret*" -or $key -like "*key*") {
            $value = "*" * 8 + $value.Substring([Math]::Max(0, $value.Length - 4))
        }
        Write-Host " $key`: $value" -ForegroundColor White
    }

    Write-Host "`nProceed with this configuration? (y/N)" -ForegroundColor Yellow -NoNewline
    $confirm = Read-Host

    return $confirm -eq 'y' -or $confirm -eq 'Y' -or $confirm -eq 'yes'
}
|
|
138
|
+
|
|
139
|
+
function New-ServiceDirectory {
    <#
    .SYNOPSIS
        Creates (or, after confirmation, re-creates) the target service directory.
    .PARAMETER ServiceName
        Directory name to create under OutputPath.
    .PARAMETER OutputPath
        Parent directory for the new service.
    .OUTPUTS
        [string] Full path of the created directory.
    .NOTES
        Throws when the directory exists and the user declines to overwrite.
        Confirming the overwrite deletes the existing tree irreversibly
        (Remove-Item -Recurse -Force).
    #>
    param(
        [Parameter(Mandatory=$true)]
        [string]$ServiceName,

        [Parameter(Mandatory=$true)]
        [string]$OutputPath
    )

    $servicePath = Join-Path $OutputPath $ServiceName

    if (Test-Path $servicePath) {
        Write-Host "Directory already exists: $servicePath" -ForegroundColor Yellow
        Write-Host "Overwrite existing directory? (y/N)" -ForegroundColor Yellow -NoNewline
        $overwrite = Read-Host

        if ($overwrite -eq 'y' -or $overwrite -eq 'Y' -or $overwrite -eq 'yes') {
            Remove-Item $servicePath -Recurse -Force
        } else {
            throw "Service directory already exists and user chose not to overwrite"
        }
    }

    New-Item -ItemType Directory -Path $servicePath -Force | Out-Null
    Write-Host "ā Created service directory: $servicePath" -ForegroundColor Green

    return $servicePath
}
|
|
167
|
+
|
|
168
|
+
function Get-ServiceTypeFeatures {
    <#
    .SYNOPSIS
        Returns the default feature list for a known service type.
    .DESCRIPTION
        FIX: the original returned '$features[$ServiceType] -or @()'. In
        PowerShell the -or operator always yields a [bool], so callers
        received $true (known type) or $false (unknown type) instead of the
        feature array. The lookup is now explicit.
    .PARAMETER ServiceType
        One of: data-service, auth-service, content-service, api-gateway,
        generic. Anything else yields an empty list.
    .OUTPUTS
        [string[]] Feature names (empty array for unknown types).
    #>
    param(
        [Parameter(Mandatory=$true)]
        [string]$ServiceType
    )

    $features = @{
        "data-service" = @("Authentication", "Authorization", "File Storage", "Search", "Filtering", "Pagination")
        "auth-service" = @("Authentication", "Authorization", "User Profiles", "Email Notifications", "Magic Link Auth")
        "content-service" = @("File Storage", "Search", "Filtering", "Pagination", "Caching")
        "api-gateway" = @("Authentication", "Authorization", "Rate Limiting", "Caching", "Monitoring")
        "generic" = @("Logging", "Monitoring", "Error Reporting")
    }

    if ($features.ContainsKey($ServiceType)) {
        return $features[$ServiceType]
    }

    # Unary comma stops PowerShell from unrolling the empty array to $null.
    return ,@()
}
|
|
184
|
+
|
|
185
|
+
function Show-WelcomeMessage {
    <#
    .SYNOPSIS
        Prints the setup wizard's welcome banner and waits for Enter.
    .NOTES
        The trailing Read-Host blocks until the user presses Enter; Ctrl+C
        aborts the wizard as advertised in the banner text.
    #>
    Write-Host @"
š Clodo Framework Interactive Setup Wizard
==========================================

This wizard will guide you through creating a new Clodo service.
You'll be prompted for the following information:

⢠Service Name (required)
⢠Service Type (required)
⢠Domain Name (required)
⢠Cloudflare Configuration (required)
⢠Additional Options (optional)

Press Enter to continue or Ctrl+C to exit...
"@ -ForegroundColor Cyan

    Read-Host
}

# Export functions for use by other scripts
# NOTE(review): Export-ModuleMember only works when this file is imported as
# a module (Import-Module); it errors when the file is merely dot-sourced -
# confirm how consumers load these helpers.
Export-ModuleMember -Function *
|
|
@@ -0,0 +1,92 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* Test Packaged Artifact
|
|
5
|
+
*
|
|
6
|
+
* Steps:
|
|
7
|
+
* 1. Run npm pack and get tarball name
|
|
8
|
+
* 2. Create temp directory, initialize npm project
|
|
9
|
+
* 3. Install the tarball into temp project
|
|
10
|
+
* 4. Require the package and a couple of internal modules/cli entry points
|
|
11
|
+
*
|
|
12
|
+
* Exit non-zero on any failure.
|
|
13
|
+
*/
|
|
14
|
+
import fs from 'fs';
|
|
15
|
+
import path from 'path';
|
|
16
|
+
import { execSync } from 'child_process';
|
|
17
|
+
import os from 'os';
|
|
18
|
+
|
|
19
|
+
const cwd = process.cwd();

// Track created artifacts in the outer scope so both the success and the
// failure paths can clean them up.
let tarPath = null;
let tmpDir = null;

// Best-effort removal of the npm tarball and the temp install project.
// FIX: previously the temp directory was leaked on every run and the tarball
// was leaked on failure.
function cleanup() {
  if (tarPath) {
    try { fs.unlinkSync(tarPath); } catch (e) { /* best-effort */ }
  }
  if (tmpDir) {
    try { fs.rmSync(tmpDir, { recursive: true, force: true }); } catch (e) { /* best-effort */ }
  }
}

try {
  console.log('Packing package...');
  // npm pack may print extra lines; the tarball name is always the last one.
  const tarball = execSync('npm pack', { cwd, stdio: 'pipe' }).toString().trim().split('\n').pop();
  tarPath = path.join(cwd, tarball);
  console.log('Tarball:', tarPath);

  tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'clodo-pack-'));
  console.log('Using temp dir:', tmpDir);
  // Minimal package.json so npm install has a project to install into.
  fs.writeFileSync(path.join(tmpDir, 'package.json'), JSON.stringify({ name: 'clodo-test-temp', version: '0.0.0' }));

  console.log('Installing tarball into temp project...');
  execSync(`npm install "${tarPath}" --no-audit --no-fund --no-package-lock`, { cwd: tmpDir, stdio: 'inherit' });

  console.log('Running smoke checks...');
  // Run a node script that requires the package and exercises the CLI via installed bin
  const nodeScript = `try {
  const path = require('path');
  const { execSync } = require('child_process');
  const pkg = require('@tamyla/clodo-framework');
  console.log('package loaded, exports count:', Object.keys(pkg).length);
  // require a named export that is part of the public exports map
  const services = require('@tamyla/clodo-framework/services');
  console.log('named export services loaded');
  // Verify middleware export and included migration script/docs
  let middleware;
  try {
    middleware = require('@tamyla/clodo-framework/middleware');
    console.log('middleware export loaded, keys:', Object.keys(middleware));
  } catch (e) {
    console.error('middleware export missing:', e && e.stack || e);
    throw e;
  }

  if (!middleware || !middleware.MiddlewareComposer) {
    throw new Error('MiddlewareComposer not exported from @tamyla/clodo-framework/middleware');
  }

  const migrationScript = path.join(process.cwd(), 'node_modules', '@tamyla', 'clodo-framework', 'scripts', 'migration', 'migrate-middleware-legacy-to-contract.js');
  if (!require('fs').existsSync(migrationScript)) {
    throw new Error('Migration script missing in packaged artifact: ' + migrationScript);
  }
  const migrationDoc = path.join(process.cwd(), 'node_modules', '@tamyla', 'clodo-framework', 'docs', 'MIDDLEWARE_MIGRATION_SUMMARY.md');
  if (!require('fs').existsSync(migrationDoc)) {
    throw new Error('Migration doc missing in packaged artifact: ' + migrationDoc);
  }
  // Execute CLI via the installed dist path to avoid shell wrapper issues on Windows
  const pkgRoot = path.join(process.cwd(), 'node_modules', '@tamyla', 'clodo-framework');
  const cliPath = path.join(pkgRoot, 'dist', 'cli', 'clodo-service.js');
  // Run --version and --help to ensure CLI executes successfully
  execSync('node ' + JSON.stringify(cliPath) + ' --version', { stdio: 'inherit' });
  execSync('node ' + JSON.stringify(cliPath) + ' --help', { stdio: 'ignore' });
  console.log('cli executed successfully');
  process.exit(0);
} catch (err) {
  console.error('Smoke check failed:', err && err.stack || err);
  process.exit(2);
}
`;

  // Write the smoke script to a file in the temp project and run it directly to avoid shell quoting issues
  const smokeScriptPath = path.join(tmpDir, 'smoke-test.js');
  fs.writeFileSync(smokeScriptPath, nodeScript.replace(/\r\n/g, '\n'));
  execSync(`node "${smokeScriptPath}"`, { cwd: tmpDir, stdio: 'inherit' });

  console.log('\nAll packaged-artifact smoke checks passed.');
  // Clean up tarball and temp project to avoid leaving artifacts
  cleanup();
  process.exit(0);
} catch (err) {
  console.error('\nPackaged artifact test failed:', err && err.stack || err);
  cleanup();
  process.exit(1);
}
|
|
@@ -0,0 +1,189 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* CRITICAL: Pre-Publish Distribution Import Validator
|
|
5
|
+
*
|
|
6
|
+
* This script ACTUALLY TESTS that all exports can be imported from dist/
|
|
7
|
+
* It catches import path issues that static validators miss.
|
|
8
|
+
*
|
|
9
|
+
* Must run AFTER babel compilation (in postbuild)
|
|
10
|
+
* Prevents publishing packages with broken imports
|
|
11
|
+
*
|
|
12
|
+
* Exit codes:
|
|
13
|
+
* - 0: All exports load successfully
|
|
14
|
+
* - 1: One or more imports failed
|
|
15
|
+
*/
|
|
16
|
+
|
|
17
|
+
import fs from 'fs';
|
|
18
|
+
import path from 'path';
|
|
19
|
+
import { fileURLToPath } from 'url';
|
|
20
|
+
|
|
21
|
+
// ESM has no __dirname; derive it from this module's URL.
const __dirname = path.dirname(fileURLToPath(import.meta.url));
// This script lives in scripts/utilities/, so the repo root is two levels up.
const projectRoot = path.join(__dirname, '../../');
// Compiled output directory that every package.json export must point into.
const distPath = path.join(projectRoot, 'dist');
|
|
24
|
+
|
|
25
|
+
// ANSI color codes used for all console output in this validator.
const colors = {
  reset: '\x1b[0m',
  green: '\x1b[32m',
  red: '\x1b[31m',
  yellow: '\x1b[33m',
  blue: '\x1b[34m',
  cyan: '\x1b[36m',
};

/**
 * Print a line wrapped in the given ANSI color, resetting afterwards.
 *
 * @param {string} color - One of the escape sequences from `colors`.
 * @param {string} text - Message to print.
 */
function log(color, text) {
  console.log(color + text + colors.reset);
}
|
|
38
|
+
|
|
39
|
+
/**
 * Validates everything package.json promises against the built dist/ output.
 *
 * Checks performed:
 *   1. Main entry ('.'): file exists AND a dynamic import() actually succeeds.
 *   2. Named exports: file exists, ends in .js, and does not leak dist/internal/.
 *      (Existence-only: the import graph is exercised through the main entry.)
 *   3. bin entries: file exists and starts with a '#!/usr/bin/env node' shebang.
 *
 * Terminates the process directly: exit 0 on full success, exit 1 otherwise.
 */
async function validateDistImports() {
  // Banner
  log(colors.cyan, '\nāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāā');
  log(colors.cyan, 'ā PRE-PUBLISH DISTRIBUTION IMPORT VALIDATOR ā');
  log(colors.cyan, 'ā Testing all exports with actual import attempts ā');
  log(colors.cyan, 'āāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāā\n');

  // A missing dist/ means the build step was skipped entirely.
  if (!fs.existsSync(distPath)) {
    log(colors.red, 'ā ERROR: dist/ directory not found. Run "npm run build" first.');
    process.exit(1);
  }

  const packageJson = JSON.parse(
    fs.readFileSync(path.join(projectRoot, 'package.json'), 'utf8')
  );

  // NOTE: 'exports' shadows the CommonJS global of the same name; this file
  // is ESM, so the shadowing is harmless.
  const exports = packageJson.exports;
  const bins = packageJson.bin;

  if (!exports) {
    log(colors.red, 'ā ERROR: No exports defined in package.json');
    process.exit(1);
  }

  let successCount = 0;
  let failCount = 0;
  const failures = [];

  // Test main entry point with actual import
  log(colors.blue, 'š¦ Testing Main Entry Point');
  log(colors.blue, 'ā'.repeat(60));

  try {
    // assumes exports['.'] is a plain path string, not a conditional-exports
    // object - TODO confirm against this package's package.json
    const mainPath = path.join(projectRoot, exports['.']);
    if (!fs.existsSync(mainPath)) {
      throw new Error(`Main entry not found: ${exports['.']}`);
    }

    // Try to import main entry (using dynamic import)
    try {
      const fileUrl = `file://${mainPath}`;
      await import(fileUrl);
      log(colors.green, `ā "${exports['.']}" exists and imports successfully`);
    } catch (importErr) {
      // File exists but import fails - this is a critical error
      throw new Error(`Import failed: ${importErr.message}`);
    }
    successCount++;
  } catch (e) {
    log(colors.red, `ā Main entry failed: ${e.message}`);
    failures.push(`Main: ${e.message}`);
    failCount++;
  }

  // Test all named exports (file existence only - imports on main tested)
  log(colors.blue, '\nš Testing Named Exports (23+ files)');
  log(colors.blue, 'ā'.repeat(60));

  const exportEntries = Object.entries(exports).slice(1); // Skip main
  // NOTE(review): if only the '.' export exists, exportEntries is empty and
  // Math.max(...) yields -Infinity; padEnd is then never reached, so this is
  // harmless today - but worth guarding if exports ever shrink.
  const maxNameLength = Math.max(...exportEntries.map(([name]) => name.length));

  for (const [name, exportPath] of exportEntries) {
    try {
      const fullPath = path.join(projectRoot, exportPath);

      // Check file exists
      if (!fs.existsSync(fullPath)) {
        throw new Error(`File not found: ${exportPath}`);
      }

      // Check it's not a dist/internal/ export (these should be blocked)
      if (exportPath.includes('dist/internal/')) {
        throw new Error(`Internal export leaked to public API: ${exportPath}`);
      }

      // Check it's actually a .js file that can be imported
      if (!exportPath.endsWith('.js')) {
        throw new Error(`Invalid export path (must be .js): ${exportPath}`);
      }

      // Format output with consistent padding
      const paddedName = name.padEnd(maxNameLength);
      log(colors.green, `ā ${paddedName} ā ${exportPath}`);
      successCount++;
    } catch (e) {
      const paddedName = name.padEnd(maxNameLength);
      log(colors.red, `ā ${paddedName} ā ERROR: ${e.message}`);
      failures.push(`${name}: ${e.message}`);
      failCount++;
    }
  }

  // Test CLI binaries
  log(colors.blue, '\nš ļø Testing CLI Binaries');
  log(colors.blue, 'ā'.repeat(60));

  for (const [binName, binPath] of Object.entries(bins || {})) {
    try {
      const fullPath = path.join(projectRoot, binPath);

      if (!fs.existsSync(fullPath)) {
        throw new Error(`Binary file not found: ${binPath}`);
      }

      // Check shebang for CLI files (npm requires it for bin wrappers)
      const content = fs.readFileSync(fullPath, 'utf8');
      if (!content.startsWith('#!/usr/bin/env node')) {
        throw new Error(`Missing shebang in CLI binary`);
      }

      log(colors.green, `ā ${binName.padEnd(20)} ā ${binPath}`);
      successCount++;
    } catch (e) {
      log(colors.red, `ā ${binName.padEnd(20)} ā ERROR: ${e.message}`);
      failures.push(`CLI ${binName}: ${e.message}`);
      failCount++;
    }
  }

  // Summary
  log(colors.blue, '\n' + 'ā'.repeat(60));
  log(colors.blue, 'š VALIDATION SUMMARY');
  log(colors.blue, 'ā'.repeat(60));

  log(colors.green, `ā Successful: ${successCount}`);
  if (failCount > 0) {
    log(colors.red, `ā Failed: ${failCount}`);
    log(colors.yellow, '\nā ļø FAILURES:');
    failures.forEach((f, i) => {
      log(colors.yellow, ` ${i + 1}. ${f}`);
    });
  }

  const total = successCount + failCount;
  const percentage = ((successCount / total) * 100).toFixed(1);

  log(colors.blue, '\n' + 'ā'.repeat(60));
  if (failCount === 0) {
    log(colors.green, `ā ALL ${successCount} EXPORTS VALIDATED SUCCESSFULLY`);
    log(colors.green, `\nš Package is ready for npm publication!\n`);
    process.exit(0);
  } else {
    log(colors.red, `ā VALIDATION FAILED: ${failCount}/${total} exports have issues`);
    log(colors.red, `${percentage}% pass rate\n`);
    process.exit(1);
  }
}
|
|
185
|
+
|
|
186
|
+
// Entry point: any error not already handled inside the validator (e.g. a
// malformed package.json) is reported and converted into a non-zero exit.
validateDistImports().catch(err => {
  log(colors.red, `\nā FATAL ERROR: ${err.message}\n`);
  process.exit(1);
});
|