agentic-flow 1.9.4 → 1.10.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +246 -0
- package/dist/proxy/adaptive-proxy.js +224 -0
- package/dist/proxy/anthropic-to-gemini.js +2 -2
- package/dist/proxy/http2-proxy-optimized.js +191 -0
- package/dist/proxy/http2-proxy.js +381 -0
- package/dist/proxy/http3-proxy-old.js +331 -0
- package/dist/proxy/http3-proxy.js +51 -0
- package/dist/proxy/websocket-proxy.js +406 -0
- package/dist/utils/auth.js +52 -0
- package/dist/utils/compression-middleware.js +149 -0
- package/dist/utils/connection-pool.js +184 -0
- package/dist/utils/rate-limiter.js +48 -0
- package/dist/utils/response-cache.js +211 -0
- package/dist/utils/streaming-optimizer.js +141 -0
- package/docs/.claude-flow/metrics/performance.json +3 -3
- package/docs/.claude-flow/metrics/task-metrics.json +3 -3
- package/docs/ISSUE-55-VALIDATION.md +152 -0
- package/docs/OPTIMIZATIONS.md +460 -0
- package/docs/README.md +217 -0
- package/docs/issues/ISSUE-xenova-transformers-dependency.md +380 -0
- package/package.json +1 -1
- package/scripts/claude +31 -0
- package/validation/test-gemini-exclusiveMinimum-fix.ts +142 -0
- package/validation/validate-v1.10.0-docker.sh +296 -0
- package/wasm/reasoningbank/reasoningbank_wasm_bg.js +2 -2
- package/wasm/reasoningbank/reasoningbank_wasm_bg.wasm +0 -0
- package/docs/INDEX.md +0 -279
- package/docs/guides/.claude-flow/metrics/agent-metrics.json +0 -1
- package/docs/guides/.claude-flow/metrics/performance.json +0 -9
- package/docs/guides/.claude-flow/metrics/task-metrics.json +0 -10
- package/docs/router/.claude-flow/metrics/agent-metrics.json +0 -1
- package/docs/router/.claude-flow/metrics/performance.json +0 -9
- package/docs/router/.claude-flow/metrics/task-metrics.json +0 -10
- /package/docs/{TEST-V1.7.8.Dockerfile → docker-tests/TEST-V1.7.8.Dockerfile} +0 -0
- /package/docs/{TEST-V1.7.9-NODE20.Dockerfile → docker-tests/TEST-V1.7.9-NODE20.Dockerfile} +0 -0
- /package/docs/{TEST-V1.7.9.Dockerfile → docker-tests/TEST-V1.7.9.Dockerfile} +0 -0
- /package/docs/{v1.7.1-QUICK-START.md → guides/QUICK-START-v1.7.1.md} +0 -0
- /package/docs/{INTEGRATION-COMPLETE.md → integration-docs/INTEGRATION-COMPLETE.md} +0 -0
- /package/docs/{LANDING-PAGE-PROVIDER-CONTENT.md → providers/LANDING-PAGE-PROVIDER-CONTENT.md} +0 -0
- /package/docs/{PROVIDER-FALLBACK-GUIDE.md → providers/PROVIDER-FALLBACK-GUIDE.md} +0 -0
- /package/docs/{PROVIDER-FALLBACK-SUMMARY.md → providers/PROVIDER-FALLBACK-SUMMARY.md} +0 -0
- /package/docs/{QUIC_FINAL_STATUS.md → quic/QUIC_FINAL_STATUS.md} +0 -0
- /package/docs/{README_QUIC_PHASE1.md → quic/README_QUIC_PHASE1.md} +0 -0
- /package/docs/{AGENTDB_TESTING.md → testing/AGENTDB_TESTING.md} +0 -0
|
@@ -0,0 +1,142 @@
|
|
|
1
|
+
#!/usr/bin/env tsx
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* Test script to validate fix for issue #55
|
|
5
|
+
* Tests that Gemini proxy properly strips exclusiveMinimum/exclusiveMaximum from tool schemas
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
import Anthropic from '@anthropic-ai/sdk';
|
|
9
|
+
|
|
10
|
+
const GEMINI_PROXY_URL = process.env.GEMINI_PROXY_URL || 'http://localhost:3000';
|
|
11
|
+
const GOOGLE_GEMINI_API_KEY = process.env.GOOGLE_GEMINI_API_KEY;
|
|
12
|
+
|
|
13
|
+
if (!GOOGLE_GEMINI_API_KEY) {
|
|
14
|
+
console.error('❌ GOOGLE_GEMINI_API_KEY not set in environment');
|
|
15
|
+
process.exit(1);
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
console.log('🧪 Testing Gemini Proxy - exclusiveMinimum/exclusiveMaximum Fix\n');
|
|
19
|
+
console.log(`Proxy URL: ${GEMINI_PROXY_URL}`);
|
|
20
|
+
console.log(`API Key: ${GOOGLE_GEMINI_API_KEY.substring(0, 10)}...\n`);
|
|
21
|
+
|
|
22
|
+
// Test tool definition with exclusiveMinimum (like Claude Code uses)
|
|
23
|
+
const testTool: Anthropic.Tool = {
|
|
24
|
+
name: 'test_tool_with_exclusive_minimum',
|
|
25
|
+
description: 'Test tool that includes exclusiveMinimum in schema',
|
|
26
|
+
input_schema: {
|
|
27
|
+
type: 'object',
|
|
28
|
+
properties: {
|
|
29
|
+
limit: {
|
|
30
|
+
type: 'number',
|
|
31
|
+
exclusiveMinimum: 0, // This should be stripped by cleanSchema
|
|
32
|
+
description: 'Limit parameter (must be > 0)'
|
|
33
|
+
},
|
|
34
|
+
offset: {
|
|
35
|
+
type: 'number',
|
|
36
|
+
exclusiveMinimum: 0,
|
|
37
|
+
exclusiveMaximum: 1000, // This should also be stripped
|
|
38
|
+
description: 'Offset parameter'
|
|
39
|
+
},
|
|
40
|
+
name: {
|
|
41
|
+
type: 'string',
|
|
42
|
+
description: 'Name parameter (should be preserved)'
|
|
43
|
+
}
|
|
44
|
+
},
|
|
45
|
+
required: ['limit']
|
|
46
|
+
}
|
|
47
|
+
};
|
|
48
|
+
|
|
49
|
+
async function testGeminiProxy() {
|
|
50
|
+
try {
|
|
51
|
+
console.log('📋 Test Tool Schema (BEFORE cleanSchema):');
|
|
52
|
+
console.log(JSON.stringify(testTool.input_schema, null, 2));
|
|
53
|
+
console.log('\n');
|
|
54
|
+
|
|
55
|
+
// Create Anthropic client pointing to Gemini proxy
|
|
56
|
+
const client = new Anthropic({
|
|
57
|
+
apiKey: GOOGLE_GEMINI_API_KEY,
|
|
58
|
+
baseURL: GEMINI_PROXY_URL
|
|
59
|
+
});
|
|
60
|
+
|
|
61
|
+
console.log('🚀 Sending request to Gemini proxy with tool definition...\n');
|
|
62
|
+
|
|
63
|
+
const response = await client.messages.create({
|
|
64
|
+
model: 'claude-3-5-sonnet-20241022',
|
|
65
|
+
max_tokens: 1024,
|
|
66
|
+
messages: [
|
|
67
|
+
{
|
|
68
|
+
role: 'user',
|
|
69
|
+
content: 'Can you tell me what tools you have available? Just list them briefly.'
|
|
70
|
+
}
|
|
71
|
+
],
|
|
72
|
+
tools: [testTool]
|
|
73
|
+
});
|
|
74
|
+
|
|
75
|
+
console.log('✅ SUCCESS: Request completed without errors!\n');
|
|
76
|
+
console.log('Response:');
|
|
77
|
+
console.log(JSON.stringify(response, null, 2));
|
|
78
|
+
console.log('\n');
|
|
79
|
+
|
|
80
|
+
// Verify the response
|
|
81
|
+
if (response.content && response.content.length > 0) {
|
|
82
|
+
console.log('✅ Response received successfully');
|
|
83
|
+
console.log('✅ Tool schema with exclusiveMinimum/exclusiveMaximum was accepted');
|
|
84
|
+
console.log('✅ Fix for issue #55 is WORKING!\n');
|
|
85
|
+
|
|
86
|
+
console.log('📊 Test Results:');
|
|
87
|
+
console.log(' - Tool definition sent: ✅');
|
|
88
|
+
console.log(' - exclusiveMinimum handled: ✅');
|
|
89
|
+
console.log(' - exclusiveMaximum handled: ✅');
|
|
90
|
+
console.log(' - No 400 errors: ✅');
|
|
91
|
+
console.log(' - Valid response received: ✅');
|
|
92
|
+
|
|
93
|
+
return true;
|
|
94
|
+
} else {
|
|
95
|
+
console.error('❌ FAIL: Response content is empty');
|
|
96
|
+
return false;
|
|
97
|
+
}
|
|
98
|
+
|
|
99
|
+
} catch (error: any) {
|
|
100
|
+
console.error('❌ ERROR occurred during test:\n');
|
|
101
|
+
|
|
102
|
+
if (error.status === 400 && error.message?.includes('exclusiveMinimum')) {
|
|
103
|
+
console.error('❌ FAIL: Gemini API still rejecting exclusiveMinimum');
|
|
104
|
+
console.error(' This means the fix is NOT working correctly\n');
|
|
105
|
+
}
|
|
106
|
+
|
|
107
|
+
console.error('Error details:');
|
|
108
|
+
console.error(` Status: ${error.status}`);
|
|
109
|
+
console.error(` Message: ${error.message}`);
|
|
110
|
+
if (error.error) {
|
|
111
|
+
console.error(` Error object: ${JSON.stringify(error.error, null, 2)}`);
|
|
112
|
+
}
|
|
113
|
+
console.error('\n');
|
|
114
|
+
|
|
115
|
+
return false;
|
|
116
|
+
}
|
|
117
|
+
}
|
|
118
|
+
|
|
119
|
+
async function main() {
|
|
120
|
+
console.log('═══════════════════════════════════════════════════════════');
|
|
121
|
+
console.log(' GEMINI PROXY - EXCLUSIVE MINIMUM FIX VALIDATION');
|
|
122
|
+
console.log(' Testing fix for GitHub issue #55');
|
|
123
|
+
console.log('═══════════════════════════════════════════════════════════\n');
|
|
124
|
+
|
|
125
|
+
const success = await testGeminiProxy();
|
|
126
|
+
|
|
127
|
+
console.log('═══════════════════════════════════════════════════════════');
|
|
128
|
+
if (success) {
|
|
129
|
+
console.log('✅ ALL TESTS PASSED - Fix is working correctly!');
|
|
130
|
+
console.log('═══════════════════════════════════════════════════════════\n');
|
|
131
|
+
process.exit(0);
|
|
132
|
+
} else {
|
|
133
|
+
console.log('❌ TESTS FAILED - Fix needs more work');
|
|
134
|
+
console.log('═══════════════════════════════════════════════════════════\n');
|
|
135
|
+
process.exit(1);
|
|
136
|
+
}
|
|
137
|
+
}
|
|
138
|
+
|
|
139
|
+
// Top-level runner: convert any unhandled rejection from main() into a
// logged fatal error and a nonzero exit code.
main().catch(err => {
  console.error('Fatal error:', err);
  process.exit(1);
});
|
|
@@ -0,0 +1,296 @@
|
|
|
1
|
+
#!/bin/bash
# v1.10.0 Docker Validation Script
# Validates all multi-protocol proxy features in isolated environment

# Abort on the first failing command. NOTE(review): any unguarded nonzero
# exit below (outside an `if` condition or `|| true`) terminates the script.
set -e

echo "🔍 v1.10.0 Multi-Protocol Proxy Validation"
echo "=========================================="
echo ""

# Colors — ANSI escape sequences consumed by `echo -e` in test_result.
GREEN='\033[0;32m'
RED='\033[0;31m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color

# Configuration.
# Every test container below is named "<DOCKER_IMAGE>-<suffix>", so cleanup
# can find them all by grepping `docker ps` output for the image name.
DOCKER_IMAGE="agentic-flow-v1.10.0-test"
# Timestamp suffix makes the key unique per run.
TEST_API_KEY="test-api-key-$(date +%s)"

# Cleanup function: stop and remove every container whose `docker ps -a`
# line mentions the test image. `xargs -r` skips the docker call when grep
# finds nothing; `|| true` keeps the EXIT trap itself from tripping `set -e`.
cleanup() {
    echo ""
    echo "🧹 Cleaning up..."
    docker ps -a | grep "$DOCKER_IMAGE" | awk '{print $1}' | xargs -r docker stop 2>/dev/null || true
    docker ps -a | grep "$DOCKER_IMAGE" | awk '{print $1}' | xargs -r docker rm 2>/dev/null || true
}

# Run cleanup on every exit path: success, failure, or a `set -e` abort.
trap cleanup EXIT

# Test results — tallied by test_result, reported in the summary.
TESTS_PASSED=0
TESTS_FAILED=0
|
|
34
|
+
|
|
35
|
+
# Record one test outcome and print a colored PASS/FAIL line.
#
# $1 - human-readable test name
# $2 - numeric exit status (0 = pass, anything else = fail)
#
# Counters are bumped with `VAR=$((VAR + 1))` instead of `((VAR++))`:
# under `set -e`, `((TESTS_PASSED++))` returns a nonzero status when the
# counter is 0 (the post-increment expression evaluates to 0), which would
# abort the entire script on the very first passing test.
test_result() {
    local test_name="$1"
    local result="$2"

    if [ "$result" -eq 0 ]; then
        echo -e "${GREEN}✅ PASS${NC}: $test_name"
        TESTS_PASSED=$((TESTS_PASSED + 1))
    else
        echo -e "${RED}❌ FAIL${NC}: $test_name"
        TESTS_FAILED=$((TESTS_FAILED + 1))
    fi
}
|
|
47
|
+
|
|
48
|
+
# Step 1: Build Docker image
echo "📦 Step 1: Building Docker image..."
# Guard the build with `if` so a failure is *recorded*: with `set -e`, a
# bare `docker build …` followed by `test_result … $?` never reaches
# test_result on failure (the script aborts first, and on success $? is
# always 0).
if docker build -f Dockerfile.multi-protocol -t "$DOCKER_IMAGE" . > /dev/null 2>&1; then
    test_result "Docker image build" 0
else
    test_result "Docker image build" 1
fi

# Step 2: Test TypeScript compilation
echo ""
echo "🔧 Step 2: Validating TypeScript compilation..."
# `|| true` keeps `set -e` from aborting when the build has errors; the log
# is inspected below either way.
docker run --rm "$DOCKER_IMAGE" npm run build > /tmp/docker-build.log 2>&1 || true
if grep -q "error TS" /tmp/docker-build.log; then
    # Errors exist somewhere; pass only if all of them are confined to
    # known-unrelated files. The original `grep -q … | grep -v …` pipeline
    # could never succeed (grep -q emits no output, so the second grep
    # always exited 1), making this check always report PASS.
    if grep "error TS" /tmp/docker-build.log | grep -qv "federation\|memory/Shared\|onnx-local\|supabase-adapter"; then
        test_result "TypeScript compilation (proxy files)" 1
    else
        test_result "TypeScript compilation (proxy files)" 0
    fi
else
    test_result "TypeScript compilation" 0
fi
|
|
67
|
+
|
|
68
|
+
# Step 3: Test HTTP/1.1 Proxy
# Each proxy test follows the same pattern: start a detached container,
# wait a fixed 3s for startup, probe health (HTTP) or the container logs,
# then stop and remove the container before the next step.
echo ""
echo "🌐 Step 3: Testing HTTP/1.1 Proxy..."
docker run -d --name ${DOCKER_IMAGE}-http1 \
    -e GOOGLE_GEMINI_API_KEY=test-key \
    -p 3000:3000 \
    $DOCKER_IMAGE \
    node dist/proxy/anthropic-to-gemini.js > /dev/null 2>&1

sleep 3

# Check if proxy started — the HTTP/1.1 proxy exposes a /health endpoint.
if curl -s http://localhost:3000/health > /dev/null 2>&1; then
    test_result "HTTP/1.1 Proxy startup" 0
else
    test_result "HTTP/1.1 Proxy startup" 1
fi

docker stop ${DOCKER_IMAGE}-http1 > /dev/null 2>&1
docker rm ${DOCKER_IMAGE}-http1 > /dev/null 2>&1

# Step 4: Test HTTP/2 Proxy
echo ""
echo "🚀 Step 4: Testing HTTP/2 Proxy..."
docker run -d --name ${DOCKER_IMAGE}-http2 \
    -e GOOGLE_GEMINI_API_KEY=test-key \
    -e PROXY_API_KEYS=$TEST_API_KEY \
    -p 3001:3001 \
    $DOCKER_IMAGE \
    node dist/proxy/http2-proxy.js > /dev/null 2>&1

sleep 3

# Check if proxy started — verified via its startup log line rather than an
# HTTP probe.
if docker logs ${DOCKER_IMAGE}-http2 2>&1 | grep -q "HTTP/2 Proxy running"; then
    test_result "HTTP/2 Proxy startup" 0
else
    test_result "HTTP/2 Proxy startup" 1
fi

# Check security features advertised in the startup logs.
if docker logs ${DOCKER_IMAGE}-http2 2>&1 | grep -q "Rate limiting enabled"; then
    test_result "HTTP/2 Rate limiting configuration" 0
else
    test_result "HTTP/2 Rate limiting configuration" 1
fi

docker stop ${DOCKER_IMAGE}-http2 > /dev/null 2>&1
docker rm ${DOCKER_IMAGE}-http2 > /dev/null 2>&1

# Step 5: Test Optimized HTTP/2 Proxy
echo ""
echo "⚡ Step 5: Testing Optimized HTTP/2 Proxy..."
# Host port 3002 maps to the proxy's container port 3001 to avoid clashing
# with the previous step if cleanup lagged.
docker run -d --name ${DOCKER_IMAGE}-optimized \
    -e GOOGLE_GEMINI_API_KEY=test-key \
    -e PROXY_API_KEYS=$TEST_API_KEY \
    -p 3002:3001 \
    $DOCKER_IMAGE \
    node dist/proxy/http2-proxy-optimized.js > /dev/null 2>&1

sleep 3

# Each Phase-1 optimization announces itself in the startup log; check all
# four independently so the summary pinpoints which one is missing.
if docker logs ${DOCKER_IMAGE}-optimized 2>&1 | grep -q "Connection pooling enabled"; then
    test_result "Connection pooling enabled" 0
else
    test_result "Connection pooling enabled" 1
fi

if docker logs ${DOCKER_IMAGE}-optimized 2>&1 | grep -q "Response caching enabled"; then
    test_result "Response caching enabled" 0
else
    test_result "Response caching enabled" 1
fi

if docker logs ${DOCKER_IMAGE}-optimized 2>&1 | grep -q "Streaming optimization enabled"; then
    test_result "Streaming optimization enabled" 0
else
    test_result "Streaming optimization enabled" 1
fi

if docker logs ${DOCKER_IMAGE}-optimized 2>&1 | grep -q "Compression enabled"; then
    test_result "Compression enabled" 0
else
    test_result "Compression enabled" 1
fi

docker stop ${DOCKER_IMAGE}-optimized > /dev/null 2>&1
docker rm ${DOCKER_IMAGE}-optimized > /dev/null 2>&1

# Step 6: Test WebSocket Proxy
echo ""
echo "🔌 Step 6: Testing WebSocket Proxy..."
docker run -d --name ${DOCKER_IMAGE}-ws \
    -e GOOGLE_GEMINI_API_KEY=test-key \
    -p 8080:8080 \
    $DOCKER_IMAGE \
    node dist/proxy/websocket-proxy.js > /dev/null 2>&1

sleep 3

if docker logs ${DOCKER_IMAGE}-ws 2>&1 | grep -q "WebSocket proxy running"; then
    test_result "WebSocket Proxy startup" 0
else
    test_result "WebSocket Proxy startup" 1
fi

# Check DoS protection announcement in the startup log.
if docker logs ${DOCKER_IMAGE}-ws 2>&1 | grep -q "DoS protection"; then
    test_result "WebSocket DoS protection" 0
else
    test_result "WebSocket DoS protection" 1
fi

docker stop ${DOCKER_IMAGE}-ws > /dev/null 2>&1
docker rm ${DOCKER_IMAGE}-ws > /dev/null 2>&1

# Step 7: Test Adaptive Proxy
echo ""
echo "🎯 Step 7: Testing Adaptive Multi-Protocol Proxy..."
docker run -d --name ${DOCKER_IMAGE}-adaptive \
    -e GOOGLE_GEMINI_API_KEY=test-key \
    -p 3003:3000 \
    $DOCKER_IMAGE \
    node dist/proxy/adaptive-proxy.js > /dev/null 2>&1

sleep 3

# NOTE(review): this only greps for the word "Adaptive" anywhere in the
# logs — a weaker check than the other steps; a crash message containing
# the word would still pass.
if docker logs ${DOCKER_IMAGE}-adaptive 2>&1 | grep -q "Adaptive"; then
    test_result "Adaptive Proxy startup" 0
else
    test_result "Adaptive Proxy startup" 1
fi

docker stop ${DOCKER_IMAGE}-adaptive > /dev/null 2>&1
docker rm ${DOCKER_IMAGE}-adaptive > /dev/null 2>&1
|
|
204
|
+
|
|
205
|
+
# Step 8: Test Utility Files
echo ""
echo "🛠️ Step 8: Testing Utility Files..."

# Capture the compiled utils listing once, then grep it per expected file.
# NOTE(review): if this `docker run` itself fails, `set -e` aborts the
# script here rather than recording six failures.
docker run --rm $DOCKER_IMAGE ls dist/utils/ > /tmp/utils-files.txt 2>&1

if grep -q "connection-pool.js" /tmp/utils-files.txt; then
    test_result "Connection pool utility compiled" 0
else
    test_result "Connection pool utility compiled" 1
fi

if grep -q "response-cache.js" /tmp/utils-files.txt; then
    test_result "Response cache utility compiled" 0
else
    test_result "Response cache utility compiled" 1
fi

if grep -q "streaming-optimizer.js" /tmp/utils-files.txt; then
    test_result "Streaming optimizer utility compiled" 0
else
    test_result "Streaming optimizer utility compiled" 1
fi

if grep -q "compression-middleware.js" /tmp/utils-files.txt; then
    test_result "Compression middleware utility compiled" 0
else
    test_result "Compression middleware utility compiled" 1
fi

if grep -q "rate-limiter.js" /tmp/utils-files.txt; then
    test_result "Rate limiter utility compiled" 0
else
    test_result "Rate limiter utility compiled" 1
fi

if grep -q "auth.js" /tmp/utils-files.txt; then
    test_result "Auth utility compiled" 0
else
    test_result "Auth utility compiled" 1
fi

# Step 9: Test Documentation — checks run on the host working tree, not in
# the container.
echo ""
echo "📚 Step 9: Validating Documentation..."

if [ -f "docs/OPTIMIZATIONS.md" ]; then
    test_result "OPTIMIZATIONS.md exists" 0
else
    test_result "OPTIMIZATIONS.md exists" 1
fi

# 2>/dev/null keeps grep quiet when the file is missing.
if grep -q "Connection Pooling" docs/OPTIMIZATIONS.md 2>/dev/null; then
    test_result "Documentation includes optimizations" 0
else
    test_result "Documentation includes optimizations" 1
fi

# Step 10: Test CHANGELOG
echo ""
echo "📝 Step 10: Validating CHANGELOG..."

if grep -q "1.10.0" CHANGELOG.md; then
    test_result "CHANGELOG includes v1.10.0" 0
else
    test_result "CHANGELOG includes v1.10.0" 1
fi

if grep -q "Phase 1 Optimizations" CHANGELOG.md; then
    test_result "CHANGELOG includes optimization details" 0
else
    test_result "CHANGELOG includes optimization details" 1
fi

# Summary — overall exit status mirrors the failure count so CI can gate
# on this script directly.
echo ""
echo "=========================================="
echo "📊 VALIDATION SUMMARY"
echo "=========================================="
echo -e "Total Tests: $((TESTS_PASSED + TESTS_FAILED))"
echo -e "${GREEN}Passed: $TESTS_PASSED${NC}"
echo -e "${RED}Failed: $TESTS_FAILED${NC}"
echo ""

if [ $TESTS_FAILED -eq 0 ]; then
    echo -e "${GREEN}✅ ALL TESTS PASSED - Ready for v1.10.0 release!${NC}"
    exit 0
else
    echo -e "${YELLOW}⚠️ Some tests failed - Review before publishing${NC}"
    exit 1
fi
|
|
@@ -258,7 +258,7 @@ export function log(message) {
|
|
|
258
258
|
wasm.log(ptr0, len0);
|
|
259
259
|
}
|
|
260
260
|
|
|
261
|
-
function
|
|
261
|
+
function __wbg_adapter_4(arg0, arg1, arg2) {
|
|
262
262
|
wasm.__wbindgen_export_5(arg0, arg1, addHeapObject(arg2));
|
|
263
263
|
}
|
|
264
264
|
|
|
@@ -540,7 +540,7 @@ export function __wbindgen_cast_2241b6af4c4b2941(arg0, arg1) {
|
|
|
540
540
|
|
|
541
541
|
export function __wbindgen_cast_8eb6fd44e7238d11(arg0, arg1) {
|
|
542
542
|
// Cast intrinsic for `Closure(Closure { dtor_idx: 62, function: Function { arguments: [Externref], shim_idx: 63, ret: Unit, inner_ret: Some(Unit) }, mutable: true }) -> Externref`.
|
|
543
|
-
const ret = makeMutClosure(arg0, arg1, 62,
|
|
543
|
+
const ret = makeMutClosure(arg0, arg1, 62, __wbg_adapter_4);
|
|
544
544
|
return addHeapObject(ret);
|
|
545
545
|
};
|
|
546
546
|
|
|
Binary file
|