haven-cypress-integration 1.5.0 → 1.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "haven-cypress-integration",
3
- "version": "1.5.0",
3
+ "version": "1.6.0",
4
4
  "description": "Seamless Cypress integration with HAVEN test case management",
5
5
  "main": "index.js",
6
6
  "bin": {
@@ -5,20 +5,15 @@ WORKDIR /app
5
5
  # Copy project files
6
6
  COPY . .
7
7
 
8
- # Install zip and AWS CLI v2 via official method
8
+ # Install zip utility (AWS CLI removed - EC2 handles S3 uploads)
9
9
  RUN apt-get update && \
10
- apt-get install -y \
11
- zip \
12
- unzip \
13
- curl && \
14
- curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" && \
15
- unzip awscliv2.zip && \
16
- ./aws/install && \
17
- rm -rf aws awscliv2.zip
10
+ apt-get install -y zip && \
11
+ apt-get clean && \
12
+ rm -rf /var/lib/apt/lists/*
18
13
 
19
14
  # ✅ Install Node deps (including dev dependencies for mochawesome)
20
15
  RUN npm ci --include=dev
21
- RUN npm install --no-save aws-sdk mochawesome mochawesome-merge mochawesome-report-generator
16
+ RUN npm install --no-save mochawesome mochawesome-merge mochawesome-report-generator
22
17
 
23
18
  # ✅ Ensure script is executable
24
19
  COPY run-filtered.sh /app/run-filtered.sh
@@ -108,24 +108,22 @@ else
108
108
  echo "❌ Mochawesome report folder not found: ${MOCHAWESOME_DIR}"
109
109
  fi
110
110
 
111
- # Upload zipped report to S3
111
+ # Copy artifacts to shared volume for EC2 to upload
112
+ mkdir -p /shared/test-logs
112
113
  if [ -f "${ZIP_PATH}" ]; then
113
- S3_BASE_KEY="artifacts/${PRODUCT_NAME}/${PLAN_ID}/automation/${RUN_ID}"
114
- S3_ZIP_KEY="${S3_BASE_KEY}/${ZIP_NAME}"
115
- echo "☁️ Uploading ZIP to s3://${BUCKET_NAME}/${S3_ZIP_KEY}"
116
- aws s3 cp "${ZIP_PATH}" "s3://${BUCKET_NAME}/${S3_ZIP_KEY}" || echo "❌ S3 ZIP upload failed"
114
+ echo "📁 Copying ZIP to shared volume: ${ZIP_NAME}"
115
+ cp "${ZIP_PATH}" "/shared/test-logs/${ZIP_NAME}" || echo "❌ Failed to copy ZIP to shared volume"
117
116
  else
118
- echo "❌ No zip file to upload"
117
+ echo "❌ No zip file to copy"
119
118
  fi
120
119
 
121
- # Upload HTML report file to S3
120
+ # Copy HTML report file to shared volume
122
121
  if [ -f "${REPORT_HTML}" ]; then
123
122
  HTML_NAME=$(basename "$REPORT_HTML")
124
- S3_HTML_KEY="${S3_BASE_KEY}/${HTML_NAME}"
125
- echo "🌐 Uploading HTML to s3://${BUCKET_NAME}/${S3_HTML_KEY}"
126
- aws s3 cp "${REPORT_HTML}" "s3://${BUCKET_NAME}/${S3_HTML_KEY}" || echo "❌ S3 HTML upload failed"
123
+ echo "📁 Copying HTML to shared volume: ${HTML_NAME}"
124
+ cp "${REPORT_HTML}" "/shared/test-logs/${HTML_NAME}" || echo "❌ Failed to copy HTML to shared volume"
127
125
  else
128
- echo "❌ No HTML report found to upload"
126
+ echo "❌ No HTML report found to copy"
129
127
  fi
130
128
 
131
129
  # Run result sync
@@ -2,13 +2,7 @@ const fs = require("fs");
2
2
  const path = require("path");
3
3
  const axios = require("axios");
4
4
  const glob = require("glob");
5
- const AWS = require("aws-sdk");
6
-
7
- // AWS setup
8
- const s3 = new AWS.S3({
9
- region: process.env.AWS_REGION || "us-east-1",
10
- });
11
- const bucketName = process.env.S3_BUCKET || "your-bucket-name";
5
+ // AWS SDK removed - EC2 instance handles S3 uploads
12
6
 
13
7
  // Paths
14
8
  const automationCasesPath = "/shared/automation-cases.json";
@@ -165,7 +159,7 @@ async function postSummary(results, notFound, planId, runId) {
165
159
  const url = baseUrl.replace('/api/test-results', '/api/test-run-summary');
166
160
 
167
161
  const logs =
168
- (await uploadLogToS3(planId, runId)) || "Log upload failed or missing";
162
+ (await copyLogToShared(planId, runId)) || "Log copy failed or missing";
169
163
 
170
164
  const summaryPayload = {
171
165
  runId,
@@ -197,30 +191,26 @@ async function postSummary(results, notFound, planId, runId) {
197
191
  }
198
192
  }
199
193
 
200
- async function uploadLogToS3(planId, runId) {
194
+ async function copyLogToShared(planId, runId) {
201
195
  const logsPath = "results/logs.txt";
202
196
 
203
197
  if (!fs.existsSync(logsPath)) {
204
- console.warn("⚠️ No logs.txt file found to upload.");
198
+ console.warn("⚠️ No logs.txt file found to copy.");
205
199
  return null;
206
200
  }
207
201
 
208
- const key = `artifacts/${process.env.PRODUCT_NAME}/${planId}/automation/${runId}/run_log`;
209
- const fileStream = fs.createReadStream(logsPath);
202
+ const sharedDir = "/shared/test-logs";
203
+ if (!fs.existsSync(sharedDir)) {
204
+ fs.mkdirSync(sharedDir, { recursive: true });
205
+ }
210
206
 
211
207
  try {
212
- await s3
213
- .upload({
214
- Bucket: bucketName,
215
- Key: key,
216
- Body: fileStream,
217
- ContentType: "text/plain",
218
- })
219
- .promise();
220
-
221
- return `s3://${bucketName}/${key}`;
208
+ const destPath = path.join(sharedDir, "run_log.txt");
209
+ fs.copyFileSync(logsPath, destPath);
210
+ console.log(`📁 Logs copied to shared volume: ${destPath}`);
211
+ return destPath;
222
212
  } catch (err) {
223
- console.error("❌ Failed to upload log to S3:", err.message);
213
+ console.error("❌ Failed to copy log to shared volume:", err.message);
224
214
  return null;
225
215
  }
226
216
  }