haven-cypress-integration 1.5.0 → 1.6.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -19,13 +19,16 @@ Options:
19
19
  --tag=name:version Docker image tag (default: haven-cypress-tests:latest)
20
20
  --product=name Product name for ECR organization (required for --push)
21
21
  --push Push image to ECR haven-test-images repository
22
- --automationIds=ID1,ID2 Run specific test cases
22
+ --automationIds=ID1,ID2 Run specific test cases by automation ID
23
+ --customTags=tag1,tag2 Run tests with custom tags (smoke, regression, p1, etc.)
23
24
 
24
25
  Examples:
25
26
  npx haven-cypress build --product=BE
26
27
  npx haven-cypress build --product=BE --push
27
28
  npx haven-cypress build --product=BE --tag=v2.1.0 --push
28
29
  npx haven-cypress run --automationIds=TC-AUTO-123,TC-AUTO-124
30
+ npx haven-cypress run --customTags=smoke,p1
31
+ npx haven-cypress run --automationIds=TC-AUTO-123 --customTags=smoke
29
32
 
30
33
  Versioning:
31
34
 --tag=latest → ECR tag: BE-{package.json.version} or BE-1.0.{BUILD_NUMBER}
@@ -60,7 +63,8 @@ try {
60
63
 
61
64
  case 'run':
62
65
  const automationIds = options.automationIds || '';
63
- integration.runTests(automationIds);
66
+ const customTags = options.customTags || '';
67
+ integration.runTests(automationIds, customTags);
64
68
  break;
65
69
 
66
70
  default:
package/index.js CHANGED
@@ -217,9 +217,10 @@ class HavenCypressIntegration {
217
217
  * Run tests with Haven integration
218
218
  * This is called by Haven when the container runs
219
219
  */
220
- runTests(automationIds = '') {
220
+ runTests(automationIds = '', customTags = '') {
221
221
  console.log('๐Ÿงช Running Haven-integrated Cypress tests...');
222
- console.log(`๐Ÿ” Automation IDs: ${automationIds || 'All tests'}`);
222
+ console.log(`๐Ÿ” Automation IDs: ${automationIds || 'None'}`);
223
+ console.log(`๐Ÿ” Custom Tags: ${customTags || 'None'}`);
223
224
 
224
225
  // This will be handled by run-filtered.sh when container runs
225
226
  // The library just provides the interface
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "haven-cypress-integration",
3
- "version": "1.5.0",
3
+ "version": "1.6.1",
4
4
  "description": "Seamless Cypress integration with HAVEN test case management",
5
5
  "main": "index.js",
6
6
  "bin": {
@@ -4,31 +4,56 @@ echo "๐Ÿ”ฅ RUN SCRIPT:"
4
4
  echo "โœ… ENTERED run-filtered.sh"
5
5
  echo "โš™๏ธ Raw args: $@"
6
6
 
7
- # Extract automation IDs from CLI args
7
+ # Extract automation IDs and custom tags from CLI args
8
8
  AUTOMATION_IDS=""
9
+ CUSTOM_TAGS=""
9
10
  for arg in "$@"; do
10
11
  case $arg in
11
12
  --automationIds=*)
12
13
  AUTOMATION_IDS="${arg#*=}"
13
14
  shift
14
15
  ;;
16
+ --customTags=*)
17
+ CUSTOM_TAGS="${arg#*=}"
18
+ shift
19
+ ;;
15
20
  esac
16
21
  done
17
22
 
18
23
  echo "๐Ÿ” Extracted automation IDs: ${AUTOMATION_IDS}"
24
+ echo "๐Ÿ” Extracted custom tags: ${CUSTOM_TAGS}"
19
25
  echo "๐Ÿ“‚ Current working directory: $(pwd)"
20
26
 
27
+ # Build grep arguments for Cypress
28
+ CYPRESS_GREP=""
29
+ GREP_TAGS=""
21
30
 
22
- if [ -z "$AUTOMATION_IDS" ]; then
23
- echo "๐Ÿ” No automation IDs provided. Running all tests..."
24
- CYPRESS_GREP=""
25
- else
31
+ if [ -n "$AUTOMATION_IDS" ]; then
26
32
  # Strip quotes from arg, replace commas with spaces
27
33
  RAW_IDS=$(echo "$AUTOMATION_IDS" | sed "s/^['\"]//;s/['\"]$//")
28
34
  CLEANED_IDS="${RAW_IDS//,/ }"
35
+ GREP_TAGS="${CLEANED_IDS}"
36
+ echo "๐Ÿš€ Running Cypress with automation IDs: ${CLEANED_IDS}"
37
+ fi
38
+
39
+ if [ -n "$CUSTOM_TAGS" ]; then
40
+ # Strip quotes from arg, replace commas with spaces
41
+ RAW_TAGS=$(echo "$CUSTOM_TAGS" | sed "s/^['\"]//;s/['\"]$//")
42
+ CLEANED_TAGS="${RAW_TAGS//,/ }"
43
+
44
+ if [ -n "$GREP_TAGS" ]; then
45
+ GREP_TAGS="${GREP_TAGS} ${CLEANED_TAGS}"
46
+ else
47
+ GREP_TAGS="${CLEANED_TAGS}"
48
+ fi
49
+ echo "๐Ÿš€ Running Cypress with custom tags: ${CLEANED_TAGS}"
50
+ fi
29
51
 
30
- echo "๐Ÿš€ Running Cypress with filtered tags: ${CLEANED_IDS}"
31
- CYPRESS_GREP="--env grepTags='${CLEANED_IDS}'"
52
+ if [ -n "$GREP_TAGS" ]; then
53
+ CYPRESS_GREP="--env grepTags='${GREP_TAGS}'"
54
+ echo "๐Ÿ’ก Final grep tags: ${GREP_TAGS}"
55
+ else
56
+ echo "๐Ÿ” No tags provided. Running all tests..."
32
57
  fi
33
58
 
34
59
  # Set environment variables
@@ -37,6 +62,7 @@ PRODUCT_NAME=${PRODUCT_NAME:-unknown_product}
37
62
  PLAN_ID=${PLAN_ID:-unknown_plan}
38
63
  RUN_ID=${RUN_ID:-unknown_run}
39
64
  BUCKET_NAME=${BUCKET_NAME:-your-default-bucket}
65
+ TEST_ENVIRONMENT=${TEST_ENVIRONMENT:-QA}
40
66
 
41
67
  # Ensure results directory exists
42
68
  mkdir -p results/mochawesome
@@ -45,14 +71,14 @@ echo "๐Ÿงน Cleaning mochawesome reports"
45
71
  rm -rf results/mochawesome/*
46
72
 
47
73
  # Run Cypress
48
- echo "๐Ÿš€ Running Cypress with filtered tags: ${AUTOMATION_IDS}"
74
+ echo "๐Ÿš€ Running Cypress tests..."
49
75
  echo "๐Ÿ’ก Final grep arg: $CYPRESS_GREP"
50
76
  eval "npx cypress run \
51
77
  --headless \
52
78
  --browser chrome \
53
79
  --reporter mochawesome \
54
80
  --reporter-options 'reportDir=results/mochawesome,overwrite=false,html=false,json=true,timestamp=mmddyyyy_HHMMss' \
55
- --env grepFilterSpecs=true,grepUntagged=false,grepEnabled=true \
81
+ --env grepFilterSpecs=true,grepUntagged=false,grepEnabled=true,TEST_ENVIRONMENT=${TEST_ENVIRONMENT} \
56
82
  $CYPRESS_GREP \
57
83
  > results/logs.txt 2>&1"
58
84
 
@@ -108,29 +134,27 @@ else
108
134
  echo "โŒ Mochawesome report folder not found: ${MOCHAWESOME_DIR}"
109
135
  fi
110
136
 
111
- # Upload zipped report to S3
137
+ # Copy artifacts to shared volume for EC2 to upload
138
+ mkdir -p /shared/test-logs
112
139
  if [ -f "${ZIP_PATH}" ]; then
113
- S3_BASE_KEY="artifacts/${PRODUCT_NAME}/${PLAN_ID}/automation/${RUN_ID}"
114
- S3_ZIP_KEY="${S3_BASE_KEY}/${ZIP_NAME}"
115
- echo "โ˜๏ธ Uploading ZIP to s3://${BUCKET_NAME}/${S3_ZIP_KEY}"
116
- aws s3 cp "${ZIP_PATH}" "s3://${BUCKET_NAME}/${S3_ZIP_KEY}" || echo "โŒ S3 ZIP upload failed"
140
+ echo "๐Ÿ“ Copying ZIP to shared volume: ${ZIP_NAME}"
141
+ cp "${ZIP_PATH}" "/shared/test-logs/${ZIP_NAME}" || echo "โŒ Failed to copy ZIP to shared volume"
117
142
  else
118
- echo "โŒ No zip file to upload"
143
+ echo "โŒ No zip file to copy"
119
144
  fi
120
145
 
121
- # Upload HTML report file to S3
146
+ # Copy HTML report file to shared volume
122
147
  if [ -f "${REPORT_HTML}" ]; then
123
148
  HTML_NAME=$(basename "$REPORT_HTML")
124
- S3_HTML_KEY="${S3_BASE_KEY}/${HTML_NAME}"
125
- echo "๐ŸŒ Uploading HTML to s3://${BUCKET_NAME}/${S3_HTML_KEY}"
126
- aws s3 cp "${REPORT_HTML}" "s3://${BUCKET_NAME}/${S3_HTML_KEY}" || echo "โŒ S3 HTML upload failed"
149
+ echo "๐Ÿ“ Copying HTML to shared volume: ${HTML_NAME}"
150
+ cp "${REPORT_HTML}" "/shared/test-logs/${HTML_NAME}" || echo "โŒ Failed to copy HTML to shared volume"
127
151
  else
128
- echo "โŒ No HTML report found to upload"
152
+ echo "โŒ No HTML report found to copy"
129
153
  fi
130
154
 
131
155
  # Run result sync
132
156
  echo "๐Ÿ”„ Running syncCypressResults.js"
133
- PLAN_ID="${PLAN_ID}" RUN_ID="${RUN_ID}" node syncCypressResults.js >> results/logs.txt 2>&1 || echo "โš ๏ธ syncCypressResults.js failed"
157
+ PLAN_ID="${PLAN_ID}" RUN_ID="${RUN_ID}" TEST_ENVIRONMENT="${TEST_ENVIRONMENT}" node syncCypressResults.js >> results/logs.txt 2>&1 || echo "โš ๏ธ syncCypressResults.js failed"
134
158
 
135
159
  # Final output
136
160
  echo "๐Ÿ“ค Dumping results/logs.txt for runner capture:"
@@ -2,13 +2,7 @@ const fs = require("fs");
2
2
  const path = require("path");
3
3
  const axios = require("axios");
4
4
  const glob = require("glob");
5
- const AWS = require("aws-sdk");
6
-
7
- // AWS setup
8
- const s3 = new AWS.S3({
9
- region: process.env.AWS_REGION || "us-east-1",
10
- });
11
- const bucketName = process.env.S3_BUCKET || "your-bucket-name";
5
+ // AWS SDK removed - EC2 instance handles S3 uploads
12
6
 
13
7
  // Paths
14
8
  const automationCasesPath = "/shared/automation-cases.json";
@@ -118,23 +112,25 @@ try {
118
112
 
119
113
  const planId = process.env.PLAN_ID || "unknown";
120
114
  const runId = process.env.RUN_ID || "unknown";
115
+ const testEnvironment = process.env.TEST_ENVIRONMENT || "QA";
121
116
 
122
117
  if (!planId || !runId || isNaN(Number(planId)) || isNaN(Number(runId))) {
123
118
  console.error("โŒ Invalid or missing PLAN_ID / RUN_ID");
124
119
  process.exit(1);
125
120
  }
126
121
 
127
- await postResults(formatted, notFound, planId, runId);
128
- await postSummary(formatted, notFound, planId, runId);
122
+ await postResults(formatted, notFound, planId, runId, testEnvironment);
123
+ await postSummary(formatted, notFound, planId, runId, testEnvironment);
129
124
  })();
130
125
 
131
- async function postResults(formattedResults, notFoundList, planId, runId) {
126
+ async function postResults(formattedResults, notFoundList, planId, runId, testEnvironment) {
132
127
  const postUrl = baseUrl; // baseUrl already includes the complete endpoint
133
128
  console.log(`๐Ÿ”— Posting to URL: ${postUrl}`);
134
129
 
135
130
  const payload = {
136
131
  planId,
137
132
  runId,
133
+ environment: testEnvironment,
138
134
  results: formattedResults,
139
135
  not_found: notFoundList,
140
136
  triggered_by: triggeredBy,
@@ -160,16 +156,17 @@ async function postResults(formattedResults, notFoundList, planId, runId) {
160
156
  }
161
157
  }
162
158
 
163
- async function postSummary(results, notFound, planId, runId) {
159
+ async function postSummary(results, notFound, planId, runId, testEnvironment) {
164
160
  // Replace test-results with test-run-summary in the baseUrl
165
161
  const url = baseUrl.replace('/api/test-results', '/api/test-run-summary');
166
162
 
167
163
  const logs =
168
- (await uploadLogToS3(planId, runId)) || "Log upload failed or missing";
164
+ (await copyLogToShared(planId, runId)) || "Log copy failed or missing";
169
165
 
170
166
  const summaryPayload = {
171
167
  runId,
172
168
  planId,
169
+ environment: testEnvironment,
173
170
  status: computeOverallStatus(results, notFound),
174
171
  logs,
175
172
  result: {
@@ -197,30 +194,26 @@ async function postSummary(results, notFound, planId, runId) {
197
194
  }
198
195
  }
199
196
 
200
- async function uploadLogToS3(planId, runId) {
197
+ async function copyLogToShared(planId, runId) {
201
198
  const logsPath = "results/logs.txt";
202
199
 
203
200
  if (!fs.existsSync(logsPath)) {
204
- console.warn("โš ๏ธ No logs.txt file found to upload.");
201
+ console.warn("โš ๏ธ No logs.txt file found to copy.");
205
202
  return null;
206
203
  }
207
204
 
208
- const key = `artifacts/${process.env.PRODUCT_NAME}/${planId}/automation/${runId}/run_log`;
209
- const fileStream = fs.createReadStream(logsPath);
205
+ const sharedDir = "/shared/test-logs";
206
+ if (!fs.existsSync(sharedDir)) {
207
+ fs.mkdirSync(sharedDir, { recursive: true });
208
+ }
210
209
 
211
210
  try {
212
- await s3
213
- .upload({
214
- Bucket: bucketName,
215
- Key: key,
216
- Body: fileStream,
217
- ContentType: "text/plain",
218
- })
219
- .promise();
220
-
221
- return `s3://${bucketName}/${key}`;
211
+ const destPath = path.join(sharedDir, "run_log.txt");
212
+ fs.copyFileSync(logsPath, destPath);
213
+ console.log(`๐Ÿ“ Logs copied to shared volume: ${destPath}`);
214
+ return destPath;
222
215
  } catch (err) {
223
- console.error("โŒ Failed to upload log to S3:", err.message);
216
+ console.error("โŒ Failed to copy log to shared volume:", err.message);
224
217
  return null;
225
218
  }
226
219
  }