haven-cypress-integration 1.6.2 → 2.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +224 -120
- package/bin/haven-cypress.js +46 -59
- package/index.js +87 -215
- package/package.json +4 -3
- package/templates/Dockerfile +32 -22
- package/templates/run-filtered.sh +184 -90
- package/templates/syncCypressResults.js +230 -153
package/templates/run-filtered.sh

@@ -1,10 +1,10 @@
 #!/bin/bash
+set -e
 
-echo "
-echo "
-echo "โ๏ธ Raw args: $@"
+echo "CYPRESS RUN SCRIPT"
+echo "Raw args: $@"
 
-#
+# Parse --automationIds and --customTags input from args
 AUTOMATION_IDS=""
 CUSTOM_TAGS=""
 for arg in "$@"; do
@@ -20,92 +20,189 @@ for arg in "$@"; do
 esac
 done
 
-echo "
-echo "
-echo "๐ Current working directory: $(pwd)"
+echo "automationIds: ${AUTOMATION_IDS}"
+echo "customTags: ${CUSTOM_TAGS}"
 
-
-
-
+TIMESTAMP=$(date +%m%d%Y_%H%M%S)
+PRODUCT_NAME=${PRODUCT_NAME:-unknown_product}
+PLAN_ID=${PLAN_ID:-unknown_plan}
+RUN_ID=${RUN_ID:-unknown_run}
+BUCKET_NAME=${BUCKET_NAME:-your-default-bucket}
+TEST_ENVIRONMENT=${TEST_ENVIRONMENT:-QA}
+
+# =============================================================================
+# ADO CLONE MODE (Thin Image)
+# =============================================================================
+# If ADO_REPO is set, clone app code from Azure DevOps at runtime.
+# This enables "thin image" mode where app code is NOT baked into the image.
+#
+# Required env vars:
+#   ADO_REPO - Full ADO repo path (e.g., dev.azure.com/org/project/_git/repo)
+#   ADO_PAT - Personal Access Token for authentication
+# Optional env vars:
+#   ADO_BRANCH - Branch to clone (default: main)
+# =============================================================================
 
-if [ -n "$
-
-
-
-
-
+if [ -n "$ADO_REPO" ]; then
+  echo "=== ADO Clone Mode ==="
+  ADO_BRANCH=${ADO_BRANCH:-main}
+
+  git clone --branch "$ADO_BRANCH" --depth 1 \
+    "https://${ADO_PAT}@${ADO_REPO}" /app
+
+  cd /app
+  npm ci
+  echo "=== Clone Complete ==="
+else
+  echo "=== Full Image Mode (app code pre-baked) ==="
 fi
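Note: the thin-image mode above is driven entirely by environment variables, so one image serves both modes. A hypothetical invocation (image name and values are illustrative, and this assumes the template Dockerfile wires run-filtered.sh up as the entrypoint):

    # Thin image: app code is cloned from Azure DevOps at container start
    docker run --rm \
      -e ADO_REPO="dev.azure.com/org/project/_git/repo" \
      -e ADO_PAT="$ADO_PAT" \
      -e ADO_BRANCH="main" \
      haven-cypress-runner:2.0.1

Omit ADO_REPO and the script falls through to the pre-baked app code path.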
 
-
-
-
-
+# S3 artifact path for uploads
+S3_ARTIFACT_PATH="s3://${BUCKET_NAME}/artifacts/${PRODUCT_NAME}/${PLAN_ID}/automation/${RUN_ID}"
+echo "S3 artifact path: ${S3_ARTIFACT_PATH}"
+
+# Ensure results directory exists
+RESULTS_DIR="results"
+MOCHAWESOME_DIR="${RESULTS_DIR}/mochawesome"
+mkdir -p "$MOCHAWESOME_DIR"
+
+# Clean previous reports
+echo "Cleaning mochawesome reports"
+rm -rf ${MOCHAWESOME_DIR}/*
+
+# Start background log uploader (uploads every 30 seconds for real-time visibility)
+echo "Starting real-time log uploader..."
+LOG_UPLOADER_PID=""
+(
+  while true; do
+    sleep 30
+    if [ -f ${RESULTS_DIR}/logs.txt ]; then
+      aws s3 cp ${RESULTS_DIR}/logs.txt "${S3_ARTIFACT_PATH}/live-output.log" --quiet 2>/dev/null || true
+    fi
+  done
+) &
+LOG_UPLOADER_PID=$!
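Because the background loop re-uploads the same live-output.log key every 30 seconds, a consumer can poll that object for near-real-time output. For example (bucket and path are placeholders), the AWS CLI can stream the latest snapshot to stdout:

    # "-" as the destination writes the downloaded object to stdout
    aws s3 cp "s3://your-default-bucket/artifacts/my_product/plan123/automation/run456/live-output.log" -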
+
+# Build grep patterns for Cypress
+GREP_TAGS=""
+GREP_INVERT_TAGS=""
+
+# First, check if HAVEN_GREP_PATTERN was passed as env var from Haven
+if [ -n "$HAVEN_GREP_PATTERN" ]; then
+  echo "HAVEN_GREP_PATTERN from env: ${HAVEN_GREP_PATTERN}"
 
-
-
-
-
+  # Parse patterns into include/exclude (supports NOT: prefix)
+  IFS='|' read -ra PATTERN_ARRAY <<< "$HAVEN_GREP_PATTERN"
+  for pattern in "${PATTERN_ARRAY[@]}"; do
+    [ -z "$pattern" ] && continue
+    if [[ "$pattern" == NOT:* ]]; then
+      CLEAN_PATTERN="${pattern:4}"
+      GREP_INVERT_TAGS="${GREP_INVERT_TAGS:+$GREP_INVERT_TAGS }${CLEAN_PATTERN}"
+    else
+      GREP_TAGS="${GREP_TAGS:+$GREP_TAGS }${pattern}"
+    fi
+  done
+fi
|
+
|
|
107
|
+
# If no env var, build from CLI args (fallback for non-Haven runs)
|
|
108
|
+
if [ -z "$GREP_TAGS" ] && [ -z "$GREP_INVERT_TAGS" ]; then
|
|
109
|
+
if [ -n "$AUTOMATION_IDS" ]; then
|
|
110
|
+
RAW_IDS=$(echo "$AUTOMATION_IDS" | sed "s/^['\"]//;s/['\"]$//")
|
|
111
|
+
CLEANED_IDS="${RAW_IDS//,/ }"
|
|
112
|
+
GREP_TAGS="${CLEANED_IDS}"
|
|
113
|
+
echo "Automation IDs pattern: ${CLEANED_IDS}"
|
|
114
|
+
fi
|
|
115
|
+
|
|
116
|
+
if [ -n "$CUSTOM_TAGS" ]; then
|
|
117
|
+
RAW_TAGS=$(echo "$CUSTOM_TAGS" | sed "s/^['\"]//;s/['\"]$//")
|
|
118
|
+
CLEANED_TAGS="${RAW_TAGS//,/ }"
|
|
119
|
+
|
|
120
|
+
if [ -n "$GREP_TAGS" ]; then
|
|
121
|
+
GREP_TAGS="${GREP_TAGS} ${CLEANED_TAGS}"
|
|
122
|
+
else
|
|
123
|
+
GREP_TAGS="${CLEANED_TAGS}"
|
|
124
|
+
fi
|
|
125
|
+
echo "Custom tags pattern: ${CLEANED_TAGS}"
|
|
48
126
|
fi
|
|
49
|
-
echo "๐ Running Cypress with custom tags: ${CLEANED_TAGS}"
|
|
50
127
|
fi
|
|
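The fallback normalizes the CLI values in two steps: sed strips one layer of surrounding quotes, then the ${VAR//,/ } expansion replaces every comma with a space. A quick illustration with a made-up value:

    AUTOMATION_IDS="'TC-101,TC-205,TC-9'"
    RAW_IDS=$(echo "$AUTOMATION_IDS" | sed "s/^['\"]//;s/['\"]$//")  # TC-101,TC-205,TC-9
    echo "${RAW_IDS//,/ }"                                           # TC-101 TC-205 TC-9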
 
+# Build Cypress grep arguments
+CYPRESS_GREP_ARGS=""
 if [ -n "$GREP_TAGS" ]; then
-
-
+  export HAVEN_GREP_INCLUDE="${GREP_TAGS}"
+  CYPRESS_GREP_ARGS="--env grepTags='${GREP_TAGS}'"
+  echo "Include pattern (HAVEN_GREP_INCLUDE): ${GREP_TAGS}"
+fi
+
+if [ -n "$GREP_INVERT_TAGS" ]; then
+  export HAVEN_GREP_EXCLUDE="${GREP_INVERT_TAGS}"
+  # Cypress grep uses grepInvert for exclusion
+  if [ -n "$CYPRESS_GREP_ARGS" ]; then
+    CYPRESS_GREP_ARGS="${CYPRESS_GREP_ARGS},grepInvert='${GREP_INVERT_TAGS}'"
+  else
+    CYPRESS_GREP_ARGS="--env grepInvert='${GREP_INVERT_TAGS}'"
+  fi
+  echo "Exclude pattern (HAVEN_GREP_EXCLUDE): ${GREP_INVERT_TAGS}"
+fi
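With both lists populated, the two branches above compose a single --env argument. Continuing the illustrative values from the parsing sketch, the final command fragment would be roughly:

    npx cypress run --env grepTags='smoke login',grepInvert='quarantine'

Cypress parses comma-separated --env pairs into separate variables, so grepTags and grepInvert arrive independently; HAVEN_GREP_INCLUDE/HAVEN_GREP_EXCLUDE are exported as well, presumably for code that reads them outside the CLI arguments.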
+
+if [ -z "$GREP_TAGS" ] && [ -z "$GREP_INVERT_TAGS" ]; then
+  echo "No tags provided. Running all tests..."
+fi
+
+# Run Cypress
+echo "Running Cypress tests..."
+set +e
+
+if [ -n "$E2E_COMMAND" ]; then
+  echo "Using custom E2E command: $E2E_COMMAND"
+  eval "$E2E_COMMAND" 2>&1 | tee ${RESULTS_DIR}/logs.txt
 else
-  echo "
+  echo "Using default: npx cypress run"
+  echo "Final grep args: $CYPRESS_GREP_ARGS"
+  eval "npx cypress run \
+    --headless \
+    --browser chrome \
+    --reporter mochawesome \
+    --reporter-options 'reportDir=${MOCHAWESOME_DIR},overwrite=false,html=false,json=true,timestamp=mmddyyyy_HHMMss' \
+    --env grepFilterSpecs=true,grepUntagged=false,grepEnabled=true,TEST_ENVIRONMENT=${TEST_ENVIRONMENT} \
+    $CYPRESS_GREP_ARGS" 2>&1 | tee ${RESULTS_DIR}/logs.txt
 fi
 
-
-
-PRODUCT_NAME=${PRODUCT_NAME:-unknown_product}
-PLAN_ID=${PLAN_ID:-unknown_plan}
-RUN_ID=${RUN_ID:-unknown_run}
-BUCKET_NAME=${BUCKET_NAME:-your-default-bucket}
-TEST_ENVIRONMENT=${TEST_ENVIRONMENT:-QA}
+EXIT_CODE=${PIPESTATUS[0]}
+set -e
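Capturing ${PIPESTATUS[0]} matters here because the new 2>&1 | tee pipeline makes $? report tee's exit status rather than Cypress's. A minimal demonstration:

    false | tee /dev/null
    echo "$?"                # 0 - the pipeline's status is tee's
    false | tee /dev/null
    echo "${PIPESTATUS[0]}"  # 1 - status of the first command in the pipeline

This is also why the old version, which redirected with > results/logs.txt instead of piping, could use a plain $? (CYPRESS_EXIT_CODE=$?).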
 
-#
-
+# Stop background log uploader
+if [ -n "$LOG_UPLOADER_PID" ]; then
+  kill $LOG_UPLOADER_PID 2>/dev/null || true
+fi
 
-
-
+# Upload final complete log to S3
+if [ -f ${RESULTS_DIR}/logs.txt ]; then
+  echo "Uploading final test output to S3..."
+  aws s3 cp ${RESULTS_DIR}/logs.txt "${S3_ARTIFACT_PATH}/test-output.log" || echo "Failed to upload test output"
+fi
 
-#
-
-echo "💡 Final grep arg: $CYPRESS_GREP"
-eval "npx cypress run \
-  --headless \
-  --browser chrome \
-  --reporter mochawesome \
-  --reporter-options 'reportDir=results/mochawesome,overwrite=false,html=false,json=true,timestamp=mmddyyyy_HHMMss' \
-  --env grepFilterSpecs=true,grepUntagged=false,grepEnabled=true,TEST_ENVIRONMENT=${TEST_ENVIRONMENT} \
-  $CYPRESS_GREP \
-  > results/logs.txt 2>&1"
-
-CYPRESS_EXIT_CODE=$?
-
-# Smart HTML report generation logic
-MOCHAWESOME_DIR="results/mochawesome"
-MERGED_JSON="results/results.json"
+# Merge mochawesome JSON files
+MERGED_JSON="${RESULTS_DIR}/results.json"
 REPORT_HTML="${MOCHAWESOME_DIR}/report.html"
 
-echo "
-echo "
-ls -la ${MOCHAWESOME_DIR}/ || echo "
-echo "
-ls ${MOCHAWESOME_DIR}/*.json 2>/dev/null || echo "
-npx mochawesome-merge ${MOCHAWESOME_DIR}/*.json > "${MERGED_JSON}" 2>&1 || echo "
+echo "Attempting to merge mochawesome JSON files..."
+echo "Contents of ${MOCHAWESOME_DIR}:"
+ls -la ${MOCHAWESOME_DIR}/ || echo "Directory not found"
+echo "JSON files found:"
+ls ${MOCHAWESOME_DIR}/*.json 2>/dev/null || echo "No JSON files found"
+npx mochawesome-merge ${MOCHAWESOME_DIR}/*.json > "${MERGED_JSON}" 2>&1 || echo "Merge failed"
 
 if [ -s "${MERGED_JSON}" ]; then
-  echo "
+  echo "Merge successful. Generating report from merged JSON."
   npx marge "${MERGED_JSON}" --reportDir "${MOCHAWESOME_DIR}" --reportFilename report.html
 else
-  echo "
+  echo "Merge failed or empty. Falling back to first mochawesome JSON file..."
   FIRST_JSON=$(ls ${MOCHAWESOME_DIR}/*.json 2>/dev/null | head -n 1)
   if [ -f "${FIRST_JSON}" ]; then
     npx marge "${FIRST_JSON}" --reportDir "${MOCHAWESOME_DIR}" --reportFilename report.html
   else
-    echo "
+    echo "No valid mochawesome JSON file found for fallback."
   fi
 fi
 
@@ -113,11 +210,11 @@ fi
 SCREENSHOTS_DIR="cypress/screenshots"
 DEST_SCREENSHOTS_DIR="${MOCHAWESOME_DIR}/screenshots"
 if [ -d "$SCREENSHOTS_DIR" ]; then
-  echo "
+  echo "Copying screenshots..."
   mkdir -p "$DEST_SCREENSHOTS_DIR"
-  cp -r "$SCREENSHOTS_DIR"/* "$DEST_SCREENSHOTS_DIR" || echo "
+  cp -r "$SCREENSHOTS_DIR"/* "$DEST_SCREENSHOTS_DIR" || echo "No screenshots to copy"
 else
-  echo "
+  echo "No screenshots directory found."
 fi
 
 # Zip the mochawesome report

@@ -125,39 +222,36 @@ ZIP_NAME="${PRODUCT_NAME}_${TIMESTAMP}.zip"
 ZIP_PATH="/tmp/${ZIP_NAME}"
 
 if [ -d "${MOCHAWESOME_DIR}" ]; then
-  echo "
-  echo "
+  echo "Zipping mochawesome report and screenshots..."
+  echo "Contents to zip:"
   ls -la "${MOCHAWESOME_DIR}/"
-  zip -r "${ZIP_PATH}" "${MOCHAWESOME_DIR}" || echo "
-  echo "
+  zip -r "${ZIP_PATH}" "${MOCHAWESOME_DIR}" || echo "ZIP creation failed"
+  echo "ZIP created: ${ZIP_PATH}"
 else
-  echo "
+  echo "Mochawesome report folder not found: ${MOCHAWESOME_DIR}"
 fi
 
 # Copy artifacts to shared volume for EC2 to upload
 mkdir -p /shared/test-logs
 if [ -f "${ZIP_PATH}" ]; then
-  echo "
-  cp "${ZIP_PATH}" "/shared/test-logs/${ZIP_NAME}" || echo "
-else
-  echo "โ No zip file to copy"
+  echo "Copying ZIP to shared volume: ${ZIP_NAME}"
+  cp "${ZIP_PATH}" "/shared/test-logs/${ZIP_NAME}" || echo "Failed to copy ZIP to shared volume"
 fi
 
-# Copy HTML report file to shared volume
 if [ -f "${REPORT_HTML}" ]; then
-
-  echo "
-  cp "${REPORT_HTML}" "/shared/test-logs/${HTML_NAME}" || echo "โ Failed to copy HTML to shared volume"
-else
-  echo "โ No HTML report found to copy"
+  echo "Copying HTML to shared volume: report.html"
+  cp "${REPORT_HTML}" "/shared/test-logs/report.html" || echo "Failed to copy HTML to shared volume"
 fi
 
-#
-
-
+# Copy raw mochawesome reports for EC2 host to upload
+if [ -d "${MOCHAWESOME_DIR}" ]; then
+  echo "Copying mochawesome reports to shared volume for EC2 upload..."
+  mkdir -p /shared/mochawesome-report
+  cp -r "${MOCHAWESOME_DIR}"/* /shared/mochawesome-report/ || echo "Failed to copy mochawesome reports"
+fi
 
-#
-echo "
-
+# Post results to HAVEN API
+echo "Running syncCypressResults.js"
+PLAN_ID="${PLAN_ID}" RUN_ID="${RUN_ID}" TEST_ENVIRONMENT="${TEST_ENVIRONMENT}" node /haven/syncCypressResults.js "${MERGED_JSON}" || echo "Result sync script finished with warnings"
 
-exit $
+exit $EXIT_CODE
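For reference, a local run of the new script might look like the following. The exact --automationIds/--customTags syntax is handled by the case statement in the unchanged hunk (old lines 11-19), so treat the flags as hypothetical:

    PRODUCT_NAME=my_product PLAN_ID=plan123 RUN_ID=run456 BUCKET_NAME=my-bucket \
      ./run-filtered.sh --automationIds='TC-101,TC-205' --customTags='smoke'

If HAVEN_GREP_PATTERN is exported it takes precedence and the CLI fallback is skipped entirely.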