mirror of
https://github.com/OpenBankProject/OBP-API.git
synced 2026-02-06 11:06:49 +00:00
Merge pull request #2657 from hongwei1/develop
feature/update the run_all_test script
This commit is contained in:
commit
5d0ed75231
@ -3,7 +3,7 @@ name: Build and publish container non develop
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- '*'
|
||||
- '**'
|
||||
- '!develop'
|
||||
|
||||
env:
|
||||
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@ -44,3 +44,4 @@ project/project
|
||||
coursier
|
||||
metals.sbt
|
||||
obp-http4s-runner/src/main/resources/git.properties
|
||||
test-results
|
||||
@ -586,9 +586,15 @@
|
||||
<forkMode>once</forkMode>
|
||||
<junitxml>.</junitxml>
|
||||
<filereports>WDF TestSuite.txt</filereports>
|
||||
<argLine>-Drun.mode=test -XX:MaxMetaspaceSize=512m -Xms512m -Xmx512m --add-opens java.base/java.lang=ALL-UNNAMED --add-opens java.base/java.lang.reflect=ALL-UNNAMED --add-opens java.base/java.lang.invoke=ALL-UNNAMED --add-opens java.base/java.io=ALL-UNNAMED --add-opens java.base/java.util=ALL-UNNAMED --add-opens java.base/java.util.jar=ALL-UNNAMED --add-opens java.base/java.security=ALL-UNNAMED</argLine>
|
||||
<!-- Increased memory for faster test execution -->
|
||||
<argLine>-Drun.mode=test -XX:MaxMetaspaceSize=1G -Xms2G -Xmx4G -XX:+UseG1GC -XX:+TieredCompilation -XX:TieredStopAtLevel=1 -XX:+UseStringDeduplication --add-opens java.base/java.lang=ALL-UNNAMED --add-opens java.base/java.lang.reflect=ALL-UNNAMED --add-opens java.base/java.lang.invoke=ALL-UNNAMED --add-opens java.base/java.io=ALL-UNNAMED --add-opens java.base/java.util=ALL-UNNAMED --add-opens java.base/java.util.jar=ALL-UNNAMED --add-opens java.base/java.security=ALL-UNNAMED</argLine>
|
||||
<tagsToExclude>code.external</tagsToExclude>
|
||||
<testFailureIgnore>${maven.test.failure.ignore}</testFailureIgnore>
|
||||
<!-- Disable parallel test execution to avoid shared database state issues -->
|
||||
<!-- Tests share an in-memory H2 database which causes conflicts when run in parallel -->
|
||||
<!-- <parallel>true</parallel>-->
|
||||
<!-- <threadCount>4</threadCount>-->
|
||||
<parallel>false</parallel>
|
||||
</configuration>
|
||||
<executions>
|
||||
<execution>
|
||||
@ -667,15 +673,26 @@
|
||||
<version>4.8.1</version>
|
||||
<configuration>
|
||||
<fork>true</fork>
|
||||
<recompileMode>incremental</recompileMode>
|
||||
<useZincServer>true</useZincServer>
|
||||
<jvmArgs>
|
||||
<jvmArg>-Xms4G</jvmArg>
|
||||
<jvmArg>-Xmx12G</jvmArg>
|
||||
<jvmArg>-Xss4m</jvmArg>
|
||||
<jvmArg>-XX:MaxMetaspaceSize=4G</jvmArg>
|
||||
<jvmArg>-XX:+UseG1GC</jvmArg>
|
||||
<jvmArg>-XX:+TieredCompilation</jvmArg>
|
||||
<jvmArg>-XX:TieredStopAtLevel=1</jvmArg>
|
||||
</jvmArgs>
|
||||
<args>
|
||||
<arg>-deprecation</arg>
|
||||
<arg>-feature</arg>
|
||||
<!-- Enable language features to suppress warnings -->
|
||||
<arg>-language:implicitConversions</arg>
|
||||
<arg>-language:reflectiveCalls</arg>
|
||||
<arg>-language:postfixOps</arg>
|
||||
<!-- Suppress auto-application deprecation warning -->
|
||||
<arg>-Wconf:cat=deprecation&msg=auto-application:s</arg>
|
||||
</args>
|
||||
</configuration>
|
||||
</plugin>
|
||||
|
||||
@ -113,9 +113,14 @@
|
||||
<forkMode>once</forkMode>
|
||||
<junitxml>.</junitxml>
|
||||
<filereports>WDF TestSuite.txt</filereports>
|
||||
<argLine>-Drun.mode=test -XX:MaxMetaspaceSize=512m -Xms512m -Xmx512m</argLine>
|
||||
<!-- Increased memory for faster test execution -->
|
||||
<argLine>-Drun.mode=test -XX:MaxMetaspaceSize=1G -Xms2G -Xmx4G -XX:+UseG1GC -XX:+TieredCompilation -XX:TieredStopAtLevel=1 -XX:+UseStringDeduplication</argLine>
|
||||
<tagsToExclude>code.external</tagsToExclude>
|
||||
<testFailureIgnore>${maven.test.failure.ignore}</testFailureIgnore>
|
||||
<!-- Disable parallel test execution to avoid shared state issues -->
|
||||
<!-- <parallel>true</parallel>-->
|
||||
<!-- <threadCount>4</threadCount>-->
|
||||
<parallel>false</parallel>
|
||||
</configuration>
|
||||
<executions>
|
||||
<execution>
|
||||
|
||||
16
pom.xml
16
pom.xml
@ -21,7 +21,7 @@
|
||||
<!-- Common plugin settings -->
|
||||
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
|
||||
<project.reporting.outputEncoding>${project.build.sourceEncoding}</project.reporting.outputEncoding>
|
||||
<maven.test.failure.ignore>true</maven.test.failure.ignore>
|
||||
<maven.test.failure.ignore>false</maven.test.failure.ignore>
|
||||
<!-- vscaladoc settings -->
|
||||
<maven.scaladoc.vscaladocVersion>1.2-m1</maven.scaladoc.vscaladocVersion>
|
||||
<vscaladoc.links.liftweb.pathsufix>scaladocs/</vscaladoc.links.liftweb.pathsufix>
|
||||
@ -134,10 +134,14 @@
|
||||
<scalaVersion>${scala.compiler}</scalaVersion>
|
||||
<charset>${project.build.sourceEncoding}</charset>
|
||||
<displayCmd>true</displayCmd>
|
||||
<recompileMode>incremental</recompileMode>
|
||||
<useZincServer>true</useZincServer>
|
||||
<jvmArgs>
|
||||
<jvmArg>-DpackageLinkDefs=file://${project.build.directory}/packageLinkDefs.properties</jvmArg>
|
||||
<jvmArg>-Xms64m</jvmArg>
|
||||
<jvmArg>-Xmx1024m</jvmArg>
|
||||
<jvmArg>-Xms512m</jvmArg>
|
||||
<jvmArg>-Xmx2G</jvmArg>
|
||||
<jvmArg>-XX:+TieredCompilation</jvmArg>
|
||||
<jvmArg>-XX:TieredStopAtLevel=1</jvmArg>
|
||||
</jvmArgs>
|
||||
<args>
|
||||
<arg>-unchecked</arg>
|
||||
@ -147,6 +151,12 @@
|
||||
<arg>-deprecation</arg>
|
||||
-->
|
||||
<arg>-Ypartial-unification</arg>
|
||||
<!-- Enable language features to suppress warnings -->
|
||||
<arg>-language:implicitConversions</arg>
|
||||
<arg>-language:reflectiveCalls</arg>
|
||||
<arg>-language:postfixOps</arg>
|
||||
<!-- Suppress auto-application deprecation warning -->
|
||||
<arg>-Wconf:cat=deprecation&msg=auto-application:s</arg>
|
||||
</args>
|
||||
</configuration>
|
||||
<executions>
|
||||
|
||||
547
run_all_tests.sh
547
run_all_tests.sh
@ -14,30 +14,66 @@
|
||||
# Usage:
|
||||
# ./run_all_tests.sh - Run full test suite
|
||||
# ./run_all_tests.sh --summary-only - Regenerate summary from existing log
|
||||
# ./run_all_tests.sh --timeout=60 - Run with 60 minute timeout
|
||||
################################################################################
|
||||
|
||||
set -e
|
||||
# Don't use set -e globally - it causes issues with grep returning 1 when no match
|
||||
# Instead, we handle errors explicitly where needed
|
||||
|
||||
################################################################################
|
||||
# PARSE COMMAND LINE ARGUMENTS
|
||||
################################################################################
|
||||
|
||||
SUMMARY_ONLY=false
|
||||
if [ "$1" = "--summary-only" ]; then
|
||||
SUMMARY_ONLY=true
|
||||
fi
|
||||
TIMEOUT_MINUTES=0 # 0 means no timeout
|
||||
|
||||
for arg in "$@"; do
|
||||
case $arg in
|
||||
--summary-only)
|
||||
SUMMARY_ONLY=true
|
||||
;;
|
||||
--timeout=*)
|
||||
TIMEOUT_MINUTES="${arg#*=}"
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
################################################################################
|
||||
# TERMINAL STYLING FUNCTIONS
|
||||
################################################################################
|
||||
|
||||
# Set terminal to "test mode" - blue background, special title
|
||||
# Set terminal to "test mode" - different colors for different phases
|
||||
set_terminal_style() {
|
||||
local phase="${1:-Running}"
|
||||
echo -ne "\033]0;OBP-API Tests ${phase}...\007" # Title
|
||||
echo -ne "\033]11;#001f3f\007" # Dark blue background
|
||||
echo -ne "\033]10;#ffffff\007" # White text
|
||||
# Print header bar
|
||||
|
||||
# Set different background colors for different phases
|
||||
case "$phase" in
|
||||
"Starting")
|
||||
echo -ne "\033]11;#4a4a4a\007" # Dark gray background
|
||||
echo -ne "\033]10;#ffffff\007" # White text
|
||||
;;
|
||||
"Building")
|
||||
echo -ne "\033]11;#ff6b35\007" # Orange background
|
||||
echo -ne "\033]10;#ffffff\007" # White text
|
||||
;;
|
||||
"Testing")
|
||||
echo -ne "\033]11;#001f3f\007" # Dark blue background
|
||||
echo -ne "\033]10;#ffffff\007" # White text
|
||||
;;
|
||||
"Complete")
|
||||
echo -ne "\033]11;#2ecc40\007" # Green background
|
||||
echo -ne "\033]10;#ffffff\007" # White text
|
||||
;;
|
||||
*)
|
||||
echo -ne "\033]11;#001f3f\007" # Default blue background
|
||||
echo -ne "\033]10;#ffffff\007" # White text
|
||||
;;
|
||||
esac
|
||||
|
||||
# Set window title
|
||||
echo -ne "\033]0;OBP-API Tests ${phase}...\007"
|
||||
|
||||
# Print header bar with phase-specific styling
|
||||
printf "\033[44m\033[1;37m%-$(tput cols)s\r OBP-API TEST RUNNER ACTIVE - ${phase} \n%-$(tput cols)s\033[0m\n" " " " "
|
||||
}
|
||||
|
||||
@ -91,8 +127,74 @@ DETAIL_LOG="${LOG_DIR}/last_run.log" # Full Maven output
|
||||
SUMMARY_LOG="${LOG_DIR}/last_run_summary.log" # Summary only
|
||||
FAILED_TESTS_FILE="${LOG_DIR}/failed_tests.txt" # Failed test list for run_specific_tests.sh
|
||||
|
||||
# Phase timing variables (stored in temporary file)
|
||||
PHASE_START_TIME=0
|
||||
|
||||
mkdir -p "${LOG_DIR}"
|
||||
|
||||
# Function to get current time in milliseconds
|
||||
get_time_ms() {
|
||||
if [[ "$OSTYPE" == "darwin"* ]]; then
|
||||
# macOS
|
||||
python3 -c "import time; print(int(time.time() * 1000))"
|
||||
else
|
||||
# Linux
|
||||
date +%s%3N
|
||||
fi
|
||||
}
|
||||
|
||||
# Function to record phase timing
|
||||
record_phase_time() {
|
||||
local phase="$1"
|
||||
local current_time=$(get_time_ms)
|
||||
local timing_file="${LOG_DIR}/phase_timing.tmp"
|
||||
|
||||
case "$phase" in
|
||||
"starting")
|
||||
echo "PHASE_START_TIME=$current_time" > "$timing_file"
|
||||
;;
|
||||
"building")
|
||||
if [ -f "$timing_file" ]; then
|
||||
local phase_start=$(grep "PHASE_START_TIME=" "$timing_file" | cut -d= -f2)
|
||||
if [ "$phase_start" -gt 0 ]; then
|
||||
local starting_time=$((current_time - phase_start))
|
||||
echo "STARTING_TIME=$starting_time" >> "$timing_file"
|
||||
fi
|
||||
fi
|
||||
echo "PHASE_START_TIME=$current_time" >> "$timing_file"
|
||||
;;
|
||||
"testing")
|
||||
if [ -f "$timing_file" ]; then
|
||||
local phase_start=$(grep "PHASE_START_TIME=" "$timing_file" | tail -1 | cut -d= -f2)
|
||||
if [ "$phase_start" -gt 0 ]; then
|
||||
local building_time=$((current_time - phase_start))
|
||||
echo "BUILDING_TIME=$building_time" >> "$timing_file"
|
||||
fi
|
||||
fi
|
||||
echo "PHASE_START_TIME=$current_time" >> "$timing_file"
|
||||
;;
|
||||
"complete")
|
||||
if [ -f "$timing_file" ]; then
|
||||
local phase_start=$(grep "PHASE_START_TIME=" "$timing_file" | tail -1 | cut -d= -f2)
|
||||
if [ "$phase_start" -gt 0 ]; then
|
||||
local testing_time=$((current_time - phase_start))
|
||||
echo "TESTING_TIME=$testing_time" >> "$timing_file"
|
||||
fi
|
||||
fi
|
||||
echo "PHASE_START_TIME=$current_time" >> "$timing_file"
|
||||
;;
|
||||
"end")
|
||||
if [ -f "$timing_file" ]; then
|
||||
local phase_start=$(grep "PHASE_START_TIME=" "$timing_file" | tail -1 | cut -d= -f2)
|
||||
if [ "$phase_start" -gt 0 ]; then
|
||||
local complete_time=$((current_time - phase_start))
|
||||
echo "COMPLETE_TIME=$complete_time" >> "$timing_file"
|
||||
fi
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
# If summary-only mode, skip to summary generation
|
||||
if [ "$SUMMARY_ONLY" = true ]; then
|
||||
if [ ! -f "${DETAIL_LOG}" ]; then
|
||||
@ -130,16 +232,21 @@ fi
|
||||
rm -f "${LOG_DIR}/recent_lines.tmp"
|
||||
echo " - Removed stale temp file"
|
||||
fi
|
||||
if [ -f "${LOG_DIR}/phase_timing.tmp" ]; then
|
||||
rm -f "${LOG_DIR}/phase_timing.tmp"
|
||||
echo " - Removed stale timing file"
|
||||
fi
|
||||
fi # End of if [ "$SUMMARY_ONLY" = true ]
|
||||
|
||||
################################################################################
|
||||
# HELPER FUNCTIONS
|
||||
################################################################################
|
||||
|
||||
# Log message to terminal and summary file
|
||||
# Log message to terminal and both log files
|
||||
log_message() {
|
||||
echo "$1"
|
||||
echo "[$(date +"%Y-%m-%d %H:%M:%S")] $1" >> "${SUMMARY_LOG}"
|
||||
echo "$1" >> "${DETAIL_LOG}"
|
||||
}
|
||||
|
||||
# Print section header
|
||||
@ -231,8 +338,10 @@ generate_summary() {
|
||||
# If no timing info (summary-only mode), extract from log
|
||||
if [ $duration -eq 0 ] && grep -q "Total time:" "$detail_log"; then
|
||||
local time_str=$(grep "Total time:" "$detail_log" | tail -1)
|
||||
duration_min=$(echo "$time_str" | grep -oP '\d+(?= min)' || echo "0")
|
||||
duration_sec=$(echo "$time_str" | grep -oP '\d+(?=\.\d+ s)' || echo "0")
|
||||
duration_min=$(echo "$time_str" | sed 's/.*: //' | sed 's/ min.*//' | grep -o '[0-9]*' | head -1)
|
||||
[ -z "$duration_min" ] && duration_min="0"
|
||||
duration_sec=$(echo "$time_str" | sed 's/.* min //' | sed 's/\..*//' | grep -o '[0-9]*' | head -1)
|
||||
[ -z "$duration_sec" ] && duration_sec="0"
|
||||
fi
|
||||
|
||||
print_header "Test Results Summary"
|
||||
@ -244,22 +353,36 @@ generate_summary() {
|
||||
# Suites: completed M, aborted 0
|
||||
# Tests: succeeded N, failed 0, canceled 0, ignored 0, pending 0
|
||||
# All tests passed.
|
||||
# We need to extract the stats from the last test run (in case there are multiple modules)
|
||||
SCALATEST_SECTION=$(grep -A 4 "Run completed" "${detail_log}" | tail -5)
|
||||
if [ -n "$SCALATEST_SECTION" ]; then
|
||||
TOTAL_TESTS=$(echo "$SCALATEST_SECTION" | grep -oP "Total number of tests run: \K\d+" || echo "UNKNOWN")
|
||||
SUCCEEDED=$(echo "$SCALATEST_SECTION" | grep -oP "succeeded \K\d+" || echo "UNKNOWN")
|
||||
FAILED=$(echo "$SCALATEST_SECTION" | grep -oP "failed \K\d+" || echo "UNKNOWN")
|
||||
ERRORS=$(echo "$SCALATEST_SECTION" | grep -oP "errors \K\d+" || echo "0")
|
||||
SKIPPED=$(echo "$SCALATEST_SECTION" | grep -oP "ignored \K\d+" || echo "UNKNOWN")
|
||||
# We need to sum stats from ALL test runs (multiple modules: obp-commons, obp-api, etc.)
|
||||
|
||||
# Sum up all "Total number of tests run" values (macOS compatible - no grep -P)
|
||||
TOTAL_TESTS=$(grep "Total number of tests run:" "${detail_log}" 2>/dev/null | sed 's/.*Total number of tests run: //' | awk '{sum+=$1} END {print sum}' || echo "0")
|
||||
[ -z "$TOTAL_TESTS" ] || [ "$TOTAL_TESTS" = "0" ] && TOTAL_TESTS="UNKNOWN"
|
||||
|
||||
# Sum up all succeeded from "Tests: succeeded N, ..." lines
|
||||
SUCCEEDED=$(grep "Tests: succeeded" "${detail_log}" 2>/dev/null | sed 's/.*succeeded //' | sed 's/,.*//' | awk '{sum+=$1} END {print sum}' || echo "0")
|
||||
[ -z "$SUCCEEDED" ] && SUCCEEDED="UNKNOWN"
|
||||
|
||||
# Sum up all failed from "Tests: ... failed N, ..." lines
|
||||
FAILED=$(grep "Tests:.*failed" "${detail_log}" 2>/dev/null | sed 's/.*failed //' | sed 's/,.*//' | awk '{sum+=$1} END {print sum}' || echo "0")
|
||||
[ -z "$FAILED" ] && FAILED="0"
|
||||
|
||||
# Sum up all ignored from "Tests: ... ignored N, ..." lines
|
||||
IGNORED=$(grep "Tests:.*ignored" "${detail_log}" 2>/dev/null | sed 's/.*ignored //' | sed 's/,.*//' | awk '{sum+=$1} END {print sum}' || echo "0")
|
||||
[ -z "$IGNORED" ] && IGNORED="0"
|
||||
|
||||
# Sum up errors (if any)
|
||||
ERRORS=$(grep "errors" "${detail_log}" 2>/dev/null | grep -v "ERROR" | sed 's/.*errors //' | sed 's/[^0-9].*//' | awk '{sum+=$1} END {print sum}' || echo "0")
|
||||
[ -z "$ERRORS" ] && ERRORS="0"
|
||||
|
||||
# Calculate total including ignored (like IntelliJ does)
|
||||
if [ "$TOTAL_TESTS" != "UNKNOWN" ] && [ "$IGNORED" != "0" ]; then
|
||||
TOTAL_WITH_IGNORED=$((TOTAL_TESTS + IGNORED))
|
||||
else
|
||||
TOTAL_TESTS="UNKNOWN"
|
||||
SUCCEEDED="UNKNOWN"
|
||||
FAILED="UNKNOWN"
|
||||
ERRORS="0"
|
||||
SKIPPED="UNKNOWN"
|
||||
TOTAL_WITH_IGNORED="$TOTAL_TESTS"
|
||||
fi
|
||||
WARNINGS=$(grep -c "WARNING" "${detail_log}" || echo "UNKNOWN")
|
||||
|
||||
WARNINGS=$(grep -c "WARNING" "${detail_log}" 2>/dev/null || echo "0")
|
||||
|
||||
# Determine build status
|
||||
if grep -q "BUILD SUCCESS" "${detail_log}"; then
|
||||
@ -276,16 +399,153 @@ generate_summary() {
|
||||
# Print summary
|
||||
log_message "Test Run Summary"
|
||||
log_message "================"
|
||||
log_message "Timestamp: $(date)"
|
||||
log_message "Duration: ${duration_min}m ${duration_sec}s"
|
||||
|
||||
# Extract Maven timestamps and calculate Terminal timestamps
|
||||
local maven_start_timestamp=""
|
||||
local maven_end_timestamp=""
|
||||
local terminal_start_timestamp=""
|
||||
local terminal_end_timestamp=$(date)
|
||||
|
||||
if [ "$start_time" -gt 0 ] && [ "$end_time" -gt 0 ]; then
|
||||
# Use actual terminal start/end times if available
|
||||
terminal_start_timestamp=$(date -r "$start_time" 2>/dev/null || date -d "@$start_time" 2>/dev/null || echo "Unknown")
|
||||
terminal_end_timestamp=$(date -r "$end_time" 2>/dev/null || date -d "@$end_time" 2>/dev/null || echo "Unknown")
|
||||
else
|
||||
# Calculate terminal start time by subtracting duration from current time
|
||||
if [ "$duration_min" -gt 0 -o "$duration_sec" -gt 0 ]; then
|
||||
local total_seconds=$((duration_min * 60 + duration_sec))
|
||||
local approx_start_epoch=$(($(date "+%s") - total_seconds))
|
||||
terminal_start_timestamp=$(date -r "$approx_start_epoch" 2>/dev/null || echo "Approx. ${duration_min}m ${duration_sec}s ago")
|
||||
else
|
||||
terminal_start_timestamp="Unknown"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Extract Maven timestamps from log
|
||||
maven_end_timestamp=$(grep "Finished at:" "${detail_log}" | tail -1 | sed 's/.*Finished at: //' | sed 's/T/ /' | sed 's/+.*//' || echo "Unknown")
|
||||
|
||||
# Calculate Maven start time from Maven's "Total time" if available
|
||||
local maven_total_time=$(grep "Total time:" "${detail_log}" | tail -1 | sed 's/.*Total time: *//' | sed 's/ .*//' || echo "")
|
||||
if [ -n "$maven_total_time" ] && [ "$maven_end_timestamp" != "Unknown" ]; then
|
||||
# Parse Maven duration (e.g., "02:06" for "02:06 min" or "43.653" for "43.653 s")
|
||||
local maven_seconds=0
|
||||
if echo "$maven_total_time" | grep -q ":"; then
|
||||
# Format like "02:06" (minutes:seconds)
|
||||
local maven_min=$(echo "$maven_total_time" | sed 's/:.*//')
|
||||
local maven_sec=$(echo "$maven_total_time" | sed 's/.*://')
|
||||
# Remove leading zeros to avoid octal interpretation
|
||||
maven_min=$(echo "$maven_min" | sed 's/^0*//' | sed 's/^$/0/')
|
||||
maven_sec=$(echo "$maven_sec" | sed 's/^0*//' | sed 's/^$/0/')
|
||||
maven_seconds=$((maven_min * 60 + maven_sec))
|
||||
else
|
||||
# Format like "43.653" (seconds)
|
||||
maven_seconds=$(echo "$maven_total_time" | sed 's/\..*//')
|
||||
fi
|
||||
|
||||
# Calculate Maven start time
|
||||
if [ "$maven_seconds" -gt 0 ]; then
|
||||
local maven_end_epoch=$(date -j -f "%Y-%m-%d %H:%M:%S" "$maven_end_timestamp" "+%s" 2>/dev/null || echo "0")
|
||||
if [ "$maven_end_epoch" -gt 0 ]; then
|
||||
local maven_start_epoch=$((maven_end_epoch - maven_seconds))
|
||||
maven_start_timestamp=$(date -r "$maven_start_epoch" 2>/dev/null || echo "Unknown")
|
||||
else
|
||||
maven_start_timestamp="Unknown"
|
||||
fi
|
||||
else
|
||||
maven_start_timestamp="Unknown"
|
||||
fi
|
||||
else
|
||||
maven_start_timestamp="Unknown"
|
||||
fi
|
||||
|
||||
# Format Maven end timestamp nicely
|
||||
if [ "$maven_end_timestamp" != "Unknown" ]; then
|
||||
maven_end_timestamp=$(date -j -f "%Y-%m-%d %H:%M:%S" "$maven_end_timestamp" "+%a %b %d %H:%M:%S %Z %Y" 2>/dev/null || echo "$maven_end_timestamp")
|
||||
fi
|
||||
|
||||
# Display both timelines
|
||||
log_message "Terminal Timeline:"
|
||||
log_message " Started: ${terminal_start_timestamp}"
|
||||
log_message " Completed: ${terminal_end_timestamp}"
|
||||
log_message " Duration: ${duration_min}m ${duration_sec}s"
|
||||
log_message ""
|
||||
log_message "Maven Timeline:"
|
||||
log_message " Started: ${maven_start_timestamp}"
|
||||
log_message " Completed: ${maven_end_timestamp}"
|
||||
if [ -n "$maven_total_time" ]; then
|
||||
local maven_duration_display=$(grep "Total time:" "${detail_log}" | tail -1 | sed 's/.*Total time: *//' || echo "Unknown")
|
||||
log_message " Duration: ${maven_duration_display}"
|
||||
fi
|
||||
log_message ""
|
||||
log_message "Build Status: ${BUILD_STATUS}"
|
||||
log_message ""
|
||||
|
||||
# Phase timing breakdown (if available)
|
||||
local timing_file="${LOG_DIR}/phase_timing.tmp"
|
||||
if [ -f "$timing_file" ]; then
|
||||
# Read timing values from file
|
||||
local start_ms=$(grep "STARTING_TIME=" "$timing_file" | cut -d= -f2 2>/dev/null || echo "0")
|
||||
local build_ms=$(grep "BUILDING_TIME=" "$timing_file" | cut -d= -f2 2>/dev/null || echo "0")
|
||||
local test_ms=$(grep "TESTING_TIME=" "$timing_file" | cut -d= -f2 2>/dev/null || echo "0")
|
||||
local complete_ms=$(grep "COMPLETE_TIME=" "$timing_file" | cut -d= -f2 2>/dev/null || echo "0")
|
||||
|
||||
# Ensure we have numeric values (default to 0 if empty)
|
||||
[ -z "$start_ms" ] && start_ms=0
|
||||
[ -z "$build_ms" ] && build_ms=0
|
||||
[ -z "$test_ms" ] && test_ms=0
|
||||
[ -z "$complete_ms" ] && complete_ms=0
|
||||
|
||||
# Clean up timing file
|
||||
rm -f "$timing_file"
|
||||
|
||||
if [ "$start_ms" -gt 0 ] 2>/dev/null || [ "$build_ms" -gt 0 ] 2>/dev/null || [ "$test_ms" -gt 0 ] 2>/dev/null || [ "$complete_ms" -gt 0 ] 2>/dev/null; then
|
||||
log_message "Phase Timing Breakdown:"
|
||||
|
||||
if [ "$start_ms" -gt 0 ] 2>/dev/null; then
|
||||
log_message " Starting: ${start_ms}ms ($(printf "%.2f" $(echo "scale=2; $start_ms/1000" | bc))s)"
|
||||
fi
|
||||
if [ "$build_ms" -gt 0 ] 2>/dev/null; then
|
||||
log_message " Building: ${build_ms}ms ($(printf "%.2f" $(echo "scale=2; $build_ms/1000" | bc))s)"
|
||||
fi
|
||||
if [ "$test_ms" -gt 0 ] 2>/dev/null; then
|
||||
log_message " Testing: ${test_ms}ms ($(printf "%.2f" $(echo "scale=2; $test_ms/1000" | bc))s)"
|
||||
fi
|
||||
if [ "$complete_ms" -gt 0 ] 2>/dev/null; then
|
||||
log_message " Complete: ${complete_ms}ms ($(printf "%.2f" $(echo "scale=2; $complete_ms/1000" | bc))s)"
|
||||
fi
|
||||
|
||||
# Calculate percentages
|
||||
local total_phase_time=$((start_ms + build_ms + test_ms + complete_ms))
|
||||
if [ "$total_phase_time" -gt 0 ]; then
|
||||
log_message ""
|
||||
log_message "Phase Distribution:"
|
||||
if [ "$start_ms" -gt 0 ] 2>/dev/null; then
|
||||
local starting_pct=$(echo "scale=1; $start_ms * 100 / $total_phase_time" | bc)
|
||||
log_message " Starting: ${starting_pct}%"
|
||||
fi
|
||||
if [ "$build_ms" -gt 0 ] 2>/dev/null; then
|
||||
local building_pct=$(echo "scale=1; $build_ms * 100 / $total_phase_time" | bc)
|
||||
log_message " Building: ${building_pct}%"
|
||||
fi
|
||||
if [ "$test_ms" -gt 0 ] 2>/dev/null; then
|
||||
local testing_pct=$(echo "scale=1; $test_ms * 100 / $total_phase_time" | bc)
|
||||
log_message " Testing: ${testing_pct}%"
|
||||
fi
|
||||
if [ "$complete_ms" -gt 0 ] 2>/dev/null; then
|
||||
local complete_pct=$(echo "scale=1; $complete_ms * 100 / $total_phase_time" | bc)
|
||||
log_message " Complete: ${complete_pct}%"
|
||||
fi
|
||||
fi
|
||||
log_message ""
|
||||
fi
|
||||
fi
|
||||
|
||||
log_message "Test Statistics:"
|
||||
log_message " Total: ${TOTAL_TESTS}"
|
||||
log_message " Total: ${TOTAL_WITH_IGNORED} (${TOTAL_TESTS} run + ${IGNORED} ignored)"
|
||||
log_message " Succeeded: ${SUCCEEDED}"
|
||||
log_message " Failed: ${FAILED}"
|
||||
log_message " Ignored: ${IGNORED}"
|
||||
log_message " Errors: ${ERRORS}"
|
||||
log_message " Skipped: ${SKIPPED}"
|
||||
log_message " Warnings: ${WARNINGS}"
|
||||
log_message ""
|
||||
|
||||
@ -320,7 +580,7 @@ generate_summary() {
|
||||
|
||||
# Extract test class names from failures
|
||||
grep -B 20 "\*\*\* FAILED \*\*\*" "${detail_log}" | \
|
||||
grep -oP "^[A-Z][a-zA-Z0-9_]+(?=:)" | \
|
||||
grep -E "^[A-Z][a-zA-Z0-9_]+:" | sed 's/:$//' | \
|
||||
sort -u | \
|
||||
while read test_class; do
|
||||
# Try to find package by searching for the class in test files
|
||||
@ -375,6 +635,8 @@ fi
|
||||
# START TEST RUN
|
||||
################################################################################
|
||||
|
||||
# Record starting phase
|
||||
record_phase_time "starting"
|
||||
set_terminal_style "Starting"
|
||||
|
||||
# Start the test run
|
||||
@ -414,24 +676,67 @@ fi
|
||||
################################################################################
|
||||
|
||||
print_header "Checking Test Server Ports"
|
||||
log_message "Checking if test server port 8018 is available..."
|
||||
|
||||
# Check if port 8018 is in use
|
||||
if lsof -i :8018 >/dev/null 2>&1; then
|
||||
log_message "[WARNING] Port 8018 is in use - attempting to kill process"
|
||||
# Try to kill the process using the port
|
||||
PORT_PID=$(lsof -t -i :8018 2>/dev/null)
|
||||
# Default test port (can be overridden)
|
||||
TEST_PORT=8018
|
||||
MAX_PORT_ATTEMPTS=5
|
||||
|
||||
log_message "Checking if test server port ${TEST_PORT} is available..."
|
||||
|
||||
# Function to find an available port
|
||||
find_available_port() {
|
||||
local port=$1
|
||||
local max_attempts=$2
|
||||
local attempt=0
|
||||
|
||||
while [ $attempt -lt $max_attempts ]; do
|
||||
if ! lsof -i :$port >/dev/null 2>&1; then
|
||||
echo $port
|
||||
return 0
|
||||
fi
|
||||
port=$((port + 1))
|
||||
attempt=$((attempt + 1))
|
||||
done
|
||||
|
||||
echo ""
|
||||
return 1
|
||||
}
|
||||
|
||||
# Check if port is in use
|
||||
if lsof -i :${TEST_PORT} >/dev/null 2>&1; then
|
||||
log_message "[WARNING] Port ${TEST_PORT} is in use - attempting to kill process"
|
||||
PORT_PID=$(lsof -t -i :${TEST_PORT} 2>/dev/null || true)
|
||||
if [ -n "$PORT_PID" ]; then
|
||||
kill -9 $PORT_PID 2>/dev/null || true
|
||||
sleep 2
|
||||
log_message "[OK] Killed process $PORT_PID using port 8018"
|
||||
|
||||
# Verify port is now free
|
||||
if lsof -i :${TEST_PORT} >/dev/null 2>&1; then
|
||||
log_message "[WARNING] Could not free port ${TEST_PORT}, searching for alternative..."
|
||||
NEW_PORT=$(find_available_port $((TEST_PORT + 1)) $MAX_PORT_ATTEMPTS)
|
||||
if [ -n "$NEW_PORT" ]; then
|
||||
log_message "[OK] Found available port: ${NEW_PORT}"
|
||||
# Update test.default.props with new port
|
||||
if [ -f "${PROPS_FILE}" ]; then
|
||||
sed -i.bak "s/hostname=127.0.0.1:${TEST_PORT}/hostname=127.0.0.1:${NEW_PORT}/" "${PROPS_FILE}" 2>/dev/null || \
|
||||
sed -i '' "s/hostname=127.0.0.1:${TEST_PORT}/hostname=127.0.0.1:${NEW_PORT}/" "${PROPS_FILE}"
|
||||
log_message "[OK] Updated test.default.props to use port ${NEW_PORT}"
|
||||
TEST_PORT=$NEW_PORT
|
||||
fi
|
||||
else
|
||||
log_message "[ERROR] No available ports found in range ${TEST_PORT}-$((TEST_PORT + MAX_PORT_ATTEMPTS))"
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
log_message "[OK] Killed process $PORT_PID, port ${TEST_PORT} is now available"
|
||||
fi
|
||||
fi
|
||||
else
|
||||
log_message "[OK] Port 8018 is available"
|
||||
log_message "[OK] Port ${TEST_PORT} is available"
|
||||
fi
|
||||
|
||||
# Also check for any stale Java test processes
|
||||
STALE_TEST_PROCS=$(ps aux | grep -E "TestServer|ScalaTest.*obp-api" | grep -v grep | awk '{print $2}' || true)
|
||||
STALE_TEST_PROCS=$(ps aux | grep -E "TestServer|ScalaTest.*obp-api" | grep -v grep | awk '{print $2}' 2>/dev/null || true)
|
||||
if [ -n "$STALE_TEST_PROCS" ]; then
|
||||
log_message "[WARNING] Found stale test processes - cleaning up"
|
||||
echo "$STALE_TEST_PROCS" | xargs kill -9 2>/dev/null || true
|
||||
@ -481,7 +786,6 @@ log_message ""
|
||||
################################################################################
|
||||
|
||||
print_header "Running Tests"
|
||||
update_terminal_title "Building"
|
||||
log_message "Executing: mvn clean test"
|
||||
echo ""
|
||||
|
||||
@ -492,6 +796,13 @@ export START_TIME
|
||||
MONITOR_FLAG="${LOG_DIR}/monitor.flag"
|
||||
touch "${MONITOR_FLAG}"
|
||||
|
||||
# Optional timeout handling
|
||||
MAVEN_PID=""
|
||||
if [ "$TIMEOUT_MINUTES" -gt 0 ] 2>/dev/null; then
|
||||
log_message "[INFO] Test timeout set to ${TIMEOUT_MINUTES} minutes"
|
||||
TIMEOUT_SECONDS=$((TIMEOUT_MINUTES * 60))
|
||||
fi
|
||||
|
||||
# Background process: Monitor log file and update title bar with progress
|
||||
(
|
||||
# Wait for log file to be created and have Maven output
|
||||
@ -500,35 +811,50 @@ touch "${MONITOR_FLAG}"
|
||||
done
|
||||
|
||||
phase="Building"
|
||||
in_building=false
|
||||
in_testing=false
|
||||
timing_file="${LOG_DIR}/phase_timing.tmp"
|
||||
|
||||
# Keep monitoring until flag file is removed
|
||||
while [ -f "${MONITOR_FLAG}" ]; do
|
||||
# Use tail to look at recent lines only (last 500 lines for performance)
|
||||
# This ensures O(1) performance regardless of log file size
|
||||
recent_lines=$(tail -n 500 "${DETAIL_LOG}" 2>/dev/null)
|
||||
recent_lines=$(tail -n 500 "${DETAIL_LOG}" 2>/dev/null || true)
|
||||
|
||||
# Switch to "Building" phase when Maven starts compiling
|
||||
if ! $in_building && echo "$recent_lines" | grep -q -E 'Compiling|Building.*Open Bank Project' 2>/dev/null; then
|
||||
phase="Building"
|
||||
in_building=true
|
||||
# Record building phase and update terminal (inline to avoid subshell issues)
|
||||
current_time=$(python3 -c "import time; print(int(time.time() * 1000))" 2>/dev/null || date +%s000)
|
||||
if [ -f "$timing_file" ]; then
|
||||
phase_start=$(grep "PHASE_START_TIME=" "$timing_file" 2>/dev/null | tail -1 | cut -d= -f2 || echo "0")
|
||||
[ -n "$phase_start" ] && [ "$phase_start" -gt 0 ] 2>/dev/null && echo "STARTING_TIME=$((current_time - phase_start))" >> "$timing_file"
|
||||
fi
|
||||
echo "PHASE_START_TIME=$current_time" >> "$timing_file"
|
||||
echo -ne "\033]11;#ff6b35\007\033]10;#ffffff\007" # Orange background
|
||||
fi
|
||||
|
||||
# Switch to "Testing" phase when tests start
|
||||
if ! $in_testing && echo "$recent_lines" | grep -q "Run starting" 2>/dev/null; then
|
||||
phase="Testing"
|
||||
in_testing=true
|
||||
# Record testing phase
|
||||
current_time=$(python3 -c "import time; print(int(time.time() * 1000))" 2>/dev/null || date +%s000)
|
||||
if [ -f "$timing_file" ]; then
|
||||
phase_start=$(grep "PHASE_START_TIME=" "$timing_file" 2>/dev/null | tail -1 | cut -d= -f2 || echo "0")
|
||||
[ -n "$phase_start" ] && [ "$phase_start" -gt 0 ] 2>/dev/null && echo "BUILDING_TIME=$((current_time - phase_start))" >> "$timing_file"
|
||||
fi
|
||||
echo "PHASE_START_TIME=$current_time" >> "$timing_file"
|
||||
echo -ne "\033]11;#001f3f\007\033]10;#ffffff\007" # Blue background
|
||||
fi
|
||||
|
||||
# Extract current running test suite and scenario from recent lines
|
||||
suite=""
|
||||
scenario=""
|
||||
if $in_testing; then
|
||||
# Find the most recent test suite name (pattern like "SomeTest:")
|
||||
# Pipe directly to avoid temp file I/O
|
||||
suite=$(echo "$recent_lines" | grep -E "Test:" | tail -1 | sed 's/\x1b\[[0-9;]*m//g' | sed 's/:$//' | tr -d '\n\r')
|
||||
|
||||
# Find the most recent scenario name (pattern like " Scenario: ..." or "- Scenario: ...")
|
||||
scenario=$(echo "$recent_lines" | grep -i "scenario:" | tail -1 | sed 's/\x1b\[[0-9;]*m//g' | sed 's/^[[:space:]]*-*[[:space:]]*//' | sed -E 's/^[Ss]cenario:[[:space:]]*//' | tr -d '\n\r')
|
||||
|
||||
# Truncate scenario if too long (max 50 chars)
|
||||
if [ -n "$scenario" ] && [ ${#scenario} -gt 50 ]; then
|
||||
scenario="${scenario:0:47}..."
|
||||
fi
|
||||
suite=$(echo "$recent_lines" | grep -E "Test:" 2>/dev/null | tail -1 | sed 's/\x1b\[[0-9;]*m//g' | sed 's/:$//' | tr -d '\n\r' || true)
|
||||
scenario=$(echo "$recent_lines" | grep -i "scenario:" 2>/dev/null | tail -1 | sed 's/\x1b\[[0-9;]*m//g' | sed 's/^[[:space:]]*-*[[:space:]]*//' | sed -E 's/^[Ss]cenario:[[:space:]]*//' | tr -d '\n\r' || true)
|
||||
[ -n "$scenario" ] && [ ${#scenario} -gt 50 ] && scenario="${scenario:0:47}..."
|
||||
fi
|
||||
|
||||
# Calculate elapsed time
|
||||
@ -537,23 +863,102 @@ touch "${MONITOR_FLAG}"
|
||||
seconds=$((duration % 60))
|
||||
elapsed=$(printf "%dm %ds" $minutes $seconds)
|
||||
|
||||
# Update title: "Testing: DynamicEntityTest - Scenario name [5m 23s]"
|
||||
update_terminal_title "$phase" "$elapsed" "" "$suite" "$scenario"
|
||||
# Update title
|
||||
title="OBP-API ${phase}"
|
||||
[ -n "$suite" ] && title="${title}: ${suite}"
|
||||
[ -n "$scenario" ] && title="${title} - ${scenario}"
|
||||
title="${title}... [${elapsed}]"
|
||||
echo -ne "\033]0;${title}\007"
|
||||
|
||||
sleep 5
|
||||
done
|
||||
) &
|
||||
MONITOR_PID=$!
|
||||
|
||||
# Run Maven (all output goes to the terminal AND the detail log), with an
# optional wall-clock timeout.
#
# NOTE(fix): process substitution is used on the timeout path instead of
# `mvn | tee &` — with the pipeline form, $! captures the PID of *tee*
# (the last stage), so the timeout would kill tee while mvn kept running,
# and `wait $!` would report tee's exit status instead of the test result.
if [ "$TIMEOUT_MINUTES" -gt 0 ] 2>/dev/null; then
    # Background mvn itself; tee runs in a process substitution so $! is mvn.
    mvn clean test > >(tee "${DETAIL_LOG}") 2>&1 &
    MAVEN_PID=$!

    # Poll every 10s and terminate the run once the timeout is exceeded.
    elapsed=0
    while kill -0 "$MAVEN_PID" 2>/dev/null; do
        sleep 10
        elapsed=$((elapsed + 10))
        if [ "$elapsed" -ge "$TIMEOUT_SECONDS" ]; then
            log_message ""
            log_message "[TIMEOUT] Test execution exceeded ${TIMEOUT_MINUTES} minutes - terminating"
            # Graceful TERM first, then force-kill mvn and any child Java
            # processes it spawned.
            kill "$MAVEN_PID" 2>/dev/null || true
            sleep 2
            kill -9 "$MAVEN_PID" 2>/dev/null || true
            pkill -9 -P "$MAVEN_PID" 2>/dev/null || true
            TEST_RESULT="TIMEOUT"
            break
        fi
    done

    if [ "$TEST_RESULT" != "TIMEOUT" ]; then
        # `wait` now propagates mvn's own exit status.
        if wait "$MAVEN_PID"; then
            TEST_RESULT="SUCCESS"
        else
            TEST_RESULT="FAILURE"
        fi
    fi
else
    # No timeout configured: run Maven in the foreground.
    # NOTE(fix): check mvn's status via PIPESTATUS — `if mvn | tee` tests
    # tee's exit status (almost always 0) unless pipefail happens to be set.
    mvn clean test 2>&1 | tee "${DETAIL_LOG}"
    if [ "${PIPESTATUS[0]}" -eq 0 ]; then
        TEST_RESULT="SUCCESS"
    else
        TEST_RESULT="FAILURE"
    fi
fi
|
||||
|
||||
################################################################################
# GENERATE HTML REPORT
################################################################################

print_header "Generating HTML Report"
log_message "Running: mvn surefire-report:report-only -DskipTests"

# Generate the HTML report from the surefire XML files (without re-running
# tests).  On failure we only warn — the test results themselves still stand.
if mvn surefire-report:report-only -DskipTests 2>&1 | tee -a "${DETAIL_LOG}"; then
    log_message "[OK] HTML report generated"

    # Copy HTML reports into the test-results directory for easy access.
    HTML_REPORT_DIR="${LOG_DIR}/html-reports"
    mkdir -p "${HTML_REPORT_DIR}"

    # copy_module_report <module>
    # Copy <module>'s surefire HTML report plus its static assets into
    # HTML_REPORT_DIR, checking both report locations (the plain
    # surefire-reports path and the site-plugin path; the latter wins).
    copy_module_report() {
        local module=$1
        local src="${module}/target/surefire-reports/surefire.html"
        local site="${module}/target/site/surefire-report.html"
        local asset

        if [ -f "$src" ]; then
            cp "$src" "${HTML_REPORT_DIR}/${module}-report.html"
            # CSS/JS/images are needed for proper rendering (best-effort).
            for asset in css js images fonts img; do
                cp -r "${module}/target/surefire-reports/${asset}" "${HTML_REPORT_DIR}/" 2>/dev/null || true
            done
            log_message " - ${module} report: ${HTML_REPORT_DIR}/${module}-report.html"
        fi
        # Alternative naming used by the site plugin; overrides the above.
        if [ -f "$site" ]; then
            cp "$site" "${HTML_REPORT_DIR}/${module}-report.html"
            log_message " - ${module} report: ${HTML_REPORT_DIR}/${module}-report.html"
        fi
    }

    copy_module_report obp-api
    copy_module_report obp-commons
else
    log_message "[WARNING] Failed to generate HTML report"
fi
|
||||
|
||||
log_message ""

# Signal the background monitor to exit: its loop watches for this flag
# file and stops once the file disappears.
rm -f "${MONITOR_FLAG}"
# Give the monitor one polling beat to notice the flag and terminate.
sleep 1
|
||||
@ -568,11 +973,15 @@ DURATION_SEC=$((DURATION % 60))
|
||||
# Update title with final results (no suite/scenario name for Complete phase)
FINAL_ELAPSED=$(printf "%dm %ds" $DURATION_MIN $DURATION_SEC)

# Build the final per-module success counts from the Maven log.
# NOTE(fix): previously computed twice — first with GNU-only `grep -oP`,
# then overwritten by this sed pipeline; keep only the portable sed version
# (works on BSD/macOS too).  Each pipeline isolates one module's section of
# the log, picks its "Tests: succeeded N, ..." line, and extracts N.
FINAL_COMMONS=$(sed -n '/Building Open Bank Project Commons/,/Building Open Bank Project API/{/Tests: succeeded/p;}' "${DETAIL_LOG}" 2>/dev/null | sed 's/.*succeeded //' | sed 's/,.*//' | head -1)
FINAL_API=$(sed -n '/Building Open Bank Project API/,/OBP Http4s Runner/{/Tests: succeeded/p;}' "${DETAIL_LOG}" 2>/dev/null | sed 's/.*succeeded //' | sed 's/,.*//' | tail -1)
FINAL_COUNTS=""
[ -n "$FINAL_COMMONS" ] && FINAL_COUNTS="commons:+${FINAL_COMMONS}"
[ -n "$FINAL_API" ] && FINAL_COUNTS="${FINAL_COUNTS:+${FINAL_COUNTS} }api:+${FINAL_API}"

# Record complete phase start and change to green for completion phase
record_phase_time "complete"
set_terminal_style "Complete"
update_terminal_title "Complete" "$FINAL_ELAPSED" "$FINAL_COUNTS" "" ""
|
||||
|
||||
################################################################################
|
||||
@ -585,6 +994,9 @@ else
|
||||
EXIT_CODE=1
|
||||
fi
|
||||
|
||||
# Record the end time for the Complete phase in the timing file.
record_phase_time "end"

# Tell the user where the logs ended up.
log_message ""
log_message "Logs saved to:"
log_message " ${DETAIL_LOG}"
||||
@ -592,6 +1004,13 @@ log_message " ${SUMMARY_LOG}"
|
||||
# Mention the failed-tests file only if one was actually produced.
[ -f "${FAILED_TESTS_FILE}" ] && log_message " ${FAILED_TESTS_FILE}"

# List any generated HTML reports so they are easy to open from the log.
if [ -d "${LOG_DIR}/html-reports" ]; then
    log_message ""
    log_message "HTML Reports:"
    for html_report in "${LOG_DIR}/html-reports"/*.html; do
        if [ -f "$html_report" ]; then
            log_message " $html_report"
        fi
    done
fi
echo ""

exit ${EXIT_CODE}
|
||||
|
||||
Loading…
Reference in New Issue
Block a user