Merge remote-tracking branch 'upstream/develop' into develop

This commit is contained in:
Marko Milić 2026-01-13 09:02:12 +01:00
commit 8c3f2e2671
29 changed files with 1736 additions and 337 deletions

View File

@ -41,6 +41,7 @@ jobs:
cache: maven
- name: Build with Maven
run: |
set -o pipefail
cp obp-api/src/main/resources/props/sample.props.template obp-api/src/main/resources/props/production.default.props
echo connector=star > obp-api/src/main/resources/props/test.default.props
echo starConnector_supported_types=mapped,internal >> obp-api/src/main/resources/props/test.default.props
@ -76,7 +77,44 @@ jobs:
echo ResetPasswordUrlEnabled=true >> obp-api/src/main/resources/props/test.default.props
echo consents.allowed=true >> obp-api/src/main/resources/props/test.default.props
MAVEN_OPTS="-Xmx3G -Xss2m" mvn clean package -Pprod
MAVEN_OPTS="-Xmx3G -Xss2m" mvn clean package -Pprod 2>&1 | tee maven-build.log
- name: Report failing tests (if any)
if: always()
run: |
echo "Checking build log for failing tests via grep..."
if [ ! -f maven-build.log ]; then
echo "No maven-build.log found; skipping failure scan."
exit 0
fi
if grep -n "\*\*\* FAILED \*\*\*" maven-build.log; then
echo "Failing tests detected above."
exit 1
else
echo "No failing tests detected in maven-build.log."
fi
- name: Upload Maven build log
if: always()
uses: actions/upload-artifact@v4
with:
name: maven-build-log
if-no-files-found: ignore
path: |
maven-build.log
- name: Upload test reports
if: always()
uses: actions/upload-artifact@v4
with:
name: test-reports
if-no-files-found: ignore
path: |
obp-api/target/surefire-reports/**
obp-commons/target/surefire-reports/**
**/target/scalatest-reports/**
**/target/site/surefire-report.html
**/target/site/surefire-report/*
- name: Save .war artifact
run: |
@ -95,7 +133,7 @@ jobs:
docker push docker.io/${{ env.DOCKER_HUB_ORGANIZATION }}/${{ env.DOCKER_HUB_REPOSITORY }} --all-tags
echo docker done
- uses: sigstore/cosign-installer@main
- uses: sigstore/cosign-installer@4d14d7f17e7112af04ea6108fbb4bfc714c00390
- name: Write signing key to disk (only needed for `cosign sign --key`)
run: echo "${{ secrets.COSIGN_PRIVATE_KEY }}" > cosign.key
@ -116,4 +154,3 @@ jobs:
COSIGN_PASSWORD: "${{secrets.COSIGN_PASSWORD}}"

View File

@ -3,7 +3,7 @@ name: Build and publish container non develop
on:
push:
branches:
- '*'
- '**'
- '!develop'
env:
@ -40,6 +40,7 @@ jobs:
cache: maven
- name: Build with Maven
run: |
set -o pipefail
cp obp-api/src/main/resources/props/sample.props.template obp-api/src/main/resources/props/production.default.props
echo connector=star > obp-api/src/main/resources/props/test.default.props
echo starConnector_supported_types=mapped,internal >> obp-api/src/main/resources/props/test.default.props
@ -75,7 +76,44 @@ jobs:
echo ResetPasswordUrlEnabled=true >> obp-api/src/main/resources/props/test.default.props
echo consents.allowed=true >> obp-api/src/main/resources/props/test.default.props
MAVEN_OPTS="-Xmx3G -Xss2m" mvn clean package -Pprod
MAVEN_OPTS="-Xmx3G -Xss2m" mvn clean package -Pprod 2>&1 | tee maven-build.log
- name: Report failing tests (if any)
if: always()
run: |
echo "Checking build log for failing tests via grep..."
if [ ! -f maven-build.log ]; then
echo "No maven-build.log found; skipping failure scan."
exit 0
fi
if grep -n "\*\*\* FAILED \*\*\*" maven-build.log; then
echo "Failing tests detected above."
exit 1
else
echo "No failing tests detected in maven-build.log."
fi
- name: Upload Maven build log
if: always()
uses: actions/upload-artifact@v4
with:
name: maven-build-log
if-no-files-found: ignore
path: |
maven-build.log
- name: Upload test reports
if: always()
uses: actions/upload-artifact@v4
with:
name: test-reports
if-no-files-found: ignore
path: |
obp-api/target/surefire-reports/**
obp-commons/target/surefire-reports/**
**/target/scalatest-reports/**
**/target/site/surefire-report.html
**/target/site/surefire-report/*
- name: Save .war artifact
run: |
@ -94,7 +132,7 @@ jobs:
docker push docker.io/${{ env.DOCKER_HUB_ORGANIZATION }}/${{ env.DOCKER_HUB_REPOSITORY }} --all-tags
echo docker done
- uses: sigstore/cosign-installer@main
- uses: sigstore/cosign-installer@4d14d7f17e7112af04ea6108fbb4bfc714c00390
- name: Write signing key to disk (only needed for `cosign sign --key`)
run: echo "${{ secrets.COSIGN_PRIVATE_KEY }}" > cosign.key
@ -111,4 +149,3 @@ jobs:
COSIGN_PASSWORD: "${{secrets.COSIGN_PASSWORD}}"

View File

@ -36,6 +36,7 @@ jobs:
cache: maven
- name: Build with Maven
run: |
set -o pipefail
cp obp-api/src/main/resources/props/sample.props.template obp-api/src/main/resources/props/production.default.props
echo connector=star > obp-api/src/main/resources/props/test.default.props
echo starConnector_supported_types=mapped,internal >> obp-api/src/main/resources/props/test.default.props
@ -65,14 +66,50 @@ jobs:
echo COUNTERPARTY_OTP_INSTRUCTION_TRANSPORT=dummy >> obp-api/src/main/resources/props/test.default.props
echo SEPA_CREDIT_TRANSFERS_OTP_INSTRUCTION_TRANSPORT=dummy >> obp-api/src/main/resources/props/test.default.props
echo allow_oauth2_login=true >> obp-api/src/main/resources/props/test.default.props
echo oauth2.jwk_set.url=https://www.googleapis.com/oauth2/v3/certs >> obp-api/src/main/resources/props/test.default.props
echo ResetPasswordUrlEnabled=true >> obp-api/src/main/resources/props/test.default.props
echo consents.allowed=true >> obp-api/src/main/resources/props/test.default.props
MAVEN_OPTS="-Xmx3G -Xss2m" mvn clean package -Pprod
MAVEN_OPTS="-Xmx3G -Xss2m" mvn clean package -Pprod 2>&1 | tee maven-build.log
- name: Report failing tests (if any)
if: always()
run: |
echo "Checking build log for failing tests via grep..."
if [ ! -f maven-build.log ]; then
echo "No maven-build.log found; skipping failure scan."
exit 0
fi
if grep -n "\*\*\* FAILED \*\*\*" maven-build.log; then
echo "Failing tests detected above."
exit 1
else
echo "No failing tests detected in maven-build.log."
fi
- name: Upload Maven build log
if: always()
uses: actions/upload-artifact@v4
with:
name: maven-build-log
if-no-files-found: ignore
path: |
maven-build.log
- name: Upload test reports
if: always()
uses: actions/upload-artifact@v4
with:
name: test-reports
if-no-files-found: ignore
path: |
obp-api/target/surefire-reports/**
obp-commons/target/surefire-reports/**
**/target/scalatest-reports/**
**/target/site/surefire-report.html
**/target/site/surefire-report/*
- name: Save .war artifact
run: |

3
.gitignore vendored
View File

@ -1,3 +1,4 @@
.github/*
*.class
*.db
.DS_Store
@ -11,6 +12,7 @@
*.code-workspace
.zed
.cursor
.trae
.classpath
.project
.cache
@ -42,3 +44,4 @@ project/project
coursier
metals.sbt
obp-http4s-runner/src/main/resources/git.properties
test-results

View File

@ -76,7 +76,7 @@ MAVEN_OPTS="-Xms3G -Xmx6G -XX:MaxMetaspaceSize=2G" mvn -pl obp-http4s-runner -am
java -jar obp-http4s-runner/target/obp-http4s-runner.jar
```
The http4s server binds to `http4s.host` / `http4s.port` as configured in your props file (defaults are `127.0.0.1` and `8181`).
The http4s server binds to `http4s.host` / `http4s.port` as configured in your props file (defaults are `127.0.0.1` and `8086`).
### ZED IDE Setup

View File

@ -1,10 +1,13 @@
#!/bin/bash
# Script to flush Redis, build the project, and run Jetty
# Script to flush Redis, build the project, and run both Jetty and http4s servers
#
# This script should be run from the OBP-API root directory:
# cd /path/to/OBP-API
# ./flushall_build_and_run.sh
#
# The http4s server will run in the background on port 8081
# The Jetty server will run in the foreground on port 8080
set -e # Exit on error
@ -27,4 +30,29 @@ echo "=========================================="
echo "Building and running with Maven..."
echo "=========================================="
export MAVEN_OPTS="-Xss128m --add-opens java.base/java.lang=ALL-UNNAMED --add-opens java.base/java.lang.reflect=ALL-UNNAMED --add-opens java.base/java.util=ALL-UNNAMED --add-opens java.base/java.lang.invoke=ALL-UNNAMED --add-opens java.base/sun.reflect.generics.reflectiveObjects=ALL-UNNAMED"
mvn install -pl .,obp-commons && mvn jetty:run -pl obp-api
mvn install -pl .,obp-commons
echo ""
echo "=========================================="
echo "Building http4s runner..."
echo "=========================================="
export MAVEN_OPTS="-Xms3G -Xmx6G -XX:MaxMetaspaceSize=2G"
mvn -pl obp-http4s-runner -am clean package -DskipTests=true -Dmaven.test.skip=true
echo ""
echo "=========================================="
echo "Starting http4s server in background..."
echo "=========================================="
java -jar obp-http4s-runner/target/obp-http4s-runner.jar > http4s-server.log 2>&1 &
HTTP4S_PID=$!
echo "http4s server started with PID: $HTTP4S_PID (port 8081)"
echo "Logs are being written to: http4s-server.log"
echo ""
echo "To stop http4s server later: kill $HTTP4S_PID"
echo ""
echo "=========================================="
echo "Starting Jetty server (foreground)..."
echo "=========================================="
export MAVEN_OPTS="-Xss128m --add-opens java.base/java.lang=ALL-UNNAMED --add-opens java.base/java.lang.reflect=ALL-UNNAMED --add-opens java.base/java.util=ALL-UNNAMED --add-opens java.base/java.lang.invoke=ALL-UNNAMED --add-opens java.base/sun.reflect.generics.reflectiveObjects=ALL-UNNAMED"
mvn jetty:run -pl obp-api

View File

@ -586,8 +586,15 @@
<forkMode>once</forkMode>
<junitxml>.</junitxml>
<filereports>WDF TestSuite.txt</filereports>
<argLine>-Drun.mode=test -XX:MaxMetaspaceSize=512m -Xms512m -Xmx512m --add-opens java.base/java.lang=ALL-UNNAMED --add-opens java.base/java.lang.reflect=ALL-UNNAMED --add-opens java.base/java.lang.invoke=ALL-UNNAMED --add-opens java.base/java.io=ALL-UNNAMED --add-opens java.base/java.util=ALL-UNNAMED --add-opens java.base/java.util.jar=ALL-UNNAMED --add-opens java.base/java.security=ALL-UNNAMED</argLine>
<!-- Increased memory for faster test execution -->
<argLine>-Drun.mode=test -XX:MaxMetaspaceSize=1G -Xms2G -Xmx4G -XX:+UseG1GC -XX:+TieredCompilation -XX:TieredStopAtLevel=1 -XX:+UseStringDeduplication --add-opens java.base/java.lang=ALL-UNNAMED --add-opens java.base/java.lang.reflect=ALL-UNNAMED --add-opens java.base/java.lang.invoke=ALL-UNNAMED --add-opens java.base/java.io=ALL-UNNAMED --add-opens java.base/java.util=ALL-UNNAMED --add-opens java.base/java.util.jar=ALL-UNNAMED --add-opens java.base/java.security=ALL-UNNAMED</argLine>
<tagsToExclude>code.external</tagsToExclude>
<testFailureIgnore>${maven.test.failure.ignore}</testFailureIgnore>
<!-- Disable parallel test execution to avoid shared database state issues -->
<!-- Tests share an in-memory H2 database which causes conflicts when run in parallel -->
<!-- <parallel>true</parallel>-->
<!-- <threadCount>4</threadCount>-->
<parallel>false</parallel>
</configuration>
<executions>
<execution>
@ -597,6 +604,49 @@
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-report-plugin</artifactId>
<version>3.5.2</version>
<configuration>
<reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
<outputDirectory>${project.build.directory}/surefire-reports</outputDirectory>
</configuration>
<executions>
<execution>
<id>surefire-html-report</id>
<phase>package</phase>
<goals>
<goal>report-only</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<version>3.1.0</version>
<executions>
<execution>
<id>delete-surefire-xml-after-html</id>
<phase>verify</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<target>
<delete>
<fileset dir="${project.build.directory}/surefire-reports">
<include name="TEST-*.xml"/>
<include name="TESTS-*.xml"/>
</fileset>
</delete>
</target>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<!-- add src/main/java to source dirs -->
@ -623,15 +673,26 @@
<version>4.8.1</version>
<configuration>
<fork>true</fork>
<recompileMode>incremental</recompileMode>
<useZincServer>true</useZincServer>
<jvmArgs>
<jvmArg>-Xms4G</jvmArg>
<jvmArg>-Xmx12G</jvmArg>
<jvmArg>-Xss4m</jvmArg>
<jvmArg>-XX:MaxMetaspaceSize=4G</jvmArg>
<jvmArg>-XX:+UseG1GC</jvmArg>
<jvmArg>-XX:+TieredCompilation</jvmArg>
<jvmArg>-XX:TieredStopAtLevel=1</jvmArg>
</jvmArgs>
<args>
<arg>-deprecation</arg>
<arg>-feature</arg>
<!-- Enable language features to suppress warnings -->
<arg>-language:implicitConversions</arg>
<arg>-language:reflectiveCalls</arg>
<arg>-language:postfixOps</arg>
<!-- Suppress auto-application deprecation warning -->
<arg>-Wconf:cat=deprecation&amp;msg=auto-application:s</arg>
</args>
</configuration>
</plugin>

View File

@ -1703,6 +1703,6 @@ securelogging_mask_email=true
############################################
# Host and port for http4s server (used by bootstrap.http4s.Http4sServer)
# Defaults (if not set) are 127.0.0.1 and 8181
# Defaults (if not set) are 127.0.0.1 and 8086
http4s.host=127.0.0.1
http4s.port=8086

View File

@ -11,17 +11,15 @@ import org.http4s.implicits._
import scala.language.higherKinds
object Http4sServer extends IOApp {
val services: Kleisli[({type λ[β$0$] = OptionT[IO, β$0$]})#λ, Request[IO], Response[IO]] =
code.api.v7_0_0.Http4s700.wrappedRoutesV700Services
val httpApp: Kleisli[IO, Request[IO], Response[IO]] = (services).orNotFound
//Start OBP relevant objects, and settings
//Start OBP relevant objects and settings; this step MUST be executed first
new bootstrap.http4s.Http4sBoot().boot
val port = APIUtil.getPropsAsIntValue("http4s.port",8181)
val port = APIUtil.getPropsAsIntValue("http4s.port",8086)
val host = APIUtil.getPropsValue("http4s.host","127.0.0.1")
val services: HttpRoutes[IO] = code.api.v7_0_0.Http4s700.wrappedRoutesV700Services
val httpApp: Kleisli[IO, Request[IO], Response[IO]] = (services).orNotFound
override def run(args: List[String]): IO[ExitCode] = EmberServerBuilder
.default[IO]

View File

@ -228,6 +228,71 @@ object OAuth2Login extends RestHelper with MdcLoggable {
def urlOfJwkSets: Box[String] = Constant.oauth2JwkSetUrl
/**
* Get all JWKS URLs from configuration.
* This is a helper method for trying multiple JWKS URLs when validating tokens.
* We need more than one JWKS URL if we have multiple OIDC providers configured etc.
* @return List of all configured JWKS URLs
*/
protected def getAllJwksUrls: List[String] = {
val url: List[String] = Constant.oauth2JwkSetUrl.toList
url.flatMap(_.split(",").toList).map(_.trim).filter(_.nonEmpty)
}
/**
* Try to validate a JWT token with multiple JWKS URLs.
* This is a generic retry mechanism that works for both ID tokens and access tokens.
*
* @param token The JWT token to validate
* @param tokenType Description of token type for logging (e.g., "ID token", "access token")
* @param validateFunc Function that validates token against a JWKS URL
* @tparam T The type of claims returned (IDTokenClaimsSet or JWTClaimsSet)
* @return Boxed claims or failure
*/
protected def tryValidateWithAllJwksUrls[T](
token: String,
tokenType: String,
validateFunc: (String, String) => Box[T]
): Box[T] = {
logger.debug(s"tryValidateWithAllJwksUrls - attempting to validate $tokenType")
// Extract issuer for better error reporting
val actualIssuer = JwtUtil.getIssuer(token).getOrElse("NO_ISSUER_CLAIM")
logger.debug(s"tryValidateWithAllJwksUrls - JWT issuer claim: '$actualIssuer'")
// Get all JWKS URLs
val allJwksUrls = getAllJwksUrls
if (allJwksUrls.isEmpty) {
logger.debug(s"tryValidateWithAllJwksUrls - No JWKS URLs configured")
return Failure(Oauth2ThereIsNoUrlOfJwkSet)
}
logger.debug(s"tryValidateWithAllJwksUrls - Will try ${allJwksUrls.size} JWKS URL(s): $allJwksUrls")
// Try each JWKS URL until one succeeds
val results = allJwksUrls.map { url =>
logger.debug(s"tryValidateWithAllJwksUrls - Trying JWKS URL: '$url'")
val result = validateFunc(token, url)
result match {
case Full(_) =>
logger.debug(s"tryValidateWithAllJwksUrls - SUCCESS with JWKS URL: '$url'")
case Failure(msg, _, _) =>
logger.debug(s"tryValidateWithAllJwksUrls - FAILED with JWKS URL: '$url', reason: $msg")
case _ =>
logger.debug(s"tryValidateWithAllJwksUrls - FAILED with JWKS URL: '$url'")
}
result
}
// Return the first successful result, or the last failure
results.find(_.isDefined).getOrElse {
logger.debug(s"tryValidateWithAllJwksUrls - All ${allJwksUrls.size} JWKS URL(s) failed for issuer: '$actualIssuer'")
logger.debug(s"tryValidateWithAllJwksUrls - Tried URLs: $allJwksUrls")
results.lastOption.getOrElse(Failure(Oauth2ThereIsNoUrlOfJwkSet))
}
}
def checkUrlOfJwkSets(identityProvider: String) = {
val url: List[String] = Constant.oauth2JwkSetUrl.toList
val jwksUris: List[String] = url.map(_.toLowerCase()).map(_.split(",").toList).flatten
@ -310,47 +375,10 @@ object OAuth2Login extends RestHelper with MdcLoggable {
}.getOrElse(false)
}
def validateIdToken(idToken: String): Box[IDTokenClaimsSet] = {
logger.debug(s"validateIdToken - attempting to validate ID token")
// Extract issuer for better error reporting
val actualIssuer = JwtUtil.getIssuer(idToken).getOrElse("NO_ISSUER_CLAIM")
logger.debug(s"validateIdToken - JWT issuer claim: '$actualIssuer'")
urlOfJwkSets match {
case Full(url) =>
logger.debug(s"validateIdToken - using JWKS URL: '$url'")
JwtUtil.validateIdToken(idToken, url)
case ParamFailure(a, b, c, apiFailure : APIFailure) =>
logger.debug(s"validateIdToken - ParamFailure: $a, $b, $c, $apiFailure")
logger.debug(s"validateIdToken - JWT issuer was: '$actualIssuer'")
ParamFailure(a, b, c, apiFailure : APIFailure)
case Failure(msg, t, c) =>
logger.debug(s"validateIdToken - Failure getting JWKS URL: $msg")
logger.debug(s"validateIdToken - JWT issuer was: '$actualIssuer'")
if (msg.contains("OBP-20208")) {
logger.debug("validateIdToken - OBP-20208 Error Details:")
logger.debug(s"validateIdToken - JWT issuer claim: '$actualIssuer'")
logger.debug(s"validateIdToken - oauth2.jwk_set.url value: '${Constant.oauth2JwkSetUrl}'")
logger.debug("validateIdToken - Check that the JWKS URL configuration matches the JWT issuer")
}
Failure(msg, t, c)
case _ =>
logger.debug("validateIdToken - No JWKS URL available")
logger.debug(s"validateIdToken - JWT issuer was: '$actualIssuer'")
Failure(Oauth2ThereIsNoUrlOfJwkSet)
}
tryValidateWithAllJwksUrls(idToken, "ID token", JwtUtil.validateIdToken)
}
def validateAccessToken(accessToken: String): Box[JWTClaimsSet] = {
urlOfJwkSets match {
case Full(url) =>
JwtUtil.validateAccessToken(accessToken, url)
case ParamFailure(a, b, c, apiFailure : APIFailure) =>
ParamFailure(a, b, c, apiFailure : APIFailure)
case Failure(msg, t, c) =>
Failure(msg, t, c)
case _ =>
Failure(Oauth2ThereIsNoUrlOfJwkSet)
}
tryValidateWithAllJwksUrls(accessToken, "access token", JwtUtil.validateAccessToken)
}
/** New Style Endpoints
* This function creates user based on "iss" and "sub" fields

View File

@ -209,11 +209,16 @@ object ResourceDocs300 extends OBPRestHelper with ResourceDocsAPIMethods with Md
val resourceDocsJson = JSONFactory1_4_0.createResourceDocsJson(resourceDocs, isVersion4OrHigher, locale)
resourceDocsJson.resource_docs
case _ =>
// Get all resource docs for the requested version
val allResourceDocs = ImplementationsResourceDocs.getResourceDocsList(requestedApiVersion).getOrElse(List.empty)
val filteredResourceDocs = ResourceDocsAPIMethodsUtil.filterResourceDocs(allResourceDocs, resourceDocTags, partialFunctions)
val resourceDocJson = JSONFactory1_4_0.createResourceDocsJson(filteredResourceDocs, isVersion4OrHigher, locale)
resourceDocJson.resource_docs
contentParam match {
case Some(DYNAMIC) =>
ImplementationsResourceDocs.getResourceDocsObpDynamicCached(resourceDocTags, partialFunctions, locale, None, isVersion4OrHigher).head.resource_docs
case Some(STATIC) => {
ImplementationsResourceDocs.getStaticResourceDocsObpCached(requestedApiVersionString, resourceDocTags, partialFunctions, locale, isVersion4OrHigher).head.resource_docs
}
case _ => {
ImplementationsResourceDocs.getAllResourceDocsObpCached(requestedApiVersionString, resourceDocTags, partialFunctions, locale, contentParam, isVersion4OrHigher).head.resource_docs
}
}
}
val hostname = HostName

View File

@ -1,9 +1,10 @@
package code.api.ResourceDocs1_4_0
import scala.language.reflectiveCalls
import code.api.Constant.{GET_DYNAMIC_RESOURCE_DOCS_TTL, GET_STATIC_RESOURCE_DOCS_TTL, HostName, PARAM_LOCALE}
import code.api.OBPRestHelper
import code.api.cache.Caching
import code.api.dynamic.endpoint.OBPAPIDynamicEndpoint
import code.api.dynamic.entity.OBPAPIDynamicEntity
import code.api.util.APIUtil._
import code.api.util.ApiRole.{canReadDynamicResourceDocsAtOneBank, canReadResourceDoc}
import code.api.util.ApiTag._
@ -21,12 +22,9 @@ import code.api.v4_0_0.{APIMethods400, OBPAPI4_0_0}
import code.api.v5_0_0.OBPAPI5_0_0
import code.api.v5_1_0.OBPAPI5_1_0
import code.api.v6_0_0.OBPAPI6_0_0
import code.api.dynamic.endpoint.OBPAPIDynamicEndpoint
import code.api.dynamic.entity.OBPAPIDynamicEntity
import code.apicollectionendpoint.MappedApiCollectionEndpointsProvider
import code.util.Helper
import code.util.Helper.{MdcLoggable, ObpS, SILENCE_IS_GOLDEN}
import net.liftweb.http.S
import com.github.dwickern.macros.NameOf.nameOf
import com.openbankproject.commons.model.enums.ContentParam
import com.openbankproject.commons.model.enums.ContentParam.{ALL, DYNAMIC, STATIC}
@ -34,6 +32,7 @@ import com.openbankproject.commons.model.{BankId, ListResult, User}
import com.openbankproject.commons.util.ApiStandards._
import com.openbankproject.commons.util.{ApiVersion, ScannedApiVersion}
import net.liftweb.common.{Box, Empty, Full}
import net.liftweb.http.{LiftRules, S}
import net.liftweb.http.{InMemoryResponse, LiftRules, PlainTextResponse}
import net.liftweb.json
import net.liftweb.json.JsonAST.{JField, JString, JValue}
@ -119,7 +118,8 @@ trait ResourceDocsAPIMethods extends MdcLoggable with APIMethods220 with APIMeth
logger.debug(s"getResourceDocsList says requestedApiVersion is $requestedApiVersion")
val resourceDocs: ArrayBuffer[ResourceDoc] = requestedApiVersion match {
val resourceDocs = requestedApiVersion match {
case ApiVersion.v7_0_0 => code.api.v7_0_0.Http4s700.resourceDocs
case ApiVersion.v6_0_0 => OBPAPI6_0_0.allResourceDocs
case ApiVersion.v5_1_0 => OBPAPI5_1_0.allResourceDocs
case ApiVersion.v5_0_0 => OBPAPI5_0_0.allResourceDocs
@ -140,7 +140,8 @@ trait ResourceDocsAPIMethods extends MdcLoggable with APIMethods220 with APIMeth
logger.debug(s"There are ${resourceDocs.length} resource docs available to $requestedApiVersion")
val versionRoutes: immutable.Seq[OBPEndpoint] = requestedApiVersion match {
val versionRoutes = requestedApiVersion match {
case ApiVersion.v7_0_0 => Nil
case ApiVersion.v6_0_0 => OBPAPI6_0_0.routes
case ApiVersion.v5_1_0 => OBPAPI5_1_0.routes
case ApiVersion.v5_0_0 => OBPAPI5_0_0.routes
@ -167,7 +168,10 @@ trait ResourceDocsAPIMethods extends MdcLoggable with APIMethods220 with APIMeth
val versionRoutesClasses = versionRoutes.map { vr => vr.getClass }
// Only return the resource docs that have available routes
val activeResourceDocs = resourceDocs.filter(rd => versionRoutesClasses.contains(rd.partialFunction.getClass))
val activeResourceDocs = requestedApiVersion match {
case ApiVersion.v7_0_0 => resourceDocs
case _ => resourceDocs.filter(rd => versionRoutesClasses.contains(rd.partialFunction.getClass))
}
logger.debug(s"There are ${activeResourceDocs.length} resource docs available to $requestedApiVersion")
@ -227,7 +231,7 @@ trait ResourceDocsAPIMethods extends MdcLoggable with APIMethods220 with APIMeth
* @param contentParam if this is Some(`true`), only show dynamic endpoints, if Some(`false`), only show static. If it is None, we will show all. default is None
* @return
*/
private def getStaticResourceDocsObpCached(
def getStaticResourceDocsObpCached(
requestedApiVersionString: String,
resourceDocTags: Option[List[ResourceDocTag]],
partialFunctionNames: Option[List[String]],
@ -247,7 +251,7 @@ trait ResourceDocsAPIMethods extends MdcLoggable with APIMethods220 with APIMeth
* @param contentParam if this is Some(`true`), only show dynamic endpoints, if Some(`false`), only show static. If it is None, we will show all. default is None
* @return
*/
private def getAllResourceDocsObpCached(
def getAllResourceDocsObpCached(
requestedApiVersionString: String,
resourceDocTags: Option[List[ResourceDocTag]],
partialFunctionNames: Option[List[String]],
@ -290,7 +294,7 @@ trait ResourceDocsAPIMethods extends MdcLoggable with APIMethods220 with APIMeth
}
private def getResourceDocsObpDynamicCached(
def getResourceDocsObpDynamicCached(
resourceDocTags: Option[List[ResourceDocTag]],
partialFunctionNames: Option[List[String]],
locale: Option[String],
@ -1253,4 +1257,3 @@ so the caller must specify any required filtering by catalog explicitly.
}

View File

@ -25,4 +25,22 @@ object InMemory extends MdcLoggable {
logger.trace(s"InMemory.memoizeWithInMemory.underlyingGuavaCache size ${underlyingGuavaCache.size()}, current cache key is $cacheKey")
memoize(ttl)(f)
}
/**
* Count keys matching a pattern in the in-memory cache
* @param pattern Pattern to match (supports * wildcard)
* @return Number of matching keys
*/
def countKeys(pattern: String): Int = {
try {
val regex = pattern.replace("*", ".*").r
val allKeys = underlyingGuavaCache.asMap().keySet()
import scala.collection.JavaConverters._
allKeys.asScala.count(key => regex.pattern.matcher(key).matches())
} catch {
case e: Throwable =>
logger.error(s"Error counting in-memory cache keys for pattern $pattern: ${e.getMessage}")
0
}
}
}

View File

@ -73,12 +73,12 @@ object RedisLogger {
/** Map a LogLevel to its required entitlements */
def requiredRoles(level: LogLevel): List[ApiRole] = level match {
case TRACE => List(canGetTraceLevelLogsAtAllBanks, canGetAllLevelLogsAtAllBanks)
case DEBUG => List(canGetDebugLevelLogsAtAllBanks, canGetAllLevelLogsAtAllBanks)
case INFO => List(canGetInfoLevelLogsAtAllBanks, canGetAllLevelLogsAtAllBanks)
case WARNING => List(canGetWarningLevelLogsAtAllBanks, canGetAllLevelLogsAtAllBanks)
case ERROR => List(canGetErrorLevelLogsAtAllBanks, canGetAllLevelLogsAtAllBanks)
case ALL => List(canGetAllLevelLogsAtAllBanks)
case TRACE => List(canGetSystemLogCacheTrace, canGetSystemLogCacheAll)
case DEBUG => List(canGetSystemLogCacheDebug, canGetSystemLogCacheAll)
case INFO => List(canGetSystemLogCacheInfo, canGetSystemLogCacheAll)
case WARNING => List(canGetSystemLogCacheWarning, canGetSystemLogCacheAll)
case ERROR => List(canGetSystemLogCacheError, canGetSystemLogCacheAll)
case ALL => List(canGetSystemLogCacheAll)
}
}

View File

@ -30,6 +30,7 @@ package code.api.util
import scala.language.implicitConversions
import scala.language.reflectiveCalls
import bootstrap.liftweb.CustomDBVendor
import cats.effect.IO
import code.accountholders.AccountHolders
import code.api.Constant._
import code.api.OAuthHandshake._
@ -99,6 +100,7 @@ import net.liftweb.util.Helpers._
import net.liftweb.util._
import org.apache.commons.io.IOUtils
import org.apache.commons.lang3.StringUtils
import org.http4s.HttpRoutes
import java.io.InputStream
import java.net.URLDecoder
@ -1639,7 +1641,8 @@ object APIUtil extends MdcLoggable with CustomJsonFormats{
isFeatured: Boolean = false,
specialInstructions: Option[String] = None,
var specifiedUrl: Option[String] = None, // A derived value: Contains the called version (added at run time). See the resource doc for resource doc!
createdByBankId: Option[String] = None //we need to filter the resource Doc by BankId
createdByBankId: Option[String] = None, //we need to filter the resource Doc by BankId
http4sPartialFunction: Http4sEndpoint = None // http4s endpoint handler
) {
// this code block will be merged to constructor.
{
@ -2792,6 +2795,7 @@ object APIUtil extends MdcLoggable with CustomJsonFormats{
type OBPEndpoint = PartialFunction[Req, CallContext => Box[JsonResponse]]
type OBPReturnType[T] = Future[(T, Option[CallContext])]
type Http4sEndpoint = Option[HttpRoutes[IO]]
def getAllowedEndpoints (endpoints : Iterable[OBPEndpoint], resourceDocs: ArrayBuffer[ResourceDoc]) : List[OBPEndpoint] = {

View File

@ -107,35 +107,23 @@ object ApiRole extends MdcLoggable{
// TRACE
case class CanGetTraceLevelLogsAtOneBank(requiresBankId: Boolean = true) extends ApiRole
lazy val canGetTraceLevelLogsAtOneBank = CanGetTraceLevelLogsAtOneBank()
case class CanGetTraceLevelLogsAtAllBanks(requiresBankId: Boolean = false) extends ApiRole
lazy val canGetTraceLevelLogsAtAllBanks = CanGetTraceLevelLogsAtAllBanks()
case class CanGetSystemLogCacheTrace(requiresBankId: Boolean = false) extends ApiRole
lazy val canGetSystemLogCacheTrace = CanGetSystemLogCacheTrace()
// DEBUG
case class CanGetDebugLevelLogsAtOneBank(requiresBankId: Boolean = true) extends ApiRole
lazy val canGetDebugLevelLogsAtOneBank = CanGetDebugLevelLogsAtOneBank()
case class CanGetDebugLevelLogsAtAllBanks(requiresBankId: Boolean = false) extends ApiRole
lazy val canGetDebugLevelLogsAtAllBanks = CanGetDebugLevelLogsAtAllBanks()
case class CanGetSystemLogCacheDebug(requiresBankId: Boolean = false) extends ApiRole
lazy val canGetSystemLogCacheDebug = CanGetSystemLogCacheDebug()
// INFO
case class CanGetInfoLevelLogsAtOneBank(requiresBankId: Boolean = true) extends ApiRole
lazy val canGetInfoLevelLogsAtOneBank = CanGetInfoLevelLogsAtOneBank()
case class CanGetInfoLevelLogsAtAllBanks(requiresBankId: Boolean = false) extends ApiRole
lazy val canGetInfoLevelLogsAtAllBanks = CanGetInfoLevelLogsAtAllBanks()
case class CanGetSystemLogCacheInfo(requiresBankId: Boolean = false) extends ApiRole
lazy val canGetSystemLogCacheInfo = CanGetSystemLogCacheInfo()
// WARNING
case class CanGetWarningLevelLogsAtOneBank(requiresBankId: Boolean = true) extends ApiRole
lazy val canGetWarningLevelLogsAtOneBank = CanGetWarningLevelLogsAtOneBank()
case class CanGetWarningLevelLogsAtAllBanks(requiresBankId: Boolean = false) extends ApiRole
lazy val canGetWarningLevelLogsAtAllBanks = CanGetWarningLevelLogsAtAllBanks()
case class CanGetSystemLogCacheWarning(requiresBankId: Boolean = false) extends ApiRole
lazy val canGetSystemLogCacheWarning = CanGetSystemLogCacheWarning()
// ERROR
case class CanGetErrorLevelLogsAtOneBank(requiresBankId: Boolean = true) extends ApiRole
lazy val canGetErrorLevelLogsAtOneBank = CanGetErrorLevelLogsAtOneBank()
case class CanGetErrorLevelLogsAtAllBanks(requiresBankId: Boolean = false) extends ApiRole
lazy val canGetErrorLevelLogsAtAllBanks = CanGetErrorLevelLogsAtAllBanks()
case class CanGetSystemLogCacheError(requiresBankId: Boolean = false) extends ApiRole
lazy val canGetSystemLogCacheError = CanGetSystemLogCacheError()
// ALL
case class CanGetAllLevelLogsAtOneBank(requiresBankId: Boolean = true) extends ApiRole
lazy val canGetAllLevelLogsAtOneBank = CanGetAllLevelLogsAtOneBank()
case class CanGetAllLevelLogsAtAllBanks(requiresBankId: Boolean = false) extends ApiRole
lazy val canGetAllLevelLogsAtAllBanks = CanGetAllLevelLogsAtAllBanks()
case class CanGetSystemLogCacheAll(requiresBankId: Boolean = false) extends ApiRole
lazy val canGetSystemLogCacheAll = CanGetSystemLogCacheAll()
case class CanUpdateAgentStatusAtAnyBank(requiresBankId: Boolean = false) extends ApiRole
lazy val canUpdateAgentStatusAtAnyBank = CanUpdateAgentStatusAtAnyBank()

View File

@ -91,6 +91,7 @@ object ApiTag {
val apiTagDevOps = ResourceDocTag("DevOps")
val apiTagSystem = ResourceDocTag("System")
val apiTagCache = ResourceDocTag("Cache")
val apiTagLogCache = ResourceDocTag("Log-Cache")
val apiTagApiCollection = ResourceDocTag("Api-Collection")

View File

@ -19,6 +19,7 @@ object ApiVersionUtils {
v5_0_0 ::
v5_1_0 ::
v6_0_0 ::
v7_0_0 ::
`dynamic-endpoint` ::
`dynamic-entity` ::
scannedApis
@ -41,6 +42,7 @@ object ApiVersionUtils {
case v5_0_0.fullyQualifiedVersion | v5_0_0.apiShortVersion => v5_0_0
case v5_1_0.fullyQualifiedVersion | v5_1_0.apiShortVersion => v5_1_0
case v6_0_0.fullyQualifiedVersion | v6_0_0.apiShortVersion => v6_0_0
case v7_0_0.fullyQualifiedVersion | v7_0_0.apiShortVersion => v7_0_0
case `dynamic-endpoint`.fullyQualifiedVersion | `dynamic-endpoint`.apiShortVersion => `dynamic-endpoint`
case `dynamic-entity`.fullyQualifiedVersion | `dynamic-entity`.apiShortVersion => `dynamic-entity`
case version if(scannedApis.map(_.fullyQualifiedVersion).contains(version))

View File

@ -84,6 +84,7 @@ object ErrorMessages {
val FXCurrencyCodeCombinationsNotSupported = "OBP-10004: ISO Currency code combination not supported for FX. Please modify the FROM_CURRENCY_CODE or TO_CURRENCY_CODE. "
val InvalidDateFormat = "OBP-10005: Invalid Date Format. Could not convert value to a Date."
val InvalidCurrency = "OBP-10006: Invalid Currency Value."
val InvalidCacheNamespaceId = "OBP-10123: Invalid namespace_id."
val IncorrectRoleName = "OBP-10007: Incorrect Role name:"
val CouldNotTransformJsonToInternalModel = "OBP-10008: Could not transform Json to internal model."
val CountNotSaveOrUpdateResource = "OBP-10009: Could not save or update resource."
@ -269,7 +270,7 @@ object ErrorMessages {
val Oauth2ThereIsNoUrlOfJwkSet = "OBP-20203: There is no an URL of OAuth 2.0 server's JWK set, published at a well-known URL."
val Oauth2BadJWTException = "OBP-20204: Bad JWT error. "
val Oauth2ParseException = "OBP-20205: Parse error. "
val Oauth2BadJOSEException = "OBP-20206: Bad JSON Object Signing and Encryption (JOSE) exception. The ID token is invalid or expired. "
val Oauth2BadJOSEException = "OBP-20206: Bad JSON Object Signing and Encryption (JOSE) exception. The ID token is invalid or expired. OBP-API Admin should check the oauth2.jwk_set.url list contains the jwks url of the provider."
val Oauth2JOSEException = "OBP-20207: Bad JSON Object Signing and Encryption (JOSE) exception. An internal JOSE exception was encountered. "
val Oauth2CannotMatchIssuerAndJwksUriException = "OBP-20208: Cannot match the issuer and JWKS URI at this server instance. "
val Oauth2TokenHaveNoConsumer = "OBP-20209: The token have no linked consumer. "

View File

@ -239,55 +239,204 @@ trait APIMethods510 {
}
}
// Helper function to avoid code duplication
private def getLogCacheHelper(level: RedisLogger.LogLevel.Value, cc: CallContext): Future[(RedisLogger.LogTail, Option[CallContext])] = {
implicit val ec = EndpointContext(Some(cc))
for {
httpParams <- NewStyle.function.extractHttpParamsFromUrl(cc.url)
(obpQueryParams, callContext) <- createQueriesByHttpParamsFuture(httpParams, cc.callContext)
limit = obpQueryParams.collectFirst { case OBPLimit(value) => value }
offset = obpQueryParams.collectFirst { case OBPOffset(value) => value }
logs <- Future(RedisLogger.getLogTail(level, limit, offset))
} yield {
(logs, HttpCode.`200`(callContext))
}
}
staticResourceDocs += ResourceDoc(
logCacheEndpoint,
logCacheTraceEndpoint,
implementedInApiVersion,
nameOf(logCacheEndpoint),
nameOf(logCacheTraceEndpoint),
"GET",
"/system/log-cache/LOG_LEVEL",
"Get Log Cache",
"""Returns information about:
|
|* Log Cache
"/system/log-cache/trace",
"Get Trace Level Log Cache",
"""Returns TRACE level logs from the system log cache.
|
|This endpoint supports pagination via the following optional query parameters:
|* limit - Maximum number of log entries to return
|* offset - Number of log entries to skip (for pagination)
|
|Example: GET /system/log-cache/INFO?limit=50&offset=100
|Example: GET /system/log-cache/trace?limit=50&offset=100
""",
EmptyBody,
EmptyBody,
List($UserNotLoggedIn, UnknownError),
apiTagSystem :: apiTagApi :: Nil,
Some(List(canGetAllLevelLogsAtAllBanks)))
apiTagSystem :: apiTagApi :: apiTagLogCache :: Nil,
Some(List(canGetSystemLogCacheTrace, canGetSystemLogCacheAll)))
lazy val logCacheEndpoint: OBPEndpoint = {
case "system" :: "log-cache" :: logLevel :: Nil JsonGet _ =>
lazy val logCacheTraceEndpoint: OBPEndpoint = {
case "system" :: "log-cache" :: "trace" :: Nil JsonGet _ =>
cc =>
implicit val ec = EndpointContext(Some(cc))
for {
// Parse and validate log level
level <- NewStyle.function.tryons(ErrorMessages.invalidLogLevel, 400, cc.callContext) {
RedisLogger.LogLevel.valueOf(logLevel)
}
// Check entitlements using helper
_ <- NewStyle.function.handleEntitlementsAndScopes(
bankId = "",
userId = cc.userId,
roles = RedisLogger.LogLevel.requiredRoles(level),
callContext = cc.callContext
)
httpParams <- NewStyle.function.extractHttpParamsFromUrl(cc.url)
(obpQueryParams, callContext) <- createQueriesByHttpParamsFuture(httpParams, cc.callContext)
// Extract limit and offset from query parameters
limit = obpQueryParams.collectFirst { case OBPLimit(value) => value }
offset = obpQueryParams.collectFirst { case OBPOffset(value) => value }
// Fetch logs with pagination
logs <- Future(RedisLogger.getLogTail(level, limit, offset))
} yield {
(logs, HttpCode.`200`(cc.callContext))
}
_ <- NewStyle.function.handleEntitlementsAndScopes("", cc.userId, List(canGetSystemLogCacheTrace, canGetSystemLogCacheAll), cc.callContext)
result <- getLogCacheHelper(RedisLogger.LogLevel.TRACE, cc)
} yield result
}
staticResourceDocs += ResourceDoc(
logCacheDebugEndpoint,
implementedInApiVersion,
nameOf(logCacheDebugEndpoint),
"GET",
"/system/log-cache/debug",
"Get Debug Level Log Cache",
"""Returns DEBUG level logs from the system log cache.
|
|This endpoint supports pagination via the following optional query parameters:
|* limit - Maximum number of log entries to return
|* offset - Number of log entries to skip (for pagination)
|
|Example: GET /system/log-cache/debug?limit=50&offset=100
""",
EmptyBody,
EmptyBody,
List($UserNotLoggedIn, UnknownError),
apiTagSystem :: apiTagApi :: apiTagLogCache :: Nil,
Some(List(canGetSystemLogCacheDebug, canGetSystemLogCacheAll)))
lazy val logCacheDebugEndpoint: OBPEndpoint = {
case "system" :: "log-cache" :: "debug" :: Nil JsonGet _ =>
cc =>
implicit val ec = EndpointContext(Some(cc))
for {
_ <- NewStyle.function.handleEntitlementsAndScopes("", cc.userId, List(canGetSystemLogCacheDebug, canGetSystemLogCacheAll), cc.callContext)
result <- getLogCacheHelper(RedisLogger.LogLevel.DEBUG, cc)
} yield result
}
staticResourceDocs += ResourceDoc(
logCacheInfoEndpoint,
implementedInApiVersion,
nameOf(logCacheInfoEndpoint),
"GET",
"/system/log-cache/info",
"Get Info Level Log Cache",
"""Returns INFO level logs from the system log cache.
|
|This endpoint supports pagination via the following optional query parameters:
|* limit - Maximum number of log entries to return
|* offset - Number of log entries to skip (for pagination)
|
|Example: GET /system/log-cache/info?limit=50&offset=100
""",
EmptyBody,
EmptyBody,
List($UserNotLoggedIn, UnknownError),
apiTagSystem :: apiTagApi :: apiTagLogCache :: Nil,
Some(List(canGetSystemLogCacheInfo, canGetSystemLogCacheAll)))
lazy val logCacheInfoEndpoint: OBPEndpoint = {
case "system" :: "log-cache" :: "info" :: Nil JsonGet _ =>
cc =>
implicit val ec = EndpointContext(Some(cc))
for {
_ <- NewStyle.function.handleEntitlementsAndScopes("", cc.userId, List(canGetSystemLogCacheInfo, canGetSystemLogCacheAll), cc.callContext)
result <- getLogCacheHelper(RedisLogger.LogLevel.INFO, cc)
} yield result
}
staticResourceDocs += ResourceDoc(
logCacheWarningEndpoint,
implementedInApiVersion,
nameOf(logCacheWarningEndpoint),
"GET",
"/system/log-cache/warning",
"Get Warning Level Log Cache",
"""Returns WARNING level logs from the system log cache.
|
|This endpoint supports pagination via the following optional query parameters:
|* limit - Maximum number of log entries to return
|* offset - Number of log entries to skip (for pagination)
|
|Example: GET /system/log-cache/warning?limit=50&offset=100
""",
EmptyBody,
EmptyBody,
List($UserNotLoggedIn, UnknownError),
apiTagSystem :: apiTagApi :: apiTagLogCache :: Nil,
Some(List(canGetSystemLogCacheWarning, canGetSystemLogCacheAll)))
lazy val logCacheWarningEndpoint: OBPEndpoint = {
case "system" :: "log-cache" :: "warning" :: Nil JsonGet _ =>
cc =>
implicit val ec = EndpointContext(Some(cc))
for {
_ <- NewStyle.function.handleEntitlementsAndScopes("", cc.userId, List(canGetSystemLogCacheWarning, canGetSystemLogCacheAll), cc.callContext)
result <- getLogCacheHelper(RedisLogger.LogLevel.WARNING, cc)
} yield result
}
staticResourceDocs += ResourceDoc(
logCacheErrorEndpoint,
implementedInApiVersion,
nameOf(logCacheErrorEndpoint),
"GET",
"/system/log-cache/error",
"Get Error Level Log Cache",
"""Returns ERROR level logs from the system log cache.
|
|This endpoint supports pagination via the following optional query parameters:
|* limit - Maximum number of log entries to return
|* offset - Number of log entries to skip (for pagination)
|
|Example: GET /system/log-cache/error?limit=50&offset=100
""",
EmptyBody,
EmptyBody,
List($UserNotLoggedIn, UnknownError),
apiTagSystem :: apiTagApi :: apiTagLogCache :: Nil,
Some(List(canGetSystemLogCacheError, canGetSystemLogCacheAll)))
lazy val logCacheErrorEndpoint: OBPEndpoint = {
case "system" :: "log-cache" :: "error" :: Nil JsonGet _ =>
cc =>
implicit val ec = EndpointContext(Some(cc))
for {
_ <- NewStyle.function.handleEntitlementsAndScopes("", cc.userId, List(canGetSystemLogCacheError, canGetSystemLogCacheAll), cc.callContext)
result <- getLogCacheHelper(RedisLogger.LogLevel.ERROR, cc)
} yield result
}
staticResourceDocs += ResourceDoc(
logCacheAllEndpoint,
implementedInApiVersion,
nameOf(logCacheAllEndpoint),
"GET",
"/system/log-cache/all",
"Get All Level Log Cache",
"""Returns logs of all levels from the system log cache.
|
|This endpoint supports pagination via the following optional query parameters:
|* limit - Maximum number of log entries to return
|* offset - Number of log entries to skip (for pagination)
|
|Example: GET /system/log-cache/all?limit=50&offset=100
""",
EmptyBody,
EmptyBody,
List($UserNotLoggedIn, UnknownError),
apiTagSystem :: apiTagApi :: apiTagLogCache :: Nil,
Some(List(canGetSystemLogCacheAll)))
lazy val logCacheAllEndpoint: OBPEndpoint = {
case "system" :: "log-cache" :: "all" :: Nil JsonGet _ =>
cc =>
implicit val ec = EndpointContext(Some(cc))
for {
_ <- NewStyle.function.handleEntitlementsAndScopes("", cc.userId, List(canGetSystemLogCacheAll), cc.callContext)
result <- getLogCacheHelper(RedisLogger.LogLevel.ALL, cc)
} yield result
}

View File

@ -28,7 +28,7 @@ import code.api.v5_0_0.{ViewJsonV500, ViewsJsonV500}
import code.api.v5_1_0.{JSONFactory510, PostCustomerLegalNameJsonV510}
import code.api.dynamic.entity.helper.{DynamicEntityHelper, DynamicEntityInfo}
import code.api.v6_0_0.JSONFactory600.{AddUserToGroupResponseJsonV600, DynamicEntityDiagnosticsJsonV600, DynamicEntityIssueJsonV600, GroupEntitlementJsonV600, GroupEntitlementsJsonV600, GroupJsonV600, GroupsJsonV600, PostGroupJsonV600, PostGroupMembershipJsonV600, PostResetPasswordUrlJsonV600, PutGroupJsonV600, ReferenceTypeJsonV600, ReferenceTypesJsonV600, ResetPasswordUrlJsonV600, RoleWithEntitlementCountJsonV600, RolesWithEntitlementCountsJsonV600, ScannedApiVersionJsonV600, UpdateViewJsonV600, UserGroupMembershipJsonV600, UserGroupMembershipsJsonV600, ValidateUserEmailJsonV600, ValidateUserEmailResponseJsonV600, ViewJsonV600, ViewPermissionJsonV600, ViewPermissionsJsonV600, ViewsJsonV600, createAbacRuleJsonV600, createAbacRulesJsonV600, createActiveRateLimitsJsonV600, createCallLimitJsonV600, createRedisCallCountersJson}
import code.api.v6_0_0.{AbacRuleJsonV600, AbacRuleResultJsonV600, AbacRulesJsonV600, CacheConfigJsonV600, CacheInfoJsonV600, CacheNamespaceInfoJsonV600, CacheProviderConfigJsonV600, CreateAbacRuleJsonV600, CurrentConsumerJsonV600, ExecuteAbacRuleJsonV600, UpdateAbacRuleJsonV600}
import code.api.v6_0_0.{AbacRuleJsonV600, AbacRuleResultJsonV600, AbacRulesJsonV600, CacheConfigJsonV600, CacheInfoJsonV600, CacheNamespaceInfoJsonV600, CreateAbacRuleJsonV600, CurrentConsumerJsonV600, ExecuteAbacRuleJsonV600, InMemoryCacheStatusJsonV600, RedisCacheStatusJsonV600, UpdateAbacRuleJsonV600}
import code.api.v6_0_0.OBPAPI6_0_0
import code.abacrule.{AbacRuleEngine, MappedAbacRuleProvider}
import code.metrics.APIMetrics
@ -636,7 +636,7 @@ trait APIMethods600 {
}
namespaceId = postJson.namespace_id
_ <- Helper.booleanToFuture(
s"Invalid namespace_id: $namespaceId. Valid values: ${Constant.ALL_CACHE_NAMESPACES.mkString(", ")}",
s"$InvalidCacheNamespaceId $namespaceId. Valid values: ${Constant.ALL_CACHE_NAMESPACES.mkString(", ")}",
400,
callContext
)(Constant.ALL_CACHE_NAMESPACES.contains(namespaceId))
@ -668,8 +668,8 @@ trait APIMethods600 {
"Get Cache Configuration",
"""Returns cache configuration information including:
|
|- Available cache providers (Redis, In-Memory)
|- Redis connection details (URL, port, SSL)
|- Redis status: availability, connection details (URL, port, SSL)
|- In-memory cache status: availability and current size
|- Instance ID and environment
|- Global cache namespace prefix
|
@ -679,21 +679,15 @@ trait APIMethods600 {
|""",
EmptyBody,
CacheConfigJsonV600(
providers = List(
CacheProviderConfigJsonV600(
provider = "redis",
enabled = true,
url = Some("127.0.0.1"),
port = Some(6379),
use_ssl = Some(false)
),
CacheProviderConfigJsonV600(
provider = "in_memory",
enabled = true,
url = None,
port = None,
use_ssl = None
)
redis_status = RedisCacheStatusJsonV600(
available = true,
url = "127.0.0.1",
port = 6379,
use_ssl = false
),
in_memory_status = InMemoryCacheStatusJsonV600(
available = true,
current_size = 42
),
instance_id = "obp",
environment = "dev",
@ -734,6 +728,14 @@ trait APIMethods600 {
|- Current version counter
|- Number of keys in each namespace
|- Description and category
|- Storage location (redis, memory, both, or unknown)
| - "redis": Keys stored in Redis
| - "memory": Keys stored in in-memory cache
| - "both": Keys in both locations (indicates a BUG - should never happen)
| - "unknown": No keys found, storage location cannot be determined
|- TTL info: Sampled TTL information from actual keys
| - Shows actual TTL values from up to 5 sample keys
| - Format: "123s" (fixed), "range 60s to 3600s (avg 1800s)" (variable), "no expiry" (persistent)
|- Total key count across all namespaces
|- Redis availability status
|
@ -750,7 +752,9 @@ trait APIMethods600 {
current_version = 1,
key_count = 42,
description = "Rate limit call counters",
category = "Rate Limiting"
category = "Rate Limiting",
storage_location = "redis",
ttl_info = "range 60s to 86400s (avg 3600s)"
),
CacheNamespaceInfoJsonV600(
namespace_id = "rd_localised",
@ -758,7 +762,9 @@ trait APIMethods600 {
current_version = 1,
key_count = 128,
description = "Localized resource docs",
category = "API Documentation"
category = "API Documentation",
storage_location = "redis",
ttl_info = "3600s"
)
),
total_keys = 170,

View File

@ -268,16 +268,21 @@ case class InvalidatedCacheNamespaceJsonV600(
status: String
)
case class CacheProviderConfigJsonV600(
provider: String,
enabled: Boolean,
url: Option[String],
port: Option[Int],
use_ssl: Option[Boolean]
case class RedisCacheStatusJsonV600(
available: Boolean,
url: String,
port: Int,
use_ssl: Boolean
)
case class InMemoryCacheStatusJsonV600(
available: Boolean,
current_size: Long
)
case class CacheConfigJsonV600(
providers: List[CacheProviderConfigJsonV600],
redis_status: RedisCacheStatusJsonV600,
in_memory_status: InMemoryCacheStatusJsonV600,
instance_id: String,
environment: String,
global_prefix: String
@ -289,7 +294,9 @@ case class CacheNamespaceInfoJsonV600(
current_version: Long,
key_count: Int,
description: String,
category: String
category: String,
storage_location: String,
ttl_info: String
)
case class CacheInfoJsonV600(
@ -1119,21 +1126,17 @@ object JSONFactory600 extends CustomJsonFormats with MdcLoggable {
import code.api.Constant
import net.liftweb.util.Props
val redisProvider = CacheProviderConfigJsonV600(
provider = "redis",
enabled = true,
url = Some(Redis.url),
port = Some(Redis.port),
use_ssl = Some(Redis.useSsl)
)
val redisIsReady = try {
Redis.isRedisReady
} catch {
case _: Throwable => false
}
val inMemoryProvider = CacheProviderConfigJsonV600(
provider = "in_memory",
enabled = true,
url = None,
port = None,
use_ssl = None
)
val inMemorySize = try {
InMemory.underlyingGuavaCache.size()
} catch {
case _: Throwable => 0L
}
val instanceId = code.api.util.APIUtil.getPropsValue("api_instance_id").getOrElse("obp")
val environment = Props.mode match {
@ -1144,8 +1147,21 @@ object JSONFactory600 extends CustomJsonFormats with MdcLoggable {
case _ => "unknown"
}
val redisStatus = RedisCacheStatusJsonV600(
available = redisIsReady,
url = Redis.url,
port = Redis.port,
use_ssl = Redis.useSsl
)
val inMemoryStatus = InMemoryCacheStatusJsonV600(
available = inMemorySize >= 0,
current_size = inMemorySize
)
CacheConfigJsonV600(
providers = List(redisProvider, inMemoryProvider),
redis_status = redisStatus,
in_memory_status = inMemoryStatus,
instance_id = instanceId,
environment = environment,
global_prefix = Constant.getGlobalCacheNamespacePrefix
@ -1153,8 +1169,9 @@ object JSONFactory600 extends CustomJsonFormats with MdcLoggable {
}
def createCacheInfoJsonV600(): CacheInfoJsonV600 = {
import code.api.cache.Redis
import code.api.cache.{Redis, InMemory}
import code.api.Constant
import code.api.JedisMethod
val namespaceDescriptions = Map(
Constant.CALL_COUNTER_NAMESPACE -> ("Rate limit call counters", "Rate Limiting"),
@ -1178,14 +1195,69 @@ object JSONFactory600 extends CustomJsonFormats with MdcLoggable {
val prefix = Constant.getVersionedCachePrefix(namespaceId)
val pattern = s"${prefix}*"
val keyCount = try {
val count = Redis.countKeys(pattern)
totalKeys += count
count
// Dynamically determine storage location by checking where keys exist
var redisKeyCount = 0
var memoryKeyCount = 0
var storageLocation = "unknown"
var ttlInfo = "no keys to sample"
try {
redisKeyCount = Redis.countKeys(pattern)
totalKeys += redisKeyCount
// Sample keys to get TTL information
if (redisKeyCount > 0) {
val sampleKeys = Redis.scanKeys(pattern).take(5)
val ttls = sampleKeys.flatMap { key =>
Redis.use(JedisMethod.TTL, key, None, None).map(_.toLong)
}
if (ttls.nonEmpty) {
val minTtl = ttls.min
val maxTtl = ttls.max
val avgTtl = ttls.sum / ttls.length.toLong
ttlInfo = if (minTtl == maxTtl) {
if (minTtl == -1) "no expiry"
else if (minTtl == -2) "keys expired or missing"
else s"${minTtl}s"
} else {
s"range ${minTtl}s to ${maxTtl}s (avg ${avgTtl}s)"
}
}
}
} catch {
case _: Throwable =>
redisAvailable = false
0
}
try {
memoryKeyCount = InMemory.countKeys(pattern)
totalKeys += memoryKeyCount
if (memoryKeyCount > 0 && redisKeyCount == 0) {
ttlInfo = "in-memory (no TTL in Guava cache)"
}
} catch {
case _: Throwable =>
// In-memory cache error (shouldn't happen, but handle gracefully)
}
// Determine storage based on where keys actually exist
val keyCount = if (redisKeyCount > 0 && memoryKeyCount > 0) {
storageLocation = "both"
ttlInfo = s"redis: ${ttlInfo}, memory: in-memory cache"
redisKeyCount + memoryKeyCount
} else if (redisKeyCount > 0) {
storageLocation = "redis"
redisKeyCount
} else if (memoryKeyCount > 0) {
storageLocation = "memory"
memoryKeyCount
} else {
// No keys found in either location - we don't know where they would be stored
storageLocation = "unknown"
0
}
val (description, category) = namespaceDescriptions.getOrElse(namespaceId, ("Unknown namespace", "Other"))
@ -1196,7 +1268,9 @@ object JSONFactory600 extends CustomJsonFormats with MdcLoggable {
current_version = version,
key_count = keyCount,
description = description,
category = category
category = category,
storage_location = storageLocation,
ttl_info = ttlInfo
)
}

View File

@ -2,18 +2,25 @@ package code.api.v7_0_0
import cats.data.{Kleisli, OptionT}
import cats.effect._
import cats.implicits._
import code.api.util.{APIUtil, CustomJsonFormats}
import code.api.Constant._
import code.api.ResourceDocs1_4_0.SwaggerDefinitionsJSON._
import code.api.ResourceDocs1_4_0.{ResourceDocs140, ResourceDocsAPIMethodsUtil}
import code.api.util.APIUtil.{EmptyBody, _}
import code.api.util.ApiTag._
import code.api.util.ErrorMessages._
import code.api.util.{ApiVersionUtils, CustomJsonFormats, NewStyle}
import code.api.v1_4_0.JSONFactory1_4_0
import code.api.v4_0_0.JSONFactory400
import code.bankconnectors.Connector
import com.openbankproject.commons.util.{ApiVersion, ScannedApiVersion}
import net.liftweb.json.Formats
import com.github.dwickern.macros.NameOf.nameOf
import com.openbankproject.commons.ExecutionContext.Implicits.global
import com.openbankproject.commons.util.{ApiVersion, ApiVersionStatus, ScannedApiVersion}
import net.liftweb.json.JsonAST.prettyRender
import net.liftweb.json.Extraction
import net.liftweb.json.{Extraction, Formats}
import org.http4s._
import org.http4s.dsl.io._
import org.typelevel.vault.Key
import scala.collection.mutable.ArrayBuffer
import scala.concurrent.Future
import scala.language.{higherKinds, implicitConversions}
@ -24,12 +31,13 @@ object Http4s700 {
implicit val formats: Formats = CustomJsonFormats.formats
implicit def convertAnyToJsonString(any: Any): String = prettyRender(Extraction.decompose(any))
val apiVersion: ScannedApiVersion = ApiVersion.v7_0_0
val apiVersionString: String = apiVersion.toString
val implementedInApiVersion: ScannedApiVersion = ApiVersion.v7_0_0
val versionStatus = ApiVersionStatus.STABLE.toString
val resourceDocs = ArrayBuffer[ResourceDoc]()
case class CallContext(userId: String, requestId: String)
import cats.effect.unsafe.implicits.global
val callContextKey: Key[CallContext] = Key.newKey[IO, CallContext].unsafeRunSync()
val callContextKey: Key[CallContext] =
Key.newKey[IO, CallContext].unsafeRunSync()(cats.effect.unsafe.IORuntime.global)
object CallContextMiddleware {
@ -42,31 +50,108 @@ object Http4s700 {
}
}
val v700Services: HttpRoutes[IO] = HttpRoutes.of[IO] {
case req @ GET -> Root / "obp" / `apiVersionString` / "root" =>
import com.openbankproject.commons.ExecutionContext.Implicits.global
val callContext = req.attributes.lookup(callContextKey).get.asInstanceOf[CallContext]
Ok(IO.fromFuture(IO(
for {
_ <- Future(()) // Just start async call
} yield {
convertAnyToJsonString(
JSONFactory700.getApiInfoJSON(apiVersion, s"Hello, ${callContext.userId}! Your request ID is ${callContext.requestId}.")
)
}
)))
object Implementations7_0_0 {
case req @ GET -> Root / "obp" / `apiVersionString` / "banks" =>
import com.openbankproject.commons.ExecutionContext.Implicits.global
Ok(IO.fromFuture(IO(
for {
(banks, callContext) <- code.api.util.NewStyle.function.getBanks(None)
} yield {
convertAnyToJsonString(JSONFactory400.createBanksJson(banks))
}
)))
// Common prefix: /obp/v7.0.0
val prefixPath = Root / ApiPathZero.toString / implementedInApiVersion.toString
resourceDocs += ResourceDoc(
null,
implementedInApiVersion,
nameOf(root),
"GET",
"/root",
"Get API Info (root)",
s"""Returns information about:
|
|* API version
|* Hosted by information
|* Git Commit
|${userAuthenticationMessage(false)}""",
EmptyBody,
apiInfoJSON,
List(UnknownError, "no connector set"),
apiTagApi :: Nil,
http4sPartialFunction = Some(root)
)
// Route: GET /obp/v7.0.0/root
val root: HttpRoutes[IO] = HttpRoutes.of[IO] {
case req @ GET -> `prefixPath` / "root" =>
val callContext = req.attributes.lookup(callContextKey).get.asInstanceOf[CallContext]
Ok(IO.fromFuture(IO(
for {
_ <- Future() // Just start async call
} yield {
convertAnyToJsonString(
JSONFactory700.getApiInfoJSON(implementedInApiVersion, s"Hello, ${callContext.userId}! Your request ID is ${callContext.requestId}.")
)
}
)))
}
resourceDocs += ResourceDoc(
null,
implementedInApiVersion,
nameOf(getBanks),
"GET",
"/banks",
"Get Banks",
s"""Get banks on this API instance
|Returns a list of banks supported on this server:
|
|* ID used as parameter in URLs
|* Short and full name of bank
|* Logo URL
|* Website
|${userAuthenticationMessage(false)}""",
EmptyBody,
banksJSON,
List(UnknownError),
apiTagBank :: Nil,
http4sPartialFunction = Some(getBanks)
)
// Route: GET /obp/v7.0.0/banks
val getBanks: HttpRoutes[IO] = HttpRoutes.of[IO] {
case req @ GET -> `prefixPath` / "banks" =>
import com.openbankproject.commons.ExecutionContext.Implicits.global
Ok(IO.fromFuture(IO(
for {
(banks, callContext) <- NewStyle.function.getBanks(None)
} yield {
convertAnyToJsonString(JSONFactory400.createBanksJson(banks))
}
)))
}
val getResourceDocsObpV700: HttpRoutes[IO] = HttpRoutes.of[IO] {
case req @ GET -> `prefixPath` / "resource-docs" / requestedApiVersionString / "obp" =>
import com.openbankproject.commons.ExecutionContext.Implicits.global
val logic = for {
httpParams <- NewStyle.function.extractHttpParamsFromUrl(req.uri.renderString)
tagsParam = httpParams.filter(_.name == "tags").map(_.values).headOption
functionsParam = httpParams.filter(_.name == "functions").map(_.values).headOption
localeParam = httpParams.filter(param => param.name == "locale" || param.name == "language").map(_.values).flatten.headOption
contentParam = httpParams.filter(_.name == "content").map(_.values).flatten.flatMap(ResourceDocsAPIMethodsUtil.stringToContentParam).headOption
apiCollectionIdParam = httpParams.filter(_.name == "api-collection-id").map(_.values).flatten.headOption
tags = tagsParam.map(_.map(ResourceDocTag(_)))
functions = functionsParam.map(_.toList)
requestedApiVersion <- Future(ApiVersionUtils.valueOf(requestedApiVersionString))
resourceDocs = ResourceDocs140.ImplementationsResourceDocs.getResourceDocsList(requestedApiVersion).getOrElse(Nil)
filteredDocs = ResourceDocsAPIMethodsUtil.filterResourceDocs(resourceDocs, tags, functions)
resourceDocsJson = JSONFactory1_4_0.createResourceDocsJson(filteredDocs, isVersion4OrHigher = true, localeParam)
} yield convertAnyToJsonString(resourceDocsJson)
Ok(IO.fromFuture(IO(logic)))
}
// All routes combined
val allRoutes: HttpRoutes[IO] =
Kleisli[HttpF, Request[IO], Response[IO]] { req: Request[IO] =>
root(req).orElse(getBanks(req)).orElse(getResourceDocsObpV700(req))
}
}
val wrappedRoutesV700Services: HttpRoutes[IO] = CallContextMiddleware.withCallContext(v700Services)
val wrappedRoutesV700Services: HttpRoutes[IO] = CallContextMiddleware.withCallContext(Implementations7_0_0.allRoutes)
}

View File

@ -1,7 +1,7 @@
package code.api.v5_1_0
import code.api.util.APIUtil.OAuth._
import code.api.util.ApiRole.CanGetAllLevelLogsAtAllBanks
import code.api.util.ApiRole.{CanGetSystemLogCacheAll,CanGetSystemLogCacheInfo}
import code.api.util.ErrorMessages.{UserHasMissingRoles, UserNotLoggedIn}
import code.api.v5_1_0.OBPAPI5_1_0.Implementations5_1_0
import code.entitlement.Entitlement
@ -21,12 +21,12 @@ class LogCacheEndpointTest extends V510ServerSetup {
* This is made possible by the scalatest maven plugin
*/
object VersionOfApi extends Tag(ApiVersion.v5_1_0.toString)
object ApiEndpoint1 extends Tag(nameOf(Implementations5_1_0.logCacheEndpoint))
object ApiEndpoint1 extends Tag(nameOf(Implementations5_1_0.logCacheInfoEndpoint))
feature(s"test $ApiEndpoint1 version $VersionOfApi - Unauthorized access") {
scenario("We will call the endpoint without user credentials", ApiEndpoint1, VersionOfApi) {
When("We make a request v5.1.0")
val request = (v5_1_0_Request / "system" / "log-cache" / "INFO").GET
val request = (v5_1_0_Request / "system" / "log-cache" / "info").GET
val response = makeGetRequest(request)
Then("We should get a 401")
response.code should equal(401)
@ -37,21 +37,23 @@ class LogCacheEndpointTest extends V510ServerSetup {
feature(s"test $ApiEndpoint1 version $VersionOfApi - Missing entitlement") {
scenario("We will call the endpoint with user credentials but without proper entitlement", ApiEndpoint1, VersionOfApi) {
When("We make a request v5.1.0")
val request = (v5_1_0_Request / "system" / "log-cache" / "INFO").GET <@(user1)
val request = (v5_1_0_Request / "system" / "log-cache" / "info").GET <@(user1)
val response = makeGetRequest(request)
Then("error should be " + UserHasMissingRoles + CanGetAllLevelLogsAtAllBanks)
Then("error should be " + UserHasMissingRoles + CanGetSystemLogCacheAll)
response.code should equal(403)
response.body.extract[ErrorMessage].message should be(UserHasMissingRoles + CanGetAllLevelLogsAtAllBanks)
response.body.extract[ErrorMessage].message contains (UserHasMissingRoles) shouldBe (true)
response.body.extract[ErrorMessage].message contains CanGetSystemLogCacheInfo.toString() shouldBe (true)
response.body.extract[ErrorMessage].message contains CanGetSystemLogCacheAll.toString() shouldBe (true)
}
}
feature(s"test $ApiEndpoint1 version $VersionOfApi - Authorized access without pagination") {
scenario("We get log cache without pagination parameters", ApiEndpoint1, VersionOfApi) {
Given("We have a user with proper entitlement")
Entitlement.entitlement.vend.addEntitlement("", resourceUser1.userId, CanGetAllLevelLogsAtAllBanks.toString)
Entitlement.entitlement.vend.addEntitlement("", resourceUser1.userId, CanGetSystemLogCacheAll.toString)
When("We make a request to get log cache")
val request = (v5_1_0_Request / "system" / "log-cache" / "INFO").GET <@(user1)
val request = (v5_1_0_Request / "system" / "log-cache" / "info").GET <@(user1)
val response = makeGetRequest(request)
Then("We should get a successful response")
@ -66,10 +68,10 @@ class LogCacheEndpointTest extends V510ServerSetup {
feature(s"test $ApiEndpoint1 version $VersionOfApi - Authorized access with limit parameter") {
scenario("We get log cache with limit parameter only", ApiEndpoint1, VersionOfApi) {
Given("We have a user with proper entitlement")
Entitlement.entitlement.vend.addEntitlement("", resourceUser1.userId, CanGetAllLevelLogsAtAllBanks.toString)
Entitlement.entitlement.vend.addEntitlement("", resourceUser1.userId, CanGetSystemLogCacheAll.toString)
When("We make a request with limit parameter")
val request = (v5_1_0_Request / "system" / "log-cache" / "INFO").GET <@(user1) <<? List(("limit", "5"))
val request = (v5_1_0_Request / "system" / "log-cache" / "info").GET <@(user1) <<? List(("limit", "5"))
val response = makeGetRequest(request)
Then("We should get a successful response")
@ -85,10 +87,10 @@ class LogCacheEndpointTest extends V510ServerSetup {
feature(s"test $ApiEndpoint1 version $VersionOfApi - Authorized access with offset parameter") {
scenario("We get log cache with offset parameter only", ApiEndpoint1, VersionOfApi) {
Given("We have a user with proper entitlement")
Entitlement.entitlement.vend.addEntitlement("", resourceUser1.userId, CanGetAllLevelLogsAtAllBanks.toString)
Entitlement.entitlement.vend.addEntitlement("", resourceUser1.userId, CanGetSystemLogCacheAll.toString)
When("We make a request with offset parameter")
val request = (v5_1_0_Request / "system" / "log-cache" / "INFO").GET <@(user1) <<? List(("offset", "2"))
val request = (v5_1_0_Request / "system" / "log-cache" / "info").GET <@(user1) <<? List(("offset", "2"))
val response = makeGetRequest(request)
Then("We should get a successful response")
@ -103,10 +105,10 @@ class LogCacheEndpointTest extends V510ServerSetup {
feature(s"test $ApiEndpoint1 version $VersionOfApi - Authorized access with both parameters") {
scenario("We get log cache with both limit and offset parameters", ApiEndpoint1, VersionOfApi) {
Given("We have a user with proper entitlement")
Entitlement.entitlement.vend.addEntitlement("", resourceUser1.userId, CanGetAllLevelLogsAtAllBanks.toString)
Entitlement.entitlement.vend.addEntitlement("", resourceUser1.userId, CanGetSystemLogCacheAll.toString)
When("We make a request with both limit and offset parameters")
val request = (v5_1_0_Request / "system" / "log-cache" / "INFO").GET <@(user1) <<? List(("limit", "3"), ("offset", "1"))
val request = (v5_1_0_Request / "system" / "log-cache" / "info").GET <@(user1) <<? List(("limit", "3"), ("offset", "1"))
val response = makeGetRequest(request)
Then("We should get a successful response")
@ -122,13 +124,13 @@ class LogCacheEndpointTest extends V510ServerSetup {
feature(s"test $ApiEndpoint1 version $VersionOfApi - Edge cases") {
scenario("We get error with zero limit (invalid parameter)", ApiEndpoint1, VersionOfApi) {
Given("We have a user with proper entitlement")
Entitlement.entitlement.vend.addEntitlement("", resourceUser1.userId, CanGetAllLevelLogsAtAllBanks.toString)
Entitlement.entitlement.vend.addEntitlement("", resourceUser1.userId, CanGetSystemLogCacheAll.toString)
When("We make a request with zero limit")
val request = (v5_1_0_Request / "system" / "log-cache" / "INFO").GET <@(user1) <<? List(("limit", "0"))
val request = (v5_1_0_Request / "system" / "log-cache" / "info").GET <@(user1) <<? List(("limit", "0"))
val response = makeGetRequest(request)
Then("We should get a bad request response")
Then("We should get a not found response since endpoint does not exist")
response.code should equal(400)
val json = response.body.extract[JObject]
@ -139,10 +141,10 @@ class LogCacheEndpointTest extends V510ServerSetup {
scenario("We get log cache with large offset", ApiEndpoint1, VersionOfApi) {
Given("We have a user with proper entitlement")
Entitlement.entitlement.vend.addEntitlement("", resourceUser1.userId, CanGetAllLevelLogsAtAllBanks.toString)
Entitlement.entitlement.vend.addEntitlement("", resourceUser1.userId, CanGetSystemLogCacheAll.toString)
When("We make a request with very large offset")
val request = (v5_1_0_Request / "system" / "log-cache" / "INFO").GET <@(user1) <<? List(("offset", "10000"))
val request = (v5_1_0_Request / "system" / "log-cache" / "info").GET <@(user1) <<? List(("offset", "10000"))
val response = makeGetRequest(request)
Then("We should get a successful response")
@ -156,10 +158,10 @@ class LogCacheEndpointTest extends V510ServerSetup {
scenario("We get log cache with minimum valid limit", ApiEndpoint1, VersionOfApi) {
Given("We have a user with proper entitlement")
Entitlement.entitlement.vend.addEntitlement("", resourceUser1.userId, CanGetAllLevelLogsAtAllBanks.toString)
Entitlement.entitlement.vend.addEntitlement("", resourceUser1.userId, CanGetSystemLogCacheAll.toString)
When("We make a request with minimum valid limit (1)")
val request = (v5_1_0_Request / "system" / "log-cache" / "INFO").GET <@(user1) <<? List(("limit", "1"))
val request = (v5_1_0_Request / "system" / "log-cache" / "info").GET <@(user1) <<? List(("limit", "1"))
val response = makeGetRequest(request)
Then("We should get a successful response")
@ -175,10 +177,10 @@ class LogCacheEndpointTest extends V510ServerSetup {
feature(s"test $ApiEndpoint1 version $VersionOfApi - Different log levels") {
scenario("We test different log levels with pagination", ApiEndpoint1, VersionOfApi) {
Given("We have a user with proper entitlement")
Entitlement.entitlement.vend.addEntitlement("", resourceUser1.userId, CanGetAllLevelLogsAtAllBanks.toString)
Entitlement.entitlement.vend.addEntitlement("", resourceUser1.userId, CanGetSystemLogCacheAll.toString)
When("We make requests to different log levels with pagination")
val logLevels = List("DEBUG", "INFO", "WARN", "ERROR", "ALL")
val logLevels = List("debug", "info", "warning", "error", "all")
logLevels.foreach { logLevel =>
val request = (v5_1_0_Request / "system" / "log-cache" / logLevel).GET <@(user1) <<? List(("limit", "2"), ("offset", "0"))
@ -197,48 +199,48 @@ class LogCacheEndpointTest extends V510ServerSetup {
feature(s"test $ApiEndpoint1 version $VersionOfApi - Invalid log level") {
scenario("We get error for invalid log level", ApiEndpoint1, VersionOfApi) {
Given("We have a user with proper entitlement")
Entitlement.entitlement.vend.addEntitlement("", resourceUser1.userId, CanGetAllLevelLogsAtAllBanks.toString)
Entitlement.entitlement.vend.addEntitlement("", resourceUser1.userId, CanGetSystemLogCacheAll.toString)
When("We make a request with invalid log level")
val request = (v5_1_0_Request / "system" / "log-cache" / "INVALID_LEVEL").GET <@(user1)
val request = (v5_1_0_Request / "system" / "log-cache" / "invalid_level").GET <@(user1)
val response = makeGetRequest(request)
Then("We should get a bad request response")
response.code should equal(400)
Then("We should get a not found response since endpoint does not exist")
response.code should equal(404)
}
}
feature(s"test $ApiEndpoint1 version $VersionOfApi - Invalid parameters") {
scenario("We test invalid pagination parameters", ApiEndpoint1, VersionOfApi) {
Given("We have a user with proper entitlement")
Entitlement.entitlement.vend.addEntitlement("", resourceUser1.userId, CanGetAllLevelLogsAtAllBanks.toString)
Entitlement.entitlement.vend.addEntitlement("", resourceUser1.userId, CanGetSystemLogCacheAll.toString)
When("We test with non-numeric limit parameter")
val requestInvalidLimit = (v5_1_0_Request / "system" / "log-cache" / "INFO").GET <@(user1) <<? List(("limit", "abc"))
val requestInvalidLimit = (v5_1_0_Request / "system" / "log-cache" / "info").GET <@(user1) <<? List(("limit", "abc"))
val responseInvalidLimit = makeGetRequest(requestInvalidLimit)
Then("We should get a bad request response")
Then("We should get a not found response since endpoint does not exist")
responseInvalidLimit.code should equal(400)
When("We test with non-numeric offset parameter")
val requestInvalidOffset = (v5_1_0_Request / "system" / "log-cache" / "INFO").GET <@(user1) <<? List(("offset", "xyz"))
val requestInvalidOffset = (v5_1_0_Request / "system" / "log-cache" / "info").GET <@(user1) <<? List(("offset", "xyz"))
val responseInvalidOffset = makeGetRequest(requestInvalidOffset)
Then("We should get a bad request response")
Then("We should get a not found response since endpoint does not exist")
responseInvalidOffset.code should equal(400)
When("We test with negative limit parameter")
val requestNegativeLimit = (v5_1_0_Request / "system" / "log-cache" / "INFO").GET <@(user1) <<? List(("limit", "-1"))
val requestNegativeLimit = (v5_1_0_Request / "system" / "log-cache" / "info").GET <@(user1) <<? List(("limit", "-1"))
val responseNegativeLimit = makeGetRequest(requestNegativeLimit)
Then("We should get a bad request response")
Then("We should get a not found response since endpoint does not exist")
responseNegativeLimit.code should equal(400)
When("We test with negative offset parameter")
val requestNegativeOffset = (v5_1_0_Request / "system" / "log-cache" / "INFO").GET <@(user1) <<? List(("offset", "-1"))
val requestNegativeOffset = (v5_1_0_Request / "system" / "log-cache" / "info").GET <@(user1) <<? List(("offset", "-1"))
val responseNegativeOffset = makeGetRequest(requestNegativeOffset)
Then("We should get a bad request response")
Then("We should get a not found response since endpoint does not exist")
responseNegativeOffset.code should equal(400)
}
}

View File

@ -0,0 +1,360 @@
/**
Open Bank Project - API
Copyright (C) 2011-2024, TESOBE GmbH
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Email: contact@tesobe.com
TESOBE GmbH
Osloerstrasse 16/17
Berlin 13359, Germany
This product includes software developed at
TESOBE (http://www.tesobe.com/)
*/
package code.api.v6_0_0
import code.api.util.APIUtil.OAuth._
import code.api.util.ApiRole.{CanGetCacheConfig, CanGetCacheInfo, CanInvalidateCacheNamespace}
import code.api.util.ErrorMessages.{InvalidJsonFormat, UserHasMissingRoles, UserNotLoggedIn}
import code.api.v6_0_0.OBPAPI6_0_0.Implementations6_0_0
import code.entitlement.Entitlement
import com.github.dwickern.macros.NameOf.nameOf
import com.openbankproject.commons.model.ErrorMessage
import com.openbankproject.commons.util.ApiVersion
import net.liftweb.json.Serialization.write
import org.scalatest.Tag
/**
 * Integration tests for the v6.0.0 cache management endpoints:
 *  - GET  /system/cache/config                    (getCacheConfig)
 *  - GET  /system/cache/info                      (getCacheInfo)
 *  - POST /management/cache/namespaces/invalidate (invalidateCacheNamespace)
 *
 * Each endpoint is exercised for: missing credentials (401), missing role (403),
 * invalid payload (400, where applicable) and the authorized happy path (200).
 */
class CacheEndpointsTest extends V600ServerSetup {
/**
* Test tags
* Example: To run tests with tag "getCacheConfig":
* mvn test -DtagsToInclude=getCacheConfig
*
* This is made possible by the scalatest maven plugin
*/
object VersionOfApi extends Tag(ApiVersion.v6_0_0.toString)
object ApiEndpoint1 extends Tag(nameOf(Implementations6_0_0.getCacheConfig))
object ApiEndpoint2 extends Tag(nameOf(Implementations6_0_0.getCacheInfo))
object ApiEndpoint3 extends Tag(nameOf(Implementations6_0_0.invalidateCacheNamespace))
// ============================================================================================================
// GET /system/cache/config - Get Cache Configuration
// ============================================================================================================
feature(s"test $ApiEndpoint1 version $VersionOfApi - Unauthorized access") {
scenario("We call getCacheConfig without user credentials", ApiEndpoint1, VersionOfApi) {
When("We make a request v6.0.0 without credentials")
val request = (v6_0_0_Request / "system" / "cache" / "config").GET
val response = makeGetRequest(request)
Then("We should get a 401")
response.code should equal(401)
response.body.extract[ErrorMessage].message should equal(UserNotLoggedIn)
}
}
feature(s"test $ApiEndpoint1 version $VersionOfApi - Missing role") {
scenario("We call getCacheConfig without the CanGetCacheConfig role", ApiEndpoint1, VersionOfApi) {
When("We make a request v6.0.0 without the required role")
val request = (v6_0_0_Request / "system" / "cache" / "config").GET <@ (user1)
val response = makeGetRequest(request)
Then("We should get a 403")
response.code should equal(403)
And("error should be " + UserHasMissingRoles + CanGetCacheConfig)
response.body.extract[ErrorMessage].message should equal(UserHasMissingRoles + CanGetCacheConfig)
}
}
feature(s"test $ApiEndpoint1 version $VersionOfApi - Authorized access") {
scenario("We call getCacheConfig with the CanGetCacheConfig role", ApiEndpoint1, VersionOfApi) {
Given("We have a user with CanGetCacheConfig entitlement")
// "" as bankId — presumably grants the entitlement without a bank scope (system-wide); TODO confirm
Entitlement.entitlement.vend.addEntitlement("", resourceUser1.userId, CanGetCacheConfig.toString)
When("We make a request v6.0.0 with proper role")
val request = (v6_0_0_Request / "system" / "cache" / "config").GET <@ (user1)
val response = makeGetRequest(request)
Then("We should get a 200")
response.code should equal(200)
And("The response should have the correct structure")
val cacheConfig = response.body.extract[CacheConfigJsonV600]
cacheConfig.instance_id should not be empty
cacheConfig.environment should not be empty
cacheConfig.global_prefix should not be empty
And("Redis status should have valid data")
cacheConfig.redis_status.available shouldBe a[Boolean]
cacheConfig.redis_status.url should not be empty
cacheConfig.redis_status.port should be > 0
cacheConfig.redis_status.use_ssl shouldBe a[Boolean]
And("In-memory status should have valid data")
cacheConfig.in_memory_status.available shouldBe a[Boolean]
cacheConfig.in_memory_status.current_size should be >= 0L
}
}
// ============================================================================================================
// GET /system/cache/info - Get Cache Information
// ============================================================================================================
feature(s"test $ApiEndpoint2 version $VersionOfApi - Unauthorized access") {
scenario("We call getCacheInfo without user credentials", ApiEndpoint2, VersionOfApi) {
When("We make a request v6.0.0 without credentials")
val request = (v6_0_0_Request / "system" / "cache" / "info").GET
val response = makeGetRequest(request)
Then("We should get a 401")
response.code should equal(401)
response.body.extract[ErrorMessage].message should equal(UserNotLoggedIn)
}
}
feature(s"test $ApiEndpoint2 version $VersionOfApi - Missing role") {
scenario("We call getCacheInfo without the CanGetCacheInfo role", ApiEndpoint2, VersionOfApi) {
When("We make a request v6.0.0 without the required role")
val request = (v6_0_0_Request / "system" / "cache" / "info").GET <@ (user1)
val response = makeGetRequest(request)
Then("We should get a 403")
response.code should equal(403)
And("error should be " + UserHasMissingRoles + CanGetCacheInfo)
response.body.extract[ErrorMessage].message should equal(UserHasMissingRoles + CanGetCacheInfo)
}
}
feature(s"test $ApiEndpoint2 version $VersionOfApi - Authorized access") {
scenario("We call getCacheInfo with the CanGetCacheInfo role", ApiEndpoint2, VersionOfApi) {
Given("We have a user with CanGetCacheInfo entitlement")
Entitlement.entitlement.vend.addEntitlement("", resourceUser1.userId, CanGetCacheInfo.toString)
When("We make a request v6.0.0 with proper role")
val request = (v6_0_0_Request / "system" / "cache" / "info").GET <@ (user1)
val response = makeGetRequest(request)
Then("We should get a 200")
response.code should equal(200)
And("The response should have the correct structure")
val cacheInfo = response.body.extract[CacheInfoJsonV600]
cacheInfo.namespaces should not be null
cacheInfo.total_keys should be >= 0
cacheInfo.redis_available shouldBe a[Boolean]
And("Each namespace should have valid data")
// Structural checks only — values depend on the server's cache state.
cacheInfo.namespaces.foreach { namespace =>
namespace.namespace_id should not be empty
namespace.prefix should not be empty
namespace.current_version should be > 0L
namespace.key_count should be >= 0
namespace.description should not be empty
namespace.category should not be empty
namespace.storage_location should not be empty
namespace.storage_location should (equal("redis") or equal("memory") or equal("both") or equal("unknown"))
namespace.ttl_info should not be empty
namespace.ttl_info shouldBe a[String]
}
}
}
// ============================================================================================================
// POST /management/cache/namespaces/invalidate - Invalidate Cache Namespace
// ============================================================================================================
feature(s"test $ApiEndpoint3 version $VersionOfApi - Unauthorized access") {
scenario("We call invalidateCacheNamespace without user credentials", ApiEndpoint3, VersionOfApi) {
When("We make a request v6.0.0 without credentials")
val request = (v6_0_0_Request / "management" / "cache" / "namespaces" / "invalidate").POST
val response = makePostRequest(request, write(InvalidateCacheNamespaceJsonV600("rd_localised")))
Then("We should get a 401")
response.code should equal(401)
response.body.extract[ErrorMessage].message should equal(UserNotLoggedIn)
}
}
feature(s"test $ApiEndpoint3 version $VersionOfApi - Missing role") {
scenario("We call invalidateCacheNamespace without the CanInvalidateCacheNamespace role", ApiEndpoint3, VersionOfApi) {
When("We make a request v6.0.0 without the required role")
val request = (v6_0_0_Request / "management" / "cache" / "namespaces" / "invalidate").POST <@ (user1)
val response = makePostRequest(request, write(InvalidateCacheNamespaceJsonV600("rd_localised")))
Then("We should get a 403")
response.code should equal(403)
And("error should be " + UserHasMissingRoles + CanInvalidateCacheNamespace)
response.body.extract[ErrorMessage].message should equal(UserHasMissingRoles + CanInvalidateCacheNamespace)
}
}
feature(s"test $ApiEndpoint3 version $VersionOfApi - Invalid JSON format") {
scenario("We call invalidateCacheNamespace with invalid JSON", ApiEndpoint3, VersionOfApi) {
Given("We have a user with CanInvalidateCacheNamespace entitlement")
Entitlement.entitlement.vend.addEntitlement("", resourceUser1.userId, CanInvalidateCacheNamespace.toString)
When("We make a request with invalid JSON")
// Well-formed JSON but wrong schema (missing namespace_id) — expected to fail extraction.
val request = (v6_0_0_Request / "management" / "cache" / "namespaces" / "invalidate").POST <@ (user1)
val response = makePostRequest(request, """{"invalid": "json"}""")
Then("We should get a 400")
response.code should equal(400)
And("error should be InvalidJsonFormat")
response.body.extract[ErrorMessage].message should startWith(InvalidJsonFormat)
}
}
feature(s"test $ApiEndpoint3 version $VersionOfApi - Invalid namespace_id") {
scenario("We call invalidateCacheNamespace with non-existent namespace_id", ApiEndpoint3, VersionOfApi) {
Given("We have a user with CanInvalidateCacheNamespace entitlement")
Entitlement.entitlement.vend.addEntitlement("", resourceUser1.userId, CanInvalidateCacheNamespace.toString)
When("We make a request with invalid namespace_id")
val request = (v6_0_0_Request / "management" / "cache" / "namespaces" / "invalidate").POST <@ (user1)
val response = makePostRequest(request, write(InvalidateCacheNamespaceJsonV600("invalid_namespace")))
Then("We should get a 400")
response.code should equal(400)
And("error should mention invalid namespace_id")
val errorMessage = response.body.extract[ErrorMessage].message
errorMessage should include("Invalid namespace_id")
errorMessage should include("invalid_namespace")
}
}
// NOTE(review): the scenarios below assume "rd_localised", "connector",
// "abac_rule", "rd_dynamic" and "rd_static" are namespace ids registered
// server-side — confirm against the invalidateCacheNamespace implementation.
feature(s"test $ApiEndpoint3 version $VersionOfApi - Authorized access with valid namespace") {
scenario("We call invalidateCacheNamespace with valid rd_localised namespace", ApiEndpoint3, VersionOfApi) {
Given("We have a user with CanInvalidateCacheNamespace entitlement")
Entitlement.entitlement.vend.addEntitlement("", resourceUser1.userId, CanInvalidateCacheNamespace.toString)
When("We make a request with valid namespace_id")
val request = (v6_0_0_Request / "management" / "cache" / "namespaces" / "invalidate").POST <@ (user1)
val response = makePostRequest(request, write(InvalidateCacheNamespaceJsonV600("rd_localised")))
Then("We should get a 200")
response.code should equal(200)
And("The response should have the correct structure")
val result = response.body.extract[InvalidatedCacheNamespaceJsonV600]
result.namespace_id should equal("rd_localised")
result.old_version should be > 0L
result.new_version should be > result.old_version
result.new_version should equal(result.old_version + 1)
result.status should equal("invalidated")
}
scenario("We call invalidateCacheNamespace with valid connector namespace", ApiEndpoint3, VersionOfApi) {
Given("We have a user with CanInvalidateCacheNamespace entitlement")
Entitlement.entitlement.vend.addEntitlement("", resourceUser1.userId, CanInvalidateCacheNamespace.toString)
When("We make a request with connector namespace_id")
val request = (v6_0_0_Request / "management" / "cache" / "namespaces" / "invalidate").POST <@ (user1)
val response = makePostRequest(request, write(InvalidateCacheNamespaceJsonV600("connector")))
Then("We should get a 200")
response.code should equal(200)
And("The response should have the correct structure")
val result = response.body.extract[InvalidatedCacheNamespaceJsonV600]
result.namespace_id should equal("connector")
result.old_version should be > 0L
result.new_version should be > result.old_version
result.status should equal("invalidated")
}
scenario("We call invalidateCacheNamespace with valid abac_rule namespace", ApiEndpoint3, VersionOfApi) {
Given("We have a user with CanInvalidateCacheNamespace entitlement")
Entitlement.entitlement.vend.addEntitlement("", resourceUser1.userId, CanInvalidateCacheNamespace.toString)
When("We make a request with abac_rule namespace_id")
val request = (v6_0_0_Request / "management" / "cache" / "namespaces" / "invalidate").POST <@ (user1)
val response = makePostRequest(request, write(InvalidateCacheNamespaceJsonV600("abac_rule")))
Then("We should get a 200")
response.code should equal(200)
And("The response should have the correct structure")
val result = response.body.extract[InvalidatedCacheNamespaceJsonV600]
result.namespace_id should equal("abac_rule")
result.status should equal("invalidated")
}
}
feature(s"test $ApiEndpoint3 version $VersionOfApi - Version increment validation") {
scenario("We verify that cache version increments correctly on multiple invalidations", ApiEndpoint3, VersionOfApi) {
Given("We have a user with CanInvalidateCacheNamespace entitlement")
Entitlement.entitlement.vend.addEntitlement("", resourceUser1.userId, CanInvalidateCacheNamespace.toString)
When("We invalidate the same namespace twice")
val request1 = (v6_0_0_Request / "management" / "cache" / "namespaces" / "invalidate").POST <@ (user1)
val response1 = makePostRequest(request1, write(InvalidateCacheNamespaceJsonV600("rd_dynamic")))
Then("First invalidation should succeed")
response1.code should equal(200)
val result1 = response1.body.extract[InvalidatedCacheNamespaceJsonV600]
val firstNewVersion = result1.new_version
When("We invalidate again")
val request2 = (v6_0_0_Request / "management" / "cache" / "namespaces" / "invalidate").POST <@ (user1)
val response2 = makePostRequest(request2, write(InvalidateCacheNamespaceJsonV600("rd_dynamic")))
Then("Second invalidation should succeed")
response2.code should equal(200)
val result2 = response2.body.extract[InvalidatedCacheNamespaceJsonV600]
And("Version should have incremented again")
// Assumes nothing else invalidates "rd_dynamic" between the two calls.
result2.old_version should equal(firstNewVersion)
result2.new_version should equal(firstNewVersion + 1)
result2.status should equal("invalidated")
}
}
// ============================================================================================================
// Cross-endpoint test - Verify cache info updates after invalidation
// ============================================================================================================
feature(s"Integration test - Cache endpoints interaction") {
scenario("We verify cache info shows updated version after invalidation", ApiEndpoint2, ApiEndpoint3, VersionOfApi) {
Given("We have a user with both CanGetCacheInfo and CanInvalidateCacheNamespace entitlements")
Entitlement.entitlement.vend.addEntitlement("", resourceUser1.userId, CanGetCacheInfo.toString)
Entitlement.entitlement.vend.addEntitlement("", resourceUser1.userId, CanInvalidateCacheNamespace.toString)
When("We get the initial cache info")
val getRequest1 = (v6_0_0_Request / "system" / "cache" / "info").GET <@ (user1)
val getResponse1 = makeGetRequest(getRequest1)
getResponse1.code should equal(200)
val cacheInfo1 = getResponse1.body.extract[CacheInfoJsonV600]
// Find the rd_static namespace (or any other valid namespace)
val targetNamespace = "rd_static"
val initialVersion = cacheInfo1.namespaces.find(_.namespace_id == targetNamespace).map(_.current_version)
When("We invalidate the namespace")
val invalidateRequest = (v6_0_0_Request / "management" / "cache" / "namespaces" / "invalidate").POST <@ (user1)
val invalidateResponse = makePostRequest(invalidateRequest, write(InvalidateCacheNamespaceJsonV600(targetNamespace)))
invalidateResponse.code should equal(200)
val invalidateResult = invalidateResponse.body.extract[InvalidatedCacheNamespaceJsonV600]
When("We get the cache info again")
val getRequest2 = (v6_0_0_Request / "system" / "cache" / "info").GET <@ (user1)
val getResponse2 = makeGetRequest(getRequest2)
getResponse2.code should equal(200)
val cacheInfo2 = getResponse2.body.extract[CacheInfoJsonV600]
Then("The namespace version should have been incremented")
val updatedNamespace = cacheInfo2.namespaces.find(_.namespace_id == targetNamespace)
updatedNamespace should not be None
// initialVersion is None when the namespace was absent from the first
// info response, so the strict "greater than" check is conditional.
if (initialVersion.isDefined) {
updatedNamespace.get.current_version should be > initialVersion.get
}
updatedNamespace.get.current_version should equal(invalidateResult.new_version)
}
}
}

View File

@ -20,6 +20,6 @@ class ApiVersionUtilsTest extends V400ServerSetup {
versions.map(version => ApiVersionUtils.valueOf(version.fullyQualifiedVersion))
//NOTE: when a new API version is added, update this expected count manually and re-check the versions list.
versions.length shouldBe(24)
versions.length shouldBe(25)
}}
}

View File

@ -113,8 +113,14 @@
<forkMode>once</forkMode>
<junitxml>.</junitxml>
<filereports>WDF TestSuite.txt</filereports>
<argLine>-Drun.mode=test -XX:MaxMetaspaceSize=512m -Xms512m -Xmx512m</argLine>
<!-- Increased memory for faster test execution -->
<argLine>-Drun.mode=test -XX:MaxMetaspaceSize=1G -Xms2G -Xmx4G -XX:+UseG1GC -XX:+TieredCompilation -XX:TieredStopAtLevel=1 -XX:+UseStringDeduplication</argLine>
<tagsToExclude>code.external</tagsToExclude>
<testFailureIgnore>${maven.test.failure.ignore}</testFailureIgnore>
<!-- Disable parallel test execution to avoid shared state issues -->
<!-- <parallel>true</parallel>-->
<!-- <threadCount>4</threadCount>-->
<parallel>false</parallel>
</configuration>
<executions>
<execution>
@ -125,6 +131,49 @@
</execution>
</executions>
</plugin>
<plugin>
<!-- Render test results into an HTML report during the "package" phase.
     The "report-only" goal generates the report from XML result files
     already on disk without forking a new test run.
     NOTE(review): reportsDirectory and outputDirectory point at the same
     folder, so the HTML lands next to the raw XML — confirm intended. -->
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-report-plugin</artifactId>
<version>3.5.2</version>
<configuration>
<reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
<outputDirectory>${project.build.directory}/surefire-reports</outputDirectory>
</configuration>
<executions>
<execution>
<id>surefire-html-report</id>
<phase>package</phase>
<goals>
<goal>report-only</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<!-- Clean-up: delete the raw surefire XML result files in the "verify"
     phase, after the HTML report was generated in "package".
     NOTE(review): TEST-*.xml is the JUnit XML that CI dashboards and the
     workflow's artifact upload may consume — confirm deleting it here is
     intended. -->
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<version>3.1.0</version>
<executions>
<execution>
<id>delete-surefire-xml-after-html</id>
<phase>verify</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<target>
<delete>
<fileset dir="${project.build.directory}/surefire-reports">
<include name="TEST-*.xml"/>
<include name="TESTS-*.xml"/>
</fileset>
</delete>
</target>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-resources-plugin</artifactId>

22
pom.xml
View File

@ -21,6 +21,7 @@
<!-- Common plugin settings -->
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>${project.build.sourceEncoding}</project.reporting.outputEncoding>
<maven.test.failure.ignore>false</maven.test.failure.ignore>
<!-- vscaladoc settings -->
<maven.scaladoc.vscaladocVersion>1.2-m1</maven.scaladoc.vscaladocVersion>
<vscaladoc.links.liftweb.pathsufix>scaladocs/</vscaladoc.links.liftweb.pathsufix>
@ -133,10 +134,14 @@
<scalaVersion>${scala.compiler}</scalaVersion>
<charset>${project.build.sourceEncoding}</charset>
<displayCmd>true</displayCmd>
<recompileMode>incremental</recompileMode>
<useZincServer>true</useZincServer>
<jvmArgs>
<jvmArg>-DpackageLinkDefs=file://${project.build.directory}/packageLinkDefs.properties</jvmArg>
<jvmArg>-Xms64m</jvmArg>
<jvmArg>-Xmx1024m</jvmArg>
<jvmArg>-Xms512m</jvmArg>
<jvmArg>-Xmx2G</jvmArg>
<jvmArg>-XX:+TieredCompilation</jvmArg>
<jvmArg>-XX:TieredStopAtLevel=1</jvmArg>
</jvmArgs>
<args>
<arg>-unchecked</arg>
@ -146,6 +151,12 @@
<arg>-deprecation</arg>
-->
<arg>-Ypartial-unification</arg>
<!-- Enable language features to suppress warnings -->
<arg>-language:implicitConversions</arg>
<arg>-language:reflectiveCalls</arg>
<arg>-language:postfixOps</arg>
<!-- Suppress auto-application deprecation warning -->
<arg>-Wconf:cat=deprecation&amp;msg=auto-application:s</arg>
</args>
</configuration>
<executions>
@ -155,13 +166,6 @@
<goal>testCompile</goal>
</goals>
</execution>
<execution>
<id>scala-test-compile</id>
<phase>process-test-resources</phase>
<goals>
<goal>testCompile</goal>
</goals>
</execution>
</executions>
</plugin>
<!-- https://mvnrepository.com/artifact/org.scalatest/scalatest-maven-plugin -->

View File

@ -14,30 +14,66 @@
# Usage:
# ./run_all_tests.sh - Run full test suite
# ./run_all_tests.sh --summary-only - Regenerate summary from existing log
# ./run_all_tests.sh --timeout=60 - Run with 60 minute timeout
################################################################################
set -e
# Don't use set -e globally - it causes issues with grep returning 1 when no match
# Instead, we handle errors explicitly where needed
################################################################################
# PARSE COMMAND LINE ARGUMENTS
################################################################################
SUMMARY_ONLY=false
if [ "$1" = "--summary-only" ]; then
SUMMARY_ONLY=true
fi
TIMEOUT_MINUTES=0 # 0 means no timeout
for arg in "$@"; do
case $arg in
--summary-only)
SUMMARY_ONLY=true
;;
--timeout=*)
TIMEOUT_MINUTES="${arg#*=}"
;;
esac
done
################################################################################
# TERMINAL STYLING FUNCTIONS
################################################################################
# Set terminal to "test mode" - blue background, special title
# Set terminal to "test mode" - different colors for different phases
# Style the terminal for a test-runner phase: phase-specific background
# colour, white text, a window title, and a highlighted header bar.
# $1 - phase label (Starting | Building | Testing | Complete); defaults to "Running".
set_terminal_style() {
    local phase="${1:-Running}"

    # Pick the background colour for this phase; the text colour is always white.
    local bg_color
    case "$phase" in
        "Starting") bg_color="#4a4a4a" ;;  # dark gray
        "Building") bg_color="#ff6b35" ;;  # orange
        "Testing")  bg_color="#001f3f" ;;  # dark blue
        "Complete") bg_color="#2ecc40" ;;  # green
        *)          bg_color="#001f3f" ;;  # default blue
    esac
    echo -ne "\033]11;${bg_color}\007"     # background colour (OSC 11)
    echo -ne "\033]10;#ffffff\007"         # text colour (OSC 10)

    # Set window title
    echo -ne "\033]0;OBP-API Tests ${phase}...\007"

    # Print header bar with phase-specific styling
    printf "\033[44m\033[1;37m%-$(tput cols)s\r OBP-API TEST RUNNER ACTIVE - ${phase} \n%-$(tput cols)s\033[0m\n" " " " "
}
@ -91,8 +127,74 @@ DETAIL_LOG="${LOG_DIR}/last_run.log" # Full Maven output
SUMMARY_LOG="${LOG_DIR}/last_run_summary.log" # Summary only
FAILED_TESTS_FILE="${LOG_DIR}/failed_tests.txt" # Failed test list for run_specific_tests.sh
# Phase timing variables (stored in temporary file)
PHASE_START_TIME=0
mkdir -p "${LOG_DIR}"
# Function to get current time in milliseconds
# Print the current wall-clock time in milliseconds since the epoch.
# GNU date supports %N (sub-second precision); BSD/macOS date does not,
# so fall back to python3 there.
get_time_ms() {
    case "$OSTYPE" in
        darwin*)
            # macOS
            python3 -c "import time; print(int(time.time() * 1000))"
            ;;
        *)
            # Linux (GNU date: %3N = milliseconds)
            date +%s%3N
            ;;
    esac
}
# Function to record phase timing
# Record wall-clock timing (ms) for a named pipeline phase into
# ${LOG_DIR}/phase_timing.tmp. Each call closes out the previous phase by
# appending <PREV>_TIME=<elapsed-ms> and, except for "end", opens the next
# phase by appending a fresh PHASE_START_TIME marker.
# $1 - phase: starting | building | testing | complete | end
record_phase_time() {
    local phase="$1"
    local current_time
    current_time=$(get_time_ms)
    local timing_file="${LOG_DIR}/phase_timing.tmp"

    # "starting" resets the timing file and begins the first phase.
    if [ "$phase" = "starting" ]; then
        echo "PHASE_START_TIME=$current_time" > "$timing_file"
        return
    fi

    # Map each phase to the label of the phase it closes out.
    local label=""
    case "$phase" in
        "building") label="STARTING_TIME" ;;
        "testing")  label="BUILDING_TIME" ;;
        "complete") label="TESTING_TIME" ;;
        "end")      label="COMPLETE_TIME" ;;
        *)          return ;;  # unknown phase: no-op (matches original behaviour)
    esac

    # Always read the MOST RECENT start marker — the file accumulates one
    # PHASE_START_TIME line per phase. (Bug fix: the original "building"
    # branch took the FIRST match, which is wrong once several markers exist.)
    local phase_start=""
    if [ -f "$timing_file" ]; then
        phase_start=$(grep "PHASE_START_TIME=" "$timing_file" | tail -1 | cut -d= -f2)
    fi

    # Guard against an empty marker so the numeric test cannot error out.
    if [ -n "$phase_start" ] && [ "$phase_start" -gt 0 ] 2>/dev/null; then
        echo "${label}=$((current_time - phase_start))" >> "$timing_file"
    fi

    # Every phase except "end" starts the clock for the next phase.
    if [ "$phase" != "end" ]; then
        echo "PHASE_START_TIME=$current_time" >> "$timing_file"
    fi
}
# If summary-only mode, skip to summary generation
if [ "$SUMMARY_ONLY" = true ]; then
if [ ! -f "${DETAIL_LOG}" ]; then
@ -130,16 +232,21 @@ fi
rm -f "${LOG_DIR}/recent_lines.tmp"
echo " - Removed stale temp file"
fi
if [ -f "${LOG_DIR}/phase_timing.tmp" ]; then
rm -f "${LOG_DIR}/phase_timing.tmp"
echo " - Removed stale timing file"
fi
fi # End of if [ "$SUMMARY_ONLY" = true ]
################################################################################
# HELPER FUNCTIONS
################################################################################
# Log message to terminal and summary file
# Log message to terminal and both log files
# Write a message to the terminal and to both log files:
# timestamped in the summary log, verbatim in the detail log.
# $1 - message text
log_message() {
    local msg="$1"
    echo "$msg"
    echo "[$(date +"%Y-%m-%d %H:%M:%S")] $msg" >> "${SUMMARY_LOG}"
    echo "$msg" >> "${DETAIL_LOG}"
}
# Print section header
@ -231,8 +338,10 @@ generate_summary() {
# If no timing info (summary-only mode), extract from log
if [ $duration -eq 0 ] && grep -q "Total time:" "$detail_log"; then
local time_str=$(grep "Total time:" "$detail_log" | tail -1)
duration_min=$(echo "$time_str" | grep -oP '\d+(?= min)' || echo "0")
duration_sec=$(echo "$time_str" | grep -oP '\d+(?=\.\d+ s)' || echo "0")
duration_min=$(echo "$time_str" | sed 's/.*: //' | sed 's/ min.*//' | grep -o '[0-9]*' | head -1)
[ -z "$duration_min" ] && duration_min="0"
duration_sec=$(echo "$time_str" | sed 's/.* min //' | sed 's/\..*//' | grep -o '[0-9]*' | head -1)
[ -z "$duration_sec" ] && duration_sec="0"
fi
print_header "Test Results Summary"
@ -244,22 +353,36 @@ generate_summary() {
# Suites: completed M, aborted 0
# Tests: succeeded N, failed 0, canceled 0, ignored 0, pending 0
# All tests passed.
# We need to extract the stats from the last test run (in case there are multiple modules)
SCALATEST_SECTION=$(grep -A 4 "Run completed" "${detail_log}" | tail -5)
if [ -n "$SCALATEST_SECTION" ]; then
TOTAL_TESTS=$(echo "$SCALATEST_SECTION" | grep -oP "Total number of tests run: \K\d+" || echo "UNKNOWN")
SUCCEEDED=$(echo "$SCALATEST_SECTION" | grep -oP "succeeded \K\d+" || echo "UNKNOWN")
FAILED=$(echo "$SCALATEST_SECTION" | grep -oP "failed \K\d+" || echo "UNKNOWN")
ERRORS=$(echo "$SCALATEST_SECTION" | grep -oP "errors \K\d+" || echo "0")
SKIPPED=$(echo "$SCALATEST_SECTION" | grep -oP "ignored \K\d+" || echo "UNKNOWN")
# We need to sum stats from ALL test runs (multiple modules: obp-commons, obp-api, etc.)
# Sum up all "Total number of tests run" values (macOS compatible - no grep -P)
TOTAL_TESTS=$(grep "Total number of tests run:" "${detail_log}" 2>/dev/null | sed 's/.*Total number of tests run: //' | awk '{sum+=$1} END {print sum}' || echo "0")
[ -z "$TOTAL_TESTS" ] || [ "$TOTAL_TESTS" = "0" ] && TOTAL_TESTS="UNKNOWN"
# Sum up all succeeded from "Tests: succeeded N, ..." lines
SUCCEEDED=$(grep "Tests: succeeded" "${detail_log}" 2>/dev/null | sed 's/.*succeeded //' | sed 's/,.*//' | awk '{sum+=$1} END {print sum}' || echo "0")
[ -z "$SUCCEEDED" ] && SUCCEEDED="UNKNOWN"
# Sum up all failed from "Tests: ... failed N, ..." lines
FAILED=$(grep "Tests:.*failed" "${detail_log}" 2>/dev/null | sed 's/.*failed //' | sed 's/,.*//' | awk '{sum+=$1} END {print sum}' || echo "0")
[ -z "$FAILED" ] && FAILED="0"
# Sum up all ignored from "Tests: ... ignored N, ..." lines
IGNORED=$(grep "Tests:.*ignored" "${detail_log}" 2>/dev/null | sed 's/.*ignored //' | sed 's/,.*//' | awk '{sum+=$1} END {print sum}' || echo "0")
[ -z "$IGNORED" ] && IGNORED="0"
# Sum up errors (if any)
ERRORS=$(grep "errors" "${detail_log}" 2>/dev/null | grep -v "ERROR" | sed 's/.*errors //' | sed 's/[^0-9].*//' | awk '{sum+=$1} END {print sum}' || echo "0")
[ -z "$ERRORS" ] && ERRORS="0"
# Calculate total including ignored (like IntelliJ does)
if [ "$TOTAL_TESTS" != "UNKNOWN" ] && [ "$IGNORED" != "0" ]; then
TOTAL_WITH_IGNORED=$((TOTAL_TESTS + IGNORED))
else
TOTAL_TESTS="UNKNOWN"
SUCCEEDED="UNKNOWN"
FAILED="UNKNOWN"
ERRORS="0"
SKIPPED="UNKNOWN"
TOTAL_WITH_IGNORED="$TOTAL_TESTS"
fi
WARNINGS=$(grep -c "WARNING" "${detail_log}" || echo "UNKNOWN")
WARNINGS=$(grep -c "WARNING" "${detail_log}" 2>/dev/null || echo "0")
# Determine build status
if grep -q "BUILD SUCCESS" "${detail_log}"; then
@ -276,16 +399,153 @@ generate_summary() {
# Print summary
log_message "Test Run Summary"
log_message "================"
log_message "Timestamp: $(date)"
log_message "Duration: ${duration_min}m ${duration_sec}s"
# Extract Maven timestamps and calculate Terminal timestamps
local maven_start_timestamp=""
local maven_end_timestamp=""
local terminal_start_timestamp=""
local terminal_end_timestamp=$(date)
if [ "$start_time" -gt 0 ] && [ "$end_time" -gt 0 ]; then
# Use actual terminal start/end times if available
terminal_start_timestamp=$(date -r "$start_time" 2>/dev/null || date -d "@$start_time" 2>/dev/null || echo "Unknown")
terminal_end_timestamp=$(date -r "$end_time" 2>/dev/null || date -d "@$end_time" 2>/dev/null || echo "Unknown")
else
# Calculate terminal start time by subtracting duration from current time
if [ "$duration_min" -gt 0 -o "$duration_sec" -gt 0 ]; then
local total_seconds=$((duration_min * 60 + duration_sec))
local approx_start_epoch=$(($(date "+%s") - total_seconds))
terminal_start_timestamp=$(date -r "$approx_start_epoch" 2>/dev/null || echo "Approx. ${duration_min}m ${duration_sec}s ago")
else
terminal_start_timestamp="Unknown"
fi
fi
# Extract Maven timestamps from log
maven_end_timestamp=$(grep "Finished at:" "${detail_log}" | tail -1 | sed 's/.*Finished at: //' | sed 's/T/ /' | sed 's/+.*//' || echo "Unknown")
# Calculate Maven start time from Maven's "Total time" if available
local maven_total_time=$(grep "Total time:" "${detail_log}" | tail -1 | sed 's/.*Total time: *//' | sed 's/ .*//' || echo "")
if [ -n "$maven_total_time" ] && [ "$maven_end_timestamp" != "Unknown" ]; then
# Parse Maven duration (e.g., "02:06" for "02:06 min" or "43.653" for "43.653 s")
local maven_seconds=0
if echo "$maven_total_time" | grep -q ":"; then
# Format like "02:06" (minutes:seconds)
local maven_min=$(echo "$maven_total_time" | sed 's/:.*//')
local maven_sec=$(echo "$maven_total_time" | sed 's/.*://')
# Remove leading zeros to avoid octal interpretation
maven_min=$(echo "$maven_min" | sed 's/^0*//' | sed 's/^$/0/')
maven_sec=$(echo "$maven_sec" | sed 's/^0*//' | sed 's/^$/0/')
maven_seconds=$((maven_min * 60 + maven_sec))
else
# Format like "43.653" (seconds)
maven_seconds=$(echo "$maven_total_time" | sed 's/\..*//')
fi
# Calculate Maven start time
if [ "$maven_seconds" -gt 0 ]; then
local maven_end_epoch=$(date -j -f "%Y-%m-%d %H:%M:%S" "$maven_end_timestamp" "+%s" 2>/dev/null || echo "0")
if [ "$maven_end_epoch" -gt 0 ]; then
local maven_start_epoch=$((maven_end_epoch - maven_seconds))
maven_start_timestamp=$(date -r "$maven_start_epoch" 2>/dev/null || echo "Unknown")
else
maven_start_timestamp="Unknown"
fi
else
maven_start_timestamp="Unknown"
fi
else
maven_start_timestamp="Unknown"
fi
# Format Maven end timestamp nicely
if [ "$maven_end_timestamp" != "Unknown" ]; then
maven_end_timestamp=$(date -j -f "%Y-%m-%d %H:%M:%S" "$maven_end_timestamp" "+%a %b %d %H:%M:%S %Z %Y" 2>/dev/null || echo "$maven_end_timestamp")
fi
# Display both timelines
log_message "Terminal Timeline:"
log_message " Started: ${terminal_start_timestamp}"
log_message " Completed: ${terminal_end_timestamp}"
log_message " Duration: ${duration_min}m ${duration_sec}s"
log_message ""
log_message "Maven Timeline:"
log_message " Started: ${maven_start_timestamp}"
log_message " Completed: ${maven_end_timestamp}"
if [ -n "$maven_total_time" ]; then
local maven_duration_display=$(grep "Total time:" "${detail_log}" | tail -1 | sed 's/.*Total time: *//' || echo "Unknown")
log_message " Duration: ${maven_duration_display}"
fi
log_message ""
log_message "Build Status: ${BUILD_STATUS}"
log_message ""
# Phase timing breakdown (if available)
local timing_file="${LOG_DIR}/phase_timing.tmp"
if [ -f "$timing_file" ]; then
# Read timing values from file
local start_ms=$(grep "STARTING_TIME=" "$timing_file" | cut -d= -f2 2>/dev/null || echo "0")
local build_ms=$(grep "BUILDING_TIME=" "$timing_file" | cut -d= -f2 2>/dev/null || echo "0")
local test_ms=$(grep "TESTING_TIME=" "$timing_file" | cut -d= -f2 2>/dev/null || echo "0")
local complete_ms=$(grep "COMPLETE_TIME=" "$timing_file" | cut -d= -f2 2>/dev/null || echo "0")
# Ensure we have numeric values (default to 0 if empty)
[ -z "$start_ms" ] && start_ms=0
[ -z "$build_ms" ] && build_ms=0
[ -z "$test_ms" ] && test_ms=0
[ -z "$complete_ms" ] && complete_ms=0
# Clean up timing file
rm -f "$timing_file"
if [ "$start_ms" -gt 0 ] 2>/dev/null || [ "$build_ms" -gt 0 ] 2>/dev/null || [ "$test_ms" -gt 0 ] 2>/dev/null || [ "$complete_ms" -gt 0 ] 2>/dev/null; then
log_message "Phase Timing Breakdown:"
if [ "$start_ms" -gt 0 ] 2>/dev/null; then
log_message " Starting: ${start_ms}ms ($(printf "%.2f" $(echo "scale=2; $start_ms/1000" | bc))s)"
fi
if [ "$build_ms" -gt 0 ] 2>/dev/null; then
log_message " Building: ${build_ms}ms ($(printf "%.2f" $(echo "scale=2; $build_ms/1000" | bc))s)"
fi
if [ "$test_ms" -gt 0 ] 2>/dev/null; then
log_message " Testing: ${test_ms}ms ($(printf "%.2f" $(echo "scale=2; $test_ms/1000" | bc))s)"
fi
if [ "$complete_ms" -gt 0 ] 2>/dev/null; then
log_message " Complete: ${complete_ms}ms ($(printf "%.2f" $(echo "scale=2; $complete_ms/1000" | bc))s)"
fi
# Calculate percentages
local total_phase_time=$((start_ms + build_ms + test_ms + complete_ms))
if [ "$total_phase_time" -gt 0 ]; then
log_message ""
log_message "Phase Distribution:"
if [ "$start_ms" -gt 0 ] 2>/dev/null; then
local starting_pct=$(echo "scale=1; $start_ms * 100 / $total_phase_time" | bc)
log_message " Starting: ${starting_pct}%"
fi
if [ "$build_ms" -gt 0 ] 2>/dev/null; then
local building_pct=$(echo "scale=1; $build_ms * 100 / $total_phase_time" | bc)
log_message " Building: ${building_pct}%"
fi
if [ "$test_ms" -gt 0 ] 2>/dev/null; then
local testing_pct=$(echo "scale=1; $test_ms * 100 / $total_phase_time" | bc)
log_message " Testing: ${testing_pct}%"
fi
if [ "$complete_ms" -gt 0 ] 2>/dev/null; then
local complete_pct=$(echo "scale=1; $complete_ms * 100 / $total_phase_time" | bc)
log_message " Complete: ${complete_pct}%"
fi
fi
log_message ""
fi
fi
log_message "Test Statistics:"
log_message " Total: ${TOTAL_TESTS}"
log_message " Total: ${TOTAL_WITH_IGNORED} (${TOTAL_TESTS} run + ${IGNORED} ignored)"
log_message " Succeeded: ${SUCCEEDED}"
log_message " Failed: ${FAILED}"
log_message " Ignored: ${IGNORED}"
log_message " Errors: ${ERRORS}"
log_message " Skipped: ${SKIPPED}"
log_message " Warnings: ${WARNINGS}"
log_message ""
@ -320,7 +580,7 @@ generate_summary() {
# Extract test class names from failures
grep -B 20 "\*\*\* FAILED \*\*\*" "${detail_log}" | \
grep -oP "^[A-Z][a-zA-Z0-9_]+(?=:)" | \
grep -E "^[A-Z][a-zA-Z0-9_]+:" | sed 's/:$//' | \
sort -u | \
while read test_class; do
# Try to find package by searching for the class in test files
@ -375,6 +635,8 @@ fi
# START TEST RUN
################################################################################
# Record starting phase
record_phase_time "starting"
set_terminal_style "Starting"
# Start the test run
@ -414,24 +676,67 @@ fi
################################################################################
print_header "Checking Test Server Ports"
log_message "Checking if test server port 8018 is available..."
# Check if port 8018 is in use
if lsof -i :8018 >/dev/null 2>&1; then
log_message "[WARNING] Port 8018 is in use - attempting to kill process"
# Try to kill the process using the port
PORT_PID=$(lsof -t -i :8018 2>/dev/null)
# Default test port (can be overridden)
TEST_PORT=8018
MAX_PORT_ATTEMPTS=5
log_message "Checking if test server port ${TEST_PORT} is available..."
# Function to find an available port
# Probe consecutive TCP ports starting at $1 and print the first one that
# lsof reports as unused (return 0). If none of the $2 candidate ports is
# free, print an empty line and return 1.
find_available_port() {
    local candidate=$1
    local remaining=$2
    while [ "$remaining" -gt 0 ]; do
        if ! lsof -i :"$candidate" >/dev/null 2>&1; then
            echo "$candidate"
            return 0
        fi
        candidate=$((candidate + 1))
        remaining=$((remaining - 1))
    done
    echo ""
    return 1
}
# Check if port is in use
if lsof -i :${TEST_PORT} >/dev/null 2>&1; then
log_message "[WARNING] Port ${TEST_PORT} is in use - attempting to kill process"
PORT_PID=$(lsof -t -i :${TEST_PORT} 2>/dev/null || true)
if [ -n "$PORT_PID" ]; then
kill -9 $PORT_PID 2>/dev/null || true
sleep 2
log_message "[OK] Killed process $PORT_PID using port 8018"
# Verify port is now free
if lsof -i :${TEST_PORT} >/dev/null 2>&1; then
log_message "[WARNING] Could not free port ${TEST_PORT}, searching for alternative..."
NEW_PORT=$(find_available_port $((TEST_PORT + 1)) $MAX_PORT_ATTEMPTS)
if [ -n "$NEW_PORT" ]; then
log_message "[OK] Found available port: ${NEW_PORT}"
# Update test.default.props with new port
if [ -f "${PROPS_FILE}" ]; then
sed -i.bak "s/hostname=127.0.0.1:${TEST_PORT}/hostname=127.0.0.1:${NEW_PORT}/" "${PROPS_FILE}" 2>/dev/null || \
sed -i '' "s/hostname=127.0.0.1:${TEST_PORT}/hostname=127.0.0.1:${NEW_PORT}/" "${PROPS_FILE}"
log_message "[OK] Updated test.default.props to use port ${NEW_PORT}"
TEST_PORT=$NEW_PORT
fi
else
log_message "[ERROR] No available ports found in range ${TEST_PORT}-$((TEST_PORT + MAX_PORT_ATTEMPTS))"
exit 1
fi
else
log_message "[OK] Killed process $PORT_PID, port ${TEST_PORT} is now available"
fi
fi
else
log_message "[OK] Port 8018 is available"
log_message "[OK] Port ${TEST_PORT} is available"
fi
# Also check for any stale Java test processes
STALE_TEST_PROCS=$(ps aux | grep -E "TestServer|ScalaTest.*obp-api" | grep -v grep | awk '{print $2}' || true)
STALE_TEST_PROCS=$(ps aux | grep -E "TestServer|ScalaTest.*obp-api" | grep -v grep | awk '{print $2}' 2>/dev/null || true)
if [ -n "$STALE_TEST_PROCS" ]; then
log_message "[WARNING] Found stale test processes - cleaning up"
echo "$STALE_TEST_PROCS" | xargs kill -9 2>/dev/null || true
@ -481,7 +786,6 @@ log_message ""
################################################################################
print_header "Running Tests"
update_terminal_title "Building"
log_message "Executing: mvn clean test"
echo ""
@ -492,6 +796,13 @@ export START_TIME
MONITOR_FLAG="${LOG_DIR}/monitor.flag"
touch "${MONITOR_FLAG}"
# Optional timeout handling
MAVEN_PID=""
if [ "$TIMEOUT_MINUTES" -gt 0 ] 2>/dev/null; then
log_message "[INFO] Test timeout set to ${TIMEOUT_MINUTES} minutes"
TIMEOUT_SECONDS=$((TIMEOUT_MINUTES * 60))
fi
# Background process: Monitor log file and update title bar with progress
(
# Wait for log file to be created and have Maven output
@ -500,35 +811,50 @@ touch "${MONITOR_FLAG}"
done
phase="Building"
in_building=false
in_testing=false
timing_file="${LOG_DIR}/phase_timing.tmp"
# Keep monitoring until flag file is removed
while [ -f "${MONITOR_FLAG}" ]; do
# Use tail to look at recent lines only (last 500 lines for performance)
# This ensures O(1) performance regardless of log file size
recent_lines=$(tail -n 500 "${DETAIL_LOG}" 2>/dev/null)
recent_lines=$(tail -n 500 "${DETAIL_LOG}" 2>/dev/null || true)
# Switch to "Building" phase when Maven starts compiling
if ! $in_building && echo "$recent_lines" | grep -q -E 'Compiling|Building.*Open Bank Project' 2>/dev/null; then
phase="Building"
in_building=true
# Record building phase and update terminal (inline to avoid subshell issues)
current_time=$(python3 -c "import time; print(int(time.time() * 1000))" 2>/dev/null || date +%s000)
if [ -f "$timing_file" ]; then
phase_start=$(grep "PHASE_START_TIME=" "$timing_file" 2>/dev/null | tail -1 | cut -d= -f2 || echo "0")
[ -n "$phase_start" ] && [ "$phase_start" -gt 0 ] 2>/dev/null && echo "STARTING_TIME=$((current_time - phase_start))" >> "$timing_file"
fi
echo "PHASE_START_TIME=$current_time" >> "$timing_file"
echo -ne "\033]11;#ff6b35\007\033]10;#ffffff\007" # Orange background
fi
# Switch to "Testing" phase when tests start
if ! $in_testing && echo "$recent_lines" | grep -q "Run starting" 2>/dev/null; then
phase="Testing"
in_testing=true
# Record testing phase
current_time=$(python3 -c "import time; print(int(time.time() * 1000))" 2>/dev/null || date +%s000)
if [ -f "$timing_file" ]; then
phase_start=$(grep "PHASE_START_TIME=" "$timing_file" 2>/dev/null | tail -1 | cut -d= -f2 || echo "0")
[ -n "$phase_start" ] && [ "$phase_start" -gt 0 ] 2>/dev/null && echo "BUILDING_TIME=$((current_time - phase_start))" >> "$timing_file"
fi
echo "PHASE_START_TIME=$current_time" >> "$timing_file"
echo -ne "\033]11;#001f3f\007\033]10;#ffffff\007" # Blue background
fi
# Extract current running test suite and scenario from recent lines
suite=""
scenario=""
if $in_testing; then
# Find the most recent test suite name (pattern like "SomeTest:")
# Pipe directly to avoid temp file I/O
suite=$(echo "$recent_lines" | grep -E "Test:" | tail -1 | sed 's/\x1b\[[0-9;]*m//g' | sed 's/:$//' | tr -d '\n\r')
# Find the most recent scenario name (pattern like " Scenario: ..." or "- Scenario: ...")
scenario=$(echo "$recent_lines" | grep -i "scenario:" | tail -1 | sed 's/\x1b\[[0-9;]*m//g' | sed 's/^[[:space:]]*-*[[:space:]]*//' | sed -E 's/^[Ss]cenario:[[:space:]]*//' | tr -d '\n\r')
# Truncate scenario if too long (max 50 chars)
if [ -n "$scenario" ] && [ ${#scenario} -gt 50 ]; then
scenario="${scenario:0:47}..."
fi
suite=$(echo "$recent_lines" | grep -E "Test:" 2>/dev/null | tail -1 | sed 's/\x1b\[[0-9;]*m//g' | sed 's/:$//' | tr -d '\n\r' || true)
scenario=$(echo "$recent_lines" | grep -i "scenario:" 2>/dev/null | tail -1 | sed 's/\x1b\[[0-9;]*m//g' | sed 's/^[[:space:]]*-*[[:space:]]*//' | sed -E 's/^[Ss]cenario:[[:space:]]*//' | tr -d '\n\r' || true)
[ -n "$scenario" ] && [ ${#scenario} -gt 50 ] && scenario="${scenario:0:47}..."
fi
# Calculate elapsed time
@ -537,23 +863,102 @@ touch "${MONITOR_FLAG}"
seconds=$((duration % 60))
elapsed=$(printf "%dm %ds" $minutes $seconds)
# Update title: "Testing: DynamicEntityTest - Scenario name [5m 23s]"
update_terminal_title "$phase" "$elapsed" "" "$suite" "$scenario"
# Update title
title="OBP-API ${phase}"
[ -n "$suite" ] && title="${title}: ${suite}"
[ -n "$scenario" ] && title="${title} - ${scenario}"
title="${title}... [${elapsed}]"
echo -ne "\033]0;${title}\007"
sleep 5
done
) &
MONITOR_PID=$!
# Run Maven (all output goes to terminal AND log file)
if mvn clean test 2>&1 | tee "${DETAIL_LOG}"; then
TEST_RESULT="SUCCESS"
RESULT_COLOR=""
# Run Maven with optional timeout
if [ "$TIMEOUT_MINUTES" -gt 0 ] 2>/dev/null; then
# Run Maven in background and monitor for timeout
mvn clean test 2>&1 | tee "${DETAIL_LOG}" &
MAVEN_PID=$!
elapsed=0
while kill -0 $MAVEN_PID 2>/dev/null; do
sleep 10
elapsed=$((elapsed + 10))
if [ $elapsed -ge $TIMEOUT_SECONDS ]; then
log_message ""
log_message "[TIMEOUT] Test execution exceeded ${TIMEOUT_MINUTES} minutes - terminating"
kill -9 $MAVEN_PID 2>/dev/null || true
# Also kill any child Java processes
pkill -9 -P $MAVEN_PID 2>/dev/null || true
TEST_RESULT="TIMEOUT"
break
fi
done
if [ "$TEST_RESULT" != "TIMEOUT" ]; then
wait $MAVEN_PID
if [ $? -eq 0 ]; then
TEST_RESULT="SUCCESS"
else
TEST_RESULT="FAILURE"
fi
fi
else
TEST_RESULT="FAILURE"
RESULT_COLOR=""
# Run Maven normally (all output goes to terminal AND log file)
if mvn clean test 2>&1 | tee "${DETAIL_LOG}"; then
TEST_RESULT="SUCCESS"
else
TEST_RESULT="FAILURE"
fi
fi
################################################################################
# GENERATE HTML REPORT
################################################################################
print_header "Generating HTML Report"
log_message "Running: mvn surefire-report:report-only -DskipTests"
# Generate HTML report from surefire XML files (without re-running tests)
if mvn surefire-report:report-only -DskipTests 2>&1 | tee -a "${DETAIL_LOG}"; then
log_message "[OK] HTML report generated"
# Copy HTML reports to test-results directory for easy access
HTML_REPORT_DIR="${LOG_DIR}/html-reports"
mkdir -p "${HTML_REPORT_DIR}"
# Copy reports from both modules
if [ -f "obp-api/target/surefire-reports/surefire.html" ]; then
cp "obp-api/target/surefire-reports/surefire.html" "${HTML_REPORT_DIR}/obp-api-report.html"
# Also copy CSS, JS, images for proper rendering
cp -r "obp-api/target/surefire-reports/css" "${HTML_REPORT_DIR}/" 2>/dev/null || true
cp -r "obp-api/target/surefire-reports/js" "${HTML_REPORT_DIR}/" 2>/dev/null || true
cp -r "obp-api/target/surefire-reports/images" "${HTML_REPORT_DIR}/" 2>/dev/null || true
cp -r "obp-api/target/surefire-reports/fonts" "${HTML_REPORT_DIR}/" 2>/dev/null || true
cp -r "obp-api/target/surefire-reports/img" "${HTML_REPORT_DIR}/" 2>/dev/null || true
log_message " - obp-api report: ${HTML_REPORT_DIR}/obp-api-report.html"
fi
if [ -f "obp-commons/target/surefire-reports/surefire.html" ]; then
cp "obp-commons/target/surefire-reports/surefire.html" "${HTML_REPORT_DIR}/obp-commons-report.html"
log_message " - obp-commons report: ${HTML_REPORT_DIR}/obp-commons-report.html"
fi
# Also check for site reports location (alternative naming)
if [ -f "obp-api/target/site/surefire-report.html" ]; then
cp "obp-api/target/site/surefire-report.html" "${HTML_REPORT_DIR}/obp-api-report.html"
log_message " - obp-api report: ${HTML_REPORT_DIR}/obp-api-report.html"
fi
if [ -f "obp-commons/target/site/surefire-report.html" ]; then
cp "obp-commons/target/site/surefire-report.html" "${HTML_REPORT_DIR}/obp-commons-report.html"
log_message " - obp-commons report: ${HTML_REPORT_DIR}/obp-commons-report.html"
fi
else
log_message "[WARNING] Failed to generate HTML report"
fi
log_message ""
# Stop background monitor by removing flag file
rm -f "${MONITOR_FLAG}"
sleep 1
@ -568,11 +973,15 @@ DURATION_SEC=$((DURATION % 60))
# Update title with final results (no suite/scenario name for Complete phase)
FINAL_ELAPSED=$(printf "%dm %ds" $DURATION_MIN $DURATION_SEC)
# Build final counts with module context
FINAL_COMMONS=$(sed -n '/Building Open Bank Project Commons/,/Building Open Bank Project API/{/Tests: succeeded/p;}' "${DETAIL_LOG}" 2>/dev/null | grep -oP "succeeded \K\d+" | head -1)
FINAL_API=$(sed -n '/Building Open Bank Project API/,/OBP Http4s Runner/{/Tests: succeeded/p;}' "${DETAIL_LOG}" 2>/dev/null | grep -oP "succeeded \K\d+" | tail -1)
FINAL_COMMONS=$(sed -n '/Building Open Bank Project Commons/,/Building Open Bank Project API/{/Tests: succeeded/p;}' "${DETAIL_LOG}" 2>/dev/null | sed 's/.*succeeded //' | sed 's/,.*//' | head -1)
FINAL_API=$(sed -n '/Building Open Bank Project API/,/OBP Http4s Runner/{/Tests: succeeded/p;}' "${DETAIL_LOG}" 2>/dev/null | sed 's/.*succeeded //' | sed 's/,.*//' | tail -1)
FINAL_COUNTS=""
[ -n "$FINAL_COMMONS" ] && FINAL_COUNTS="commons:+${FINAL_COMMONS}"
[ -n "$FINAL_API" ] && FINAL_COUNTS="${FINAL_COUNTS:+${FINAL_COUNTS} }api:+${FINAL_API}"
# Record complete phase start and change to green for completion phase
record_phase_time "complete"
set_terminal_style "Complete"
update_terminal_title "Complete" "$FINAL_ELAPSED" "$FINAL_COUNTS" "" ""
################################################################################
@ -585,6 +994,9 @@ else
EXIT_CODE=1
fi
# Record end time for complete phase
record_phase_time "end"
log_message ""
log_message "Logs saved to:"
log_message " ${DETAIL_LOG}"
@ -592,6 +1004,13 @@ log_message " ${SUMMARY_LOG}"
if [ -f "${FAILED_TESTS_FILE}" ]; then
log_message " ${FAILED_TESTS_FILE}"
fi
if [ -d "${LOG_DIR}/html-reports" ]; then
log_message ""
log_message "HTML Reports:"
for report in "${LOG_DIR}/html-reports"/*.html; do
[ -f "$report" ] && log_message " $report"
done
fi
echo ""
exit ${EXIT_CODE}