Mirror of https://github.com/OpenBankProject/OBP-API.git (synced 2026-02-06 11:06:49 +00:00)

refactor/removed "kafka" from documents

parent 76fd73f769
commit 7d1db2c23b
@@ -246,7 +246,7 @@ trait OBPRestHelper extends RestHelper with MdcLoggable {
# }
# When is enabled we show all messages in a chain. For instance:
# {
- # "error": "OBP-30001: Bank not found. Please specify a valid value for BANK_ID. <- Full(Kafka_TimeoutExceptionjava.util.concurrent.TimeoutException: The stream has not been completed in 1550 milliseconds.)"
+ # "error": "OBP-30001: Bank not found. Please specify a valid value for BANK_ID. <- Full(TimeoutExceptionjava.util.concurrent.TimeoutException: The stream has not been completed in 1550 milliseconds.)"
# }
*/
implicit def jsonResponseBoxToJsonResponse(box: Box[JsonResponse]): JsonResponse = {

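Editor's note: as background for the chain rendering described in the comment above, here is a minimal, self-contained sketch (not the OBP implementation) of how Lift's Failure can carry both the OBP error text and the wrapped connector exception; only the standard lift-common API is assumed.

// Sketch only: a Lift Failure whose exception slot holds the connector timeout,
// so a renderer can print "OBP-30001: ... <- Full(TimeoutException ...)".
import net.liftweb.common.{Box, Empty, Failure, Full}

object FailureChainSketch extends App {
  val cause = new java.util.concurrent.TimeoutException(
    "The stream has not been completed in 1550 milliseconds.")

  val chained: Box[Nothing] = Failure(
    "OBP-30001: Bank not found. Please specify a valid value for BANK_ID.",
    Full(cause),
    Empty)

  chained match {
    case Failure(msg, exception, _) => println(s"$msg <- $exception")
    case _                          => ()
  }
}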
@@ -618,7 +618,7 @@ object SwaggerDefinitionsJSON {

val messageDocJson = MessageDocJson(
process = "getAccounts",
- message_format = "KafkaV2017",
+ message_format = "rest_vMar2019",
inbound_topic = Some("from.obp.api.1.to.adapter.mf.caseclass.OutboundGetAccounts"),
outbound_topic = Some("to.obp.api.1.caseclass.OutboundGetAccounts"),
description = "get Banks",
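Editor's note: for readers following the Swagger example above, a minimal, self-contained sketch of how fields like these serialize to JSON with lift-json. The case class below is a stand-in with illustrative names, not the real MessageDocJson.

// Sketch only: a stand-in for the message-doc fields shown above, serialized
// with lift-json so the resulting JSON uses exactly these snake_case names.
import net.liftweb.json.Serialization.write
import net.liftweb.json.{DefaultFormats, Formats}

object MessageDocJsonSketchExample extends App {
  case class MessageDocSketch(
    process: String,
    message_format: String,
    inbound_topic: Option[String],
    outbound_topic: Option[String],
    description: String
  )

  implicit val formats: Formats = DefaultFormats

  val sample = MessageDocSketch(
    process = "getAccounts",
    message_format = "rest_vMar2019",
    inbound_topic = Some("from.obp.api.1.to.adapter.mf.caseclass.OutboundGetAccounts"),
    outbound_topic = Some("to.obp.api.1.caseclass.OutboundGetAccounts"),
    description = "get Banks"
  )

  println(write(sample)) // {"process":"getAccounts","message_format":"rest_vMar2019",...}
}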
@@ -4020,7 +4020,7 @@ object SwaggerDefinitionsJSON {
user_auth_contexts = List(userAuthContextJson)
)

- val obpApiLoopbackJson = ObpApiLoopbackJson("kafka_vSept2018","f0acd4be14cdcb94be3433ec95c1ad65228812a0","10 ms")
+ val obpApiLoopbackJson = ObpApiLoopbackJson("rest_vMar2019","f0acd4be14cdcb94be3433ec95c1ad65228812a0","10 ms")

val refresUserJson = RefreshUserJson("10 ms")

@@ -2057,7 +2057,6 @@ object APIUtil extends MdcLoggable with CustomJsonFormats{
Glossary.glossaryItems.toList.sortBy(_.title)
}

- // Used to document the KafkaMessage calls
case class MessageDoc(
process: String,
messageFormat: String,
@@ -3405,7 +3404,6 @@ object APIUtil extends MdcLoggable with CustomJsonFormats{

/**
* This method is used for cache in connector level.
- * eg: KafkaMappedConnector_vJune2017.bankTTL
* The default cache time unit is second.
*/
def getSecondsCache(cacheType: String) : Int = {

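Editor's note: to illustrate the connector-level TTL lookup the comment above refers to, here is a hedged sketch; the prop key format is an assumption for illustration, and only APIUtil.getPropsValue (used elsewhere in this diff) plus Lift's tryo are assumed.

// Sketch only, NOT the real getSecondsCache body. The prop key below is an
// assumed, illustrative name; getPropsValue returns a Box[String].
import code.api.util.APIUtil
import net.liftweb.util.Helpers.tryo

object ConnectorCacheTtlSketch {
  def getSecondsCacheSketch(cacheType: String): Int =
    APIUtil.getPropsValue(s"connector.cache.ttl.seconds.$cacheType") // assumed key
      .flatMap(v => tryo(v.toInt))
      .openOr(0) // treat a missing or malformed prop as "no caching"
}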
@@ -256,7 +256,6 @@ object DynamicUtil extends MdcLoggable{
|import code.api.dynamic.endpoint.helper.MockResponseHolder
|import code.bankconnectors._
|import code.customer.internalMapping.MappedCustomerIdMappingProvider
- |import code.kafka.KafkaHelper
|import code.model.dataAccess.internalMapping.MappedAccountIdMappingProvider
|import code.util.AkkaHttpClient._
|import code.util.Helper.MdcLoggable

@@ -681,21 +681,15 @@ object ErrorMessages {
// Exceptions (OBP-50XXX)
val UnknownError = "OBP-50000: Unknown Error."
val FutureTimeoutException = "OBP-50001: Future Timeout Exception."
- val KafkaMessageClassCastException = "OBP-50002: Kafka Response Message Class Cast Exception."
val AdapterOrCoreBankingSystemException = "OBP-50003: Adapter Or Core Banking System Exception. Failed to get a valid response from the south side Adapter or Core Banking System."
// This error may not be shown to user, just for debugging.
val CurrentUserNotFoundException = "OBP-50004: Method (AuthUser.getCurrentUser) can not find the current user in the current context!"
val AnUnspecifiedOrInternalErrorOccurred = "OBP-50005: An unspecified or internal error occurred."
- val KafkaInterruptedException = "OBP-50006: Kafka interrupted exception."
- val KafkaExecutionException = "OBP-50007: Kafka execution exception."
- val KafkaStreamTimeoutException = "OBP-50008: Akka Kafka stream timeout exception."
- val KafkaUnknownError = "OBP-50009: Kafka Unknown Error."
val ScalaEmptyBoxToLiftweb = "OBP-50010: Scala return Empty box to Liftweb."
val NoCallContext = "OBP-50012: Can not get the CallContext object here."
val UnspecifiedCbsError = "OBP-50013: The Core Banking System returned an unspecified error or response."
val RefreshUserError = "OBP-50014: Can not refresh User."
val InternalServerError = "OBP-50015: The server encountered an unexpected condition which prevented it from fulfilling the request."
- val KafkaServerUnavailable = "OBP-50016: The kafka server is unavailable."
val NotAllowedEndpoint = "OBP-50017: The endpoint is forbidden at this API instance."
val UnderConstructionError = "OBP-50018: Under Construction Error."
val DatabaseConnectionClosedError = "OBP-50019: Cannot connect to the OBP database."
@@ -726,7 +720,6 @@ object ErrorMessages {
val InvalidConnectorResponseForGetStatus = "OBP-50222: Connector method getStatus did not return the data we requested."

// Adapter Exceptions (OBP-6XXXX)
- // Reserved for adapter (south of Kafka) messages
// Also used for connector == mapped, and show it as the Internal errors.
val GetStatusException = "OBP-60001: Save Transaction Exception. "
val GetChargeValueException = "OBP-60002: Get ChargeValue Exception. "

@@ -129,10 +129,10 @@ object Glossary extends MdcLoggable {
// NOTE! Some glossary items are defined in ExampleValue.scala


- val latestKafkaConnector : String = "kafka_vSept2018"
+ val latestConnector : String = "rest_vMar2019"

def messageDocLink(process: String) : String = {
- s"""<a href="/message-docs?connector=$latestKafkaConnector#$process">$process</a>"""
+ s"""<a href="/message-docs?connector=$latestConnector#$process">$process</a>"""
}

val latestAkkaConnector : String = "akka_vDec2018"
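Editor's note: a short usage sketch of the helper shown in this hunk, with the values copied from the new lines above.

// Usage sketch: messageDocLink renders an anchor into the message-docs page
// for the current connector, keyed by the process name.
object MessageDocLinkSketch extends App {
  val latestConnector: String = "rest_vMar2019"

  def messageDocLink(process: String): String =
    s"""<a href="/message-docs?connector=$latestConnector#$process">$process</a>"""

  println(messageDocLink("obp.getBanks"))
  // <a href="/message-docs?connector=rest_vMar2019#obp.getBanks">obp.getBanks</a>
}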
@@ -171,8 +171,6 @@ object Glossary extends MdcLoggable {
|
|[Access Control](/glossary#API.Access-Control)
|
- |[OBP Kafka](/glossary#Adapter.Kafka.Intro)
- |
|[OBP Akka](/glossary#Adapter.Akka.Intro)
|
|[API Explorer](https://github.com/OpenBankProject/API-Explorer/blob/develop/README.md)
@@ -289,159 +287,6 @@ object Glossary extends MdcLoggable {
|
""")



- glossaryItems += GlossaryItem(
- title = "Adapter.Kafka.Intro",
- description =
- s"""
- |## Use Kafka as an interface between OBP and your Core Banking System (CBS).
- |
- |
- |For an introduction to Kafka see [here](https://kafka.apache.org/)
- |
- |### Installation Prerequisites
- |
- |
- |* You have OBP-API running and it is connected to a Kafka installation.
- | You can check OBP -> Kafka connectivity using the <a href="/#OBPv3_1_0-getObpConnectorLoopback">"loopback" endpoint</a>.
- |
- |* Ideally you have API Explorer running (the application serving this page) but its not necessary - you could use any other REST client.
- |* You might want to also run API Manager as it makes it easier to grant yourself roles, but its not necessary - you could use the API Explorer / any REST client instead.
- |
- |### Create a Customer User and an Admin User
- |
- |* Register a User who will use the API as a Customer.
- |* Register another User that will use the API as an Admin. The Admin user will need some Roles. See [here](/index#OBPv2_0_0-addEntitlement). You can bootstrap an Admin user by editing the Props file. See the README for that.
- |
- |### Add some authentication context to the Customer User
- |
- |* As the Admin User, use the [Create Auth Context](/index#OBPv3_1_0-createUserAuthContext) endpoint to add one or more attributes to the Customer User.
- |For instance you could add the name/value pair CUSTOMER_NUMBER/889763 and this will be sent to the Adapter / CBS inside the AuthInfo object.
- |
- |
- |Now you should be able to use the [Get Auth Contexts](/index#OBPv3_1_0-getUserAuthContexts) endpoint to see the data you added.
- |
- |### Write or Build an Adapter to respond to the following messages.
- |
- | When getting started, we suggest that you implement the messages in the following order:
- |
- |1) Core (Prerequisites) - Get Adapter, Get Banks, Get Bank
- |
- |* ${messageDocLink("obp.getAdapterInfo")}
- |
- |Now you should be able to use the [Adapter Info](/index#OBPv3_1_0-getAdapterInfo) endpoint
- |
- |* ${messageDocLink("obp.getBanks")}
- |
- |Now you should be able to use the [Get Banks](/index#OBPv3_0_0-getBanks) endpoint
- |
- |* ${messageDocLink("obp.getBank")}
- |
- |Now you should be able to use the [Get Bank](/index#OBPv3_0_0-bankById) endpoint
- |
- |
- |2) Core (Authentications) -The step1 Apis are all anonymous access. If you need to link bank customer data to the obp user,
- | Then you need link OBP user with Bank user/customer using the [Create User Auth Context]((/index#OBPv3_1_0-createUserAuthContext)). Also
- | check the description for this endpoint. Once you create the user-auth-context for one user, then these user-auth-context key value pair
- | can be propagated over connector message. Than the Adapter can use it to map OBP user and Bank user/customer.
- |
- |* ${messageDocLink("obp.getBankAccountsForUser")}
- |
- |Now you should be able to use the [Refresh User](/index#OBPv3_1_0-refreshUser) endpoint
- |
- |3) Customers for logged in User
- |
- |* ${messageDocLink("obp.getCustomersByUserIdBox")}
- |
- |Now you should be able to use the [Get Customers](/index#OBPv3_0_0-getCustomersForUser) endpoint.
- |
- |
- |4) Get Accounts
- |
- |Now you should already be able to use the [Get Accounts at Bank (IDs only).](/index#OBPv3_0_0-getPrivateAccountIdsbyBankId) endpoint.
- |
- |* ${messageDocLink("obp.getCoreBankAccounts")}
- |
- | The above messages should enable at least the following endpoints:
- |
- |* [Get Accounts at Bank (Minimal).](/index#OBPv3_0_0-privateAccountsAtOneBank)
- |* [Get Accounts at all Banks (private)](/index#OBPv3_0_0-corePrivateAccountsAllBanks)
- |
- |5) Get Account
- |
- |* ${messageDocLink("obp.checkBankAccountExists")}
- |* ${messageDocLink("obp.getBankAccount")}
- |
- | The above message should enable at least the following endpoints:
- |
- |* [Get Account by Id - Core](/index#OBPv3_0_0-getCoreAccountById)
- |* [Get Account by Id - Full](/index#OBPv3_0_0-getPrivateAccountById)
- |
- |6) Get Transactions
- |
- |* ${messageDocLink("obp.getTransactions")}
- |* ${messageDocLink("obp.getTransaction")}
- |
- |7) Manage Counterparties
- |
- |* ${messageDocLink("obp.getCounterparties")}
- |* ${messageDocLink("obp.getCounterpartyByCounterpartyId")}
- |* ${messageDocLink("obp.createCounterparty")}
- |
- |8) Get Transaction Request Types
- |
- |* This is configured using OBP Props - No messages required
- |
- |9) Get Challenge Threshold (CBS)
- |
- |* ${messageDocLink("obp.getChallengeThreshold")}
- |
- |10) Make Payment (used by Create Transaction Request)
- |
- |* ${messageDocLink("obp.makePaymentv210")}
- |* This also requires 8,9,10 for high value payments.
- |
- |11) Get Transaction Requests.
- |
- |* ${messageDocLink("obp.getTransactionRequests210")}
- |
- |12) Generate Security Challenges (CBS)
- |
- |* ${messageDocLink("obp.createChallenge")}
- |
- |13) Answer Security Challenges (Validate)
- |
- |* Optional / Internal OBP (No additional messages required)
- |
- |14) Manage Counterparty Metadata
- |
- |* Internal OBP (No additional messages required)
- |
- |15) Get Entitlements
- |
- |* Internal OBP (No additional messages required)
- |
- |16) Manage Roles
- |
- |* Internal OBP (No additional messages required)
- |
- |17) Manage Entitlements
- |
- |* Internal OBP (No additional messages required)
- |
- |18) Manage Views
- |
- |* Internal OBP (No additional messages required)
- |
- |19) Manage Transaction Metadata
- |
- |* Internal OBP (No additional messages required)
- |
- |"""
- )


glossaryItems += GlossaryItem(
title = "Adapter.Stored_Procedure.Intro",
description =
@@ -488,14 +333,14 @@ object Glossary extends MdcLoggable {
|
|However, there are multiple available connector implementations - and you can also mix and create your own.|
|
- |E.g. Kafka
+ |E.g. RabbitMq
|
|<pre>
|[=============] [============] [============] [============] [============]
|[ ] [ ] [ ] [ ] [ ]
- |[ OBP API ] ===> Kafka Connector ===> [ Kafka ] ===> [ Kafka ] [ OBP Kafka ] ===> [ CBS ]
+ |[ OBP API ] ===> RabbitMq Connector ===> [ RabbitMq ] ===> [ RabbitMq ] [ OBP RabbitMq] ===> [ CBS ]
|[ ] Puts OBP Messages [ Connector ] [ Cluster ] [ Adapter ] [ ]
- |[=============] onto a Kafka [============] [============] [============] [============]
+ |[=============] onto a RabbitMq [============] [============] [============] [============]
|
|</pre>
|
@@ -691,7 +536,7 @@ object Glossary extends MdcLoggable {
|It SHOULD be a UUID. It MUST be unique in combination with the BANK_ID. ACCOUNT_ID is used in many URLS so it should be considered public.
|(We do NOT use account number in URLs since URLs are cached and logged all over the internet.)
|In local / sandbox mode, ACCOUNT_ID is generated as a UUID and stored in the database.
- |In non sandbox modes (Kafka etc.), ACCOUNT_ID is mapped to core banking account numbers / identifiers at the South Side Adapter level.
+ |In non sandbox modes (RabbitMq etc.), ACCOUNT_ID is mapped to core banking account numbers / identifiers at the South Side Adapter level.
|ACCOUNT_ID is used to link Metadata and Views so it must be persistant and known to the North Side (OBP-API).
|
| Example value: ${accountIdExample.value}
@@ -3172,7 +3017,7 @@ object Glossary extends MdcLoggable {
|
|The OBP Connector is a core part of the OBP-API and is written in Scala / Java and potentially other JVM languages.
|
- |The OBP Connector implements multiple functions / methods in a style that satisfies a particular transport / protocol such as HTTP REST, Akka or Kafka.
+ |The OBP Connector implements multiple functions / methods in a style that satisfies a particular transport / protocol such as HTTP REST, Akka or RabbitMq.
|
|An OBP Adapter is a separate software component written in any programming language that responds to requests from the OBP Connector.
|
@@ -3193,7 +3038,7 @@ object Glossary extends MdcLoggable {
| 1) The Name of the internal OBP function / method e.g. getAccountsForUser
| 2) The Outbound Message structure.
| 3) The Inbound Message structure.
- | 4) The Connector name which denotes the protocol / transport used (e.g. REST, Akka, Kafka etc)
+ | 4) The Connector name which denotes the protocol / transport used (e.g. REST, Akka, RabbitMq etc)
| 5) Outbound / Inbound Topic
| 6) A list of required Inbound fields
| 7) A list of dependent endpoints.
@@ -3233,7 +3078,7 @@ object Glossary extends MdcLoggable {
|This contains the named fields and their values which are specific to each Function / Message Doc.
|
|
- |The Outbound / Inbound Topics are used for routing in multi OBP instance / Kafka installations. (so OBP nodes only listen only to the correct Topics).
+ |The Outbound / Inbound Topics are used for routing in multi OBP instance / RabbitMq installations. (so OBP nodes only listen only to the correct Topics).
|
|The dependent endpoints are listed to facilitate navigation in the API Explorer so integrators can test endpoints during integration.
|
@@ -3247,7 +3092,7 @@ object Glossary extends MdcLoggable {
s"""
|
| Open Bank Project can have different connectors, to connect difference data sources.
- | We support several sources at the moment, eg: databases, rest services, stored procedures and kafka.
+ | We support several sources at the moment, eg: databases, rest services, stored procedures and RabbitMq.
|
| If OBP set connector=star, then you can use this method routing to switch the sources.
| And we also provide the fields mapping in side the endpoints. If the fields in the source are different from connector,

@@ -1371,7 +1371,7 @@ object NewStyle extends MdcLoggable{

def getTransactionRequestImpl(transactionRequestId: TransactionRequestId, callContext: Option[CallContext]): OBPReturnType[TransactionRequest] =
{
- //Note: this method is not over kafka yet, so use Future here.
+ //Note: this method is not over CBS yet, so use Future here.
Future{ Connector.connector.vend.getTransactionRequestImpl(transactionRequestId, callContext)} map {
unboxFullOrFail(_, callContext, s"$InvalidTransactionRequestId Current TransactionRequestId($transactionRequestId) ")
}

@@ -135,7 +135,7 @@ object WriteMetricUtil extends MdcLoggable {
Empty
}

- // TODO This should use Elastic Search or Kafka not an RDBMS
+ // TODO This should use Elastic Search not an RDBMS
val u: User = user.orNull
val userId = if (u != null) u.userId else "null"
val userName = if (u != null) u.name else "null"

@@ -438,12 +438,12 @@ trait APIMethods220 {
"GET",
"/message-docs/CONNECTOR",
"Get Message Docs",
- """These message docs provide example messages sent by OBP to the (Kafka) message queue for processing by the Core Banking / Payment system Adapter - together with an example expected response and possible error codes.
+ """These message docs provide example messages sent by OBP to the (RabbitMq) message queue for processing by the Core Banking / Payment system Adapter - together with an example expected response and possible error codes.
| Integrators can use these messages to build Adapters that provide core banking services to OBP.
|
| Note: API Explorer provides a Message Docs page where these messages are displayed.
|
- | `CONNECTOR`: kafka_vSept2018, stored_procedure_vDec2019 ...
+ | `CONNECTOR`: rest_vMar2019, stored_procedure_vDec2019 ...
""".stripMargin,
EmptyBody,
messageDocsJson,
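Editor's note: as a usage sketch of the endpoint documented above, mirroring the V2.2.0 test that appears later in this diff; v2_2Request and makeGetRequest are assumed to come from the OBP test setup traits.

// Sketch only: fetch the message docs for one connector via the REST endpoint.
val request  = (v2_2Request / "message-docs" / "rest_vMar2019")
val response = makeGetRequest(request)
response.code should be (200)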
@@ -457,7 +457,7 @@ trait APIMethods220 {
implicit val ec = EndpointContext(Some(cc))
for {
connectorObject <- Future(tryo{Connector.getConnectorInstance(connector)}) map { i =>
- val msg = s"$InvalidConnector Current Input is $connector. It should be eg: kafka_vSept2018..."
+ val msg = s"$InvalidConnector Current Input is $connector. It should be eg: rest_vMar2019..."
unboxFullOrFail(i, cc.callContext, msg)
}
} yield {

@@ -1880,11 +1880,7 @@ trait APIMethods310 {
(_, callContext) <- anonymousAccess(cc)
connectorVersion = APIUtil.getPropsValue("connector").openOrThrowException("connector props field `connector` not set")
starConnectorProps = APIUtil.getPropsValue("starConnector_supported_types").openOr("notfound")
- // obpApiLoopback <- connectorVersion.contains(connectorVersion.contains("star")) match {
- // case false => throw new IllegalStateException(s"${NotImplemented}for connector ${connectorVersion}")
- // case _ => throw new IllegalStateException(s"${KafkaServerUnavailable} Timeout error, because kafka do not return message to OBP-API. ${e.getMessage}")
- // }
- //TODO, before we only support kafka, now we need to decide what kind of connector should we use.
+ //TODO we need to decide what kind of connector should we use.
obpApiLoopback = ObpApiLoopback(
connectorVersion ="Unknown",
gitCommit ="Unknown",

@@ -1043,7 +1043,7 @@ trait APIMethods400 extends MdcLoggable {
|4) `answer` : must be `123` in case that Strong Customer Authentication method for OTP challenge is dummy.
| For instance: SANDBOX_TAN_OTP_INSTRUCTION_TRANSPORT=dummy
| Possible values are dummy,email and sms
- | In kafka mode, the answer can be got by phone message or other SCA methods.
+ | In CBS mode, the answer can be got by phone message or other SCA methods.
|
|Note that each Transaction Request Type can have its own OTP_INSTRUCTION_TRANSPORT method.
|OTP_INSTRUCTION_TRANSPORT methods are set in Props. See sample.props.template for instructions.

@@ -44,7 +44,7 @@ import scala.reflect.runtime.universe.{MethodSymbol, typeOf}
So we can switch between different sources of resources e.g.
- Mapper ORM for connecting to RDBMS (via JDBC) https://www.assembla.com/wiki/show/liftweb/Mapper
- MongoDB
- - KafkaMQ
+ - RabbitMq
etc.

Note: We also have individual providers for resources like Branches and Products.

@@ -1311,7 +1311,7 @@ object LocalMappedConnector extends Connector with MdcLoggable {


/**
- * This is used for create or update the special bankAccount for COUNTERPARTY stuff (toAccountProvider != "OBP") and (Connector = Kafka)
+ * This is used for create or update the special bankAccount for COUNTERPARTY stuff (toAccountProvider != "OBP") and (Connector = RabbitMq)
* details in createTransactionRequest - V210 ,case COUNTERPARTY.toString
*
*/

@@ -54,12 +54,12 @@ trait RabbitMQConnector_vOct2024 extends Connector with MdcLoggable {
implicit override val nameOfConnector = RabbitMQConnector_vOct2024.toString

// "Versioning" of the messages sent by this or similar connector works like this:
- // Use Case Classes (e.g. KafkaInbound... KafkaOutbound... as below to describe the message structures.
+ // Use Case Classes (e.g. Inbound... Outbound... as below to describe the message structures.
// Each connector has a separate file like this one.
// Once the message format is STABLE, freeze the key/value pair names there. For now, new keys may be added but none modified.
// If we want to add a new message format, create a new file e.g. March2017_messages.scala
- // Then add a suffix to the connector value i.e. instead of kafka we might have kafka_march_2017.
- // Then in this file, populate the different case classes depending on the connector name and send to Kafka
+ // Then add a suffix to the connector value i.e. instead of RabbitMq we might have rest_vMar2019.
+ // Then in this file, populate the different case classes depending on the connector name and send to CBS
val messageFormat: String = "Oct2024"

override val messageDocs = ArrayBuffer[MessageDoc]()

@@ -78,12 +78,12 @@ trait RestConnector_vMar2019 extends Connector with MdcLoggable {
implicit override val nameOfConnector = RestConnector_vMar2019.toString

// "Versioning" of the messages sent by this or similar connector works like this:
- // Use Case Classes (e.g. KafkaInbound... KafkaOutbound... as below to describe the message structures.
+ // Use Case Classes (e.g. Inbound... Outbound... as below to describe the message structures.
// Each connector has a separate file like this one.
// Once the message format is STABLE, freeze the key/value pair names there. For now, new keys may be added but none modified.
// If we want to add a new message format, create a new file e.g. March2017_messages.scala
- // Then add a suffix to the connector value i.e. instead of kafka we might have kafka_march_2017.
- // Then in this file, populate the different case classes depending on the connector name and send to Kafka
+ // Then add a suffix to the connector value i.e. instead of Rest we might have rest_vMar2019.
+ // Then in this file, populate the different case classes depending on the connector name and send to rest_vMar2019
val messageFormat: String = "March2019"

override val messageDocs = ArrayBuffer[MessageDoc]()

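Editor's note: as a reading aid for the versioning convention in the comments above, a minimal, illustrative sketch of an outbound message carrying the mandatory action and messageFormat fields; the class name is an assumption, not a real OBP type.

// Illustrative sketch only: an outbound message for this connector. The base
// outbound contract elsewhere in this diff says action and messageFormat are mandatory.
case class OutboundGetBanksSketch(
  action: String        = "obp.getBanks",
  messageFormat: String = "March2019" // matches val messageFormat above
)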
@@ -59,12 +59,12 @@ trait StoredProcedureConnector_vDec2019 extends Connector with MdcLoggable {
implicit override val nameOfConnector = StoredProcedureConnector_vDec2019.toString

// "Versioning" of the messages sent by this or similar connector works like this:
- // Use Case Classes (e.g. KafkaInbound... KafkaOutbound... as below to describe the message structures.
+ // Use Case Classes (e.g. Inbound... Outbound... as below to describe the message structures.
// Each connector has a separate file like this one.
// Once the message format is STABLE, freeze the key/value pair names there. For now, new keys may be added but none modified.
// If we want to add a new message format, create a new file e.g. March2017_messages.scala
- // Then add a suffix to the connector value i.e. instead of kafka we might have kafka_march_2017.
- // Then in this file, populate the different case classes depending on the connector name and send to Kafka
+ // Then add a suffix to the connector value i.e. instead of Rest we might have rest_vMar2019.
+ // Then in this file, populate the different case classes depending on the connector name and send to rest_vMar2019
val messageFormat: String = "Dec2019"

override val messageDocs = ArrayBuffer[MessageDoc]()

@@ -1122,7 +1122,7 @@ def restoreSomeSessions(): Unit = {
S.error(S.?(ErrorMessages.UsernameHasBeenLocked))
loginRedirect(ObpS.param("Referer").or(S.param("Referer")))

- // Check if user came from kafka/obpjvm/stored_procedure and
+ // Check if user came from CBS and
// if User is NOT locked. Then check username and password
// from connector in case they changed on the south-side
case Full(user) if externalUserIsValidatedAndNotLocked(usernameFromGui, user) && testExternalPassword(usernameFromGui, passwordFromGui) =>
@@ -1131,7 +1131,7 @@ def restoreSomeSessions(): Unit = {
val preLoginState = capturePreLoginState()
logger.info("login redirect: " + loginRedirect.get)
val redirect = redirectUri(user.user.foreign)
- //This method is used for connector = kafka* || obpjvm*
+ //This method is used for connector = cbs* || obpjvm*
//It will update the views and createAccountHolder ....
registeredUserHelper(user.getProvider(),user.username.get)
// User init actions
@@ -1150,8 +1150,7 @@ def restoreSomeSessions(): Unit = {


// If user cannot be found locally, try to authenticate user via connector
- case Empty if (APIUtil.getPropsAsBoolValue("connector.user.authentication", false) ||
- APIUtil.getPropsAsBoolValue("kafka.user.authentication", false) ) =>
+ case Empty if (APIUtil.getPropsAsBoolValue("connector.user.authentication", false)) =>

val preLoginState = capturePreLoginState()
logger.info("login redirect: " + loginRedirect.get)
@@ -1235,7 +1234,7 @@ def restoreSomeSessions(): Unit = {
* This method will update the views and createAccountHolder ....
*/
def registeredUserHelper(provider: String, username: String) = {
- if (connector.startsWith("kafka")) {
+ if (connector.startsWith("rest_vMar2019")) {
for {
u <- Users.users.vend.getUserByProviderAndUsername(provider, username)
} yield {

@@ -30,7 +30,7 @@ trait ChallengeProvider {
def getChallengesByBasketId(basketId: String): Box[List[ChallengeTrait]]

/**
- * There is another method: Connector.validateChallengeAnswer, it validate the challenge over Kafka.
+ * There is another method: Connector.validateChallengeAnswer, it validates the challenge over CBS.
* This method, will validate the answer in OBP side.
*/
def validateChallenge(challengeId: String, challengeAnswer: String, userId: Option[String]) : Box[ChallengeTrait]

@@ -235,7 +235,6 @@ object Helper extends Loggable {
/**
* Used for version extraction from props string
*/
- val matchAnyKafka = "kafka.*|star".r
val matchAnyStoredProcedure = "stored_procedure.*|star".r

/**

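Editor's note: a small usage sketch of the version-extraction regexes in this hunk, using only Scala's standard Regex extractor.

// Usage sketch: dispatch on the value of the `connector` prop.
object ConnectorRegexSketch extends App {
  val matchAnyStoredProcedure = "stored_procedure.*|star".r

  "stored_procedure_vDec2019" match {
    case matchAnyStoredProcedure() => println("stored-procedure connector selected")
    case other                     => println(s"some other connector: $other")
  }
}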
@@ -93,12 +93,12 @@ trait Views {
final def getPrivateBankAccountsFuture(user : User, bankId : BankId) : Future[List[BankIdAccountId]] = Future {getPrivateBankAccounts(user, bankId)}

/**
- * @param bankIdAccountId the IncomingAccount from Kafka
+ * @param bankIdAccountId the IncomingAccount from CBS
* @param viewId This field should be selected one from Owner/Public/Accountant/Auditor, only support
* these four values.
* @return This will insert a View (e.g. the owner view) for an Account (BankAccount), and return the view
* Note:
- * updateUserAccountViews would call createAccountView once per View specified in the IncomingAccount from Kafka.
+ * updateUserAccountViews would call createAccountView once per View specified in the IncomingAccount from CBS.
* We should cache this function because the available views on an account will change rarely.
*
*/

@@ -423,19 +423,7 @@ class API2_2_0Test extends V220ServerSetup with DefaultUsers {
val response: APIResponse = makeGetRequest(request)

response.code should be (200)
}
- scenario("Get Message Docs - kafka_vSept2018") {
- val request = (v2_2Request / "message-docs" / "kafka_vSept2018" )
- val response: APIResponse = makeGetRequest(request)
-
- response.code should be (200)
- }
scenario("Get Message Docs - rest_vMar2019") {
val request = (v2_2Request / "message-docs" / "rest_vMar2019" )
val response: APIResponse = makeGetRequest(request)

response.code should be (200)
}
}
scenario("Get Message Docs - stored_procedure_vDec2019") {
val request = (v2_2Request / "message-docs" / "stored_procedure_vDec2019" )
val response: APIResponse = makeGetRequest(request)

@@ -1,79 +0,0 @@
- package code.container
-
- import code.api.v5_0_0.V500ServerSetup
- import code.setup.DefaultUsers
- import org.apache.kafka.clients.consumer.{ConsumerConfig, KafkaConsumer}
- import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
- import org.apache.kafka.common.serialization.{StringDeserializer, StringSerializer}
- import org.scalatest.Ignore
- import org.testcontainers.kafka.KafkaContainer
-
- import java.util.{Collections, Properties}
- import scala.jdk.CollectionConverters._
-
-
- @Ignore
- class EmbeddedKafka extends V500ServerSetup with DefaultUsers {
-
- val kafkaContainer: KafkaContainer = new KafkaContainer("apache/kafka-native:3.8.0")
- // It registers a shutdown hook, which is a block of code (or function) that runs when the application terminates,
- // - either normally(e.g., when the main method completes)
- // - or due to an external signal(e.g., Ctrl + C or termination by the operating system).
- sys.addShutdownHook {
- kafkaContainer.stop()
- }
- override def beforeAll(): Unit = {
- super.beforeAll()
- // Start RabbitMQ container
- kafkaContainer.start()
- }
-
- override def afterAll(): Unit = {
- super.afterAll()
- kafkaContainer.stop()
- }
-
- feature(s"test EmbeddedKafka") {
- scenario("Publish and Consume Message") {
-
- val bootstrapServers: String = kafkaContainer.getBootstrapServers
-
- // Kafka producer properties
- val producerProps = new Properties()
- producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers)
- producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer].getName)
- producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer].getName)
-
- // Kafka consumer properties
- val consumerProps = new Properties()
- consumerProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers)
- consumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group")
- consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, classOf[StringDeserializer].getName)
- consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, classOf[StringDeserializer].getName)
- consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")
-
- // Create a producer
- val producer = new KafkaProducer[String, String](producerProps)
- val topic = "test-topic"
- val key = "test-key"
- val value = "Hello, Kafka!"
-
- // Produce a message
- producer.send(new ProducerRecord[String, String](topic, key, value))
- producer.close()
-
- // Create a consumer
- val consumer = new KafkaConsumer[String, String](consumerProps)
- consumer.subscribe(Collections.singletonList(topic))
-
- // Consume the message
- val records = consumer.poll(5000L)
- consumer.close()
-
- val messages = records.asScala.map(record => record.value())
- messages should contain(value)
-
- }
- }
-
- }
@@ -101,7 +101,6 @@ case class TransactionRequestStatusValue(value : String) {
override def toString = value
}

- //Note: change case class -> trait, for kafka extends it
trait TransactionRequestStatus{
def transactionRequestId : String
def bulkTransactionsStatus: List[TransactionStatus]

@@ -44,7 +44,7 @@ trait JsonFieldReName

/**
*
- * This is the base class for all kafka outbound case class
+ * This is the base class for all CBS outbound case class
* action and messageFormat are mandatory
* The optionalFields can be any other new fields .
*/
@@ -251,7 +251,7 @@ trait CustomerAddress {
def insertDate: Date
}

- // This is the common InboundAccount from all Kafka/remote, not finished yet.
+ // This is the common InboundAccount from all CBS/remote, not finished yet.
trait InboundAccount{
def bankId: String
def branchId: String
@@ -392,8 +392,6 @@ trait RoutingT {
def address: String
}

- // @see 'case request: TopicTrait' in code/bankconnectors/kafkaStreamsHelper.scala
- // This is for Kafka topics for both North and South sides.
// In OBP-API, these topics will be created automatically.
trait TopicTrait {


@@ -234,7 +234,6 @@ trait View {
* 2rd: the view can grant the access to any other (not owner) users. eg: Simon's accountant view can grant access to Carola, then Carola can see Simon's accountant data
* also look into some createView methods in code, you can understand more:
* create1: code.bankconnectors.Connector.createViews
- * need also look into here KafkaMappedConnector_vMar2017.updateUserAccountViewsOld
* after createViews method, always need call addPermission(v.uid, user). This will create this field
* Create2: code.model.dataAccess.BankAccountCreation.createOwnerView
* after create view, always need call `addPermission(ownerViewUID, user)`, this will create this field
