diff --git a/cohort_banking_initiator_js/README.md b/cohort_banking_initiator_js/README.md
index d26f3303..5ce97f59 100644
--- a/cohort_banking_initiator_js/README.md
+++ b/cohort_banking_initiator_js/README.md
@@ -145,6 +145,6 @@ npm start
 
 ```
 cd $TALOS/cohort_banking_initiator_js
-# Genrate 1000 transation requests at the rate of 500 TPS
-npm start count=1000 rate=500
+# Generate 1000 transaction requests at the rate of 500 TPS, picking accounts from a dataset containing 10k bank accounts
+npm start count=1000 rate=500 accounts=10000
 ```
\ No newline at end of file
diff --git a/cohort_banking_initiator_js/src/banking-app.ts b/cohort_banking_initiator_js/src/banking-app.ts
index 17452700..b95d6632 100644
--- a/cohort_banking_initiator_js/src/banking-app.ts
+++ b/cohort_banking_initiator_js/src/banking-app.ts
@@ -99,6 +99,7 @@
             async () => {
                 const s = Date.now()
                 const newRequest = await this.createNewRequest(tx) as any
+                logger.info("%s", JSON.stringify(newRequest, null, 2))
                 const n = Date.now()
                 stateEnd = n - span_s
                 stateDuration = n - s
diff --git a/cohort_banking_initiator_js/src/load-generator.ts b/cohort_banking_initiator_js/src/load-generator.ts
index 275c4a02..142ba738 100644
--- a/cohort_banking_initiator_js/src/load-generator.ts
+++ b/cohort_banking_initiator_js/src/load-generator.ts
@@ -73,8 +73,8 @@ export function createGeneratorService(settings: any): Worker {
 }
 
 if (!isMainThread) {
-    const { count, channelName, rate } = workerData.settings
-    const generator = new LoadGenerator(100_000, 100)
+    const { count, channelName, rate, accounts } = workerData.settings
+    const generator = new LoadGenerator(accounts, 100)
     logger.info("Load generator will generate: %d transactions at the reate of %d TPS", count, rate.toFixed(2))
 
     new Promise(async () => {
diff --git a/cohort_banking_initiator_js/src/main.ts b/cohort_banking_initiator_js/src/main.ts
index 644d409a..00f82bf0 100644
--- a/cohort_banking_initiator_js/src/main.ts
+++ b/cohort_banking_initiator_js/src/main.ts
@@ -22,6 +22,7 @@ const printMetrics = (spans: Array) => {
 class LaunchParams {
     transactionsCount: number = 10_000
     targetRatePerSecond: number = 1_000
+    totalAccounts: number = 100_000
 
     static parse(args: string[]): LaunchParams {
         const params = new LaunchParams()
@@ -37,6 +38,8 @@ class LaunchParams {
                 params.transactionsCount = parseInt(arg.replaceAll("count=", ""))
             } else if (arg.startsWith("rate")) {
                 params.targetRatePerSecond = parseInt(arg.replaceAll("rate=", ""))
+            } else if (arg.startsWith("accounts")) {
+                params.totalAccounts = parseInt(arg.replaceAll("accounts=", ""))
             }
         }
 
@@ -86,5 +89,10 @@ new Promise(async (resolve) => {
         }
         throw e
     }
-    const _worker = createGeneratorService({ channelName: CHANNEL_NAME, count: params.transactionsCount, rate: params.targetRatePerSecond })
+    const _worker = createGeneratorService({
+        channelName: CHANNEL_NAME,
+        count: params.transactionsCount,
+        rate: params.targetRatePerSecond,
+        accounts: params.totalAccounts,
+    })
 })
diff --git a/cohort_sdk_client/README.md b/cohort_sdk_client/README.md
index f31478bc..dab7827c 100644
--- a/cohort_sdk_client/README.md
+++ b/cohort_sdk_client/README.md
@@ -87,7 +87,7 @@ Above mentioned reads, writes and statemap fields together are known as certific
 ```json
 [
     {
-        "TRANSFER: {
+        "TRANSFER": {
             "from": "account 1",
             "to": "account 2",
             "amount": "100.00"
@@ -203,7 +203,7 @@ export const enum SdkErrorKind {
 | `Persistence` | Error communicating with database |
 | `Internal` | Some unexpected SDK error |
 | `OutOfOrderCallbackFailed` | Error when invoking out of order installer callback |
-| `OutOfOrderSnapshotTimeout` | You have indicated that out of order install was not successful because of `JsOutOfOrderInstallOutcome.SafepointCondition` and we echausted all retry attempts. |
+| `OutOfOrderSnapshotTimeout` | You have indicated that out of order install was not successful because of `JsOutOfOrderInstallOutcome.SafepointCondition` and we exhausted all retry attempts. |
 
 ## The Initiator API Configuration
 
@@ -288,13 +288,13 @@ In the deployment where there is no Talos present, microservices typically imple
 
 In the deployment with Talos, the replicator, specifically this callback, is responsible for updating the database so that your cohort has the up-to-date view of bank account (or any other shared object). The async messaging is abstracted away from you by SDK. You just provide a handler how you want your database to be updated.
 
-However, there are few rules which Talos protocols expects you to implement.
+However, there are a few rules which the Talos protocol expects you to implement.
 
-- Once you are done updating business objects in your database, it is very important to update the global stapshot. The replicator, specifically this callback, is the only place within cohort which is responsible for writing into snapshot table.
+- Once you are done updating business objects in your database, it is very important to update the global snapshot. The replicator, specifically this callback, is the only place within the cohort which is responsible for writing into the snapshot table.
 - The version number in the snapshot table should only be incremented. If it happens that callback is invoked with older version, then no change should be made to database.
 - The change to business objects and to snapshot table should be atomic (in a single DB transaction with isolation level matching repeatable read or stricter).
 - When callback is invoked, it is possible that your business objects are already updated. In this case, the job of callback is to update the snapshot table only.
-  - This may happen if replicator and initiator belong to the same cohort, for example, out of order installer in initiator may have executed updated our business objects before the replicator. However, installer should never write to snapshot table.
+  - This may happen if the replicator and initiator belong to the same cohort; for example, the out of order installer in the initiator may have executed and updated the business objects before the replicator. However, the installer should never write to the snapshot table.
   - When replicator belong to different cohort, it is just catching up on the changes made by other cohorts, hence it may not encounter the state when business objects are updated already. Unless there was some contingency, like unexpected restart.
 - When updating business objects, also update their versions so that versions match with snapshot version.
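
The `cohort_sdk_client/README.md` hunks above tighten the wording of the replicator callback rules: the business-object change and the snapshot bump must be atomic, the snapshot version may only move forward, and the callback must tolerate business objects that were already updated by the out of order installer. As a non-authoritative illustration of those rules only, here is a minimal TypeScript sketch of such an installer callback; the table names (`bank_accounts`, `cohort_snapshot`), the statemap shape, the `installStatemap` function name and the use of the `pg` client are assumptions made for this sketch, not the SDK's actual API.

```typescript
// Illustrative sketch only: hypothetical schema and statemap shape, not the Talos SDK API.
import { Pool } from "pg"

const pool = new Pool()

interface TransferStatemap {
    from: string
    to: string
    amount: string
    version: number // version at which this change was certified (assumed field)
}

// Replicator-side installer: business change and snapshot bump happen in ONE transaction.
async function installStatemap(item: TransferStatemap): Promise<void> {
    const client = await pool.connect()
    try {
        await client.query("BEGIN TRANSACTION ISOLATION LEVEL REPEATABLE READ")

        // Business objects may already be at (or past) this version, e.g. the out of order
        // installer in the initiator ran first; the version guard turns the update into a no-op.
        await client.query(
            `UPDATE bank_accounts
                SET amount = amount - $1, version = $2
              WHERE number = $3 AND version < $2`,
            [item.amount, item.version, item.from],
        )
        await client.query(
            `UPDATE bank_accounts
                SET amount = amount + $1, version = $2
              WHERE number = $3 AND version < $2`,
            [item.amount, item.version, item.to],
        )

        // The global snapshot only ever moves forward; an older version changes nothing.
        await client.query(
            `UPDATE cohort_snapshot SET version = $1 WHERE id = 'SINGLETON' AND version < $1`,
            [item.version],
        )

        await client.query("COMMIT")
    } catch (e) {
        await client.query("ROLLBACK")
        throw e
    } finally {
        client.release()
    }
}
```

The `version < $…` guards are what make this callback safe to re-run: a replayed statemap, or one whose business changes were already applied by the initiator, degrades into a snapshot-only (or no-op) update, while the snapshot version still never decreases.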