diff --git a/.cspell.json b/.cspell.json
index 57a07132243..4bf71935059 100644
--- a/.cspell.json
+++ b/.cspell.json
@@ -44,6 +44,8 @@
"HyperLedger",
"ipaddress",
"ipfs",
+ "Iroha",
+ "isready",
"jboss",
"JORDI",
"Keychain",
@@ -62,6 +64,7 @@
"myapp",
"mychannel",
"myroot",
+ "mysecretpassword",
"myvolume",
"Nerc",
"NETWORKSCOPEALLFORTX",
@@ -74,6 +77,7 @@
"openethereum",
"organisation",
"parameterizable",
+ "Postgres",
"protos",
"RUSTC",
"Secp",
diff --git a/docs/source/support/iroha.md b/docs/source/support/iroha.md
new file mode 100644
index 00000000000..90767397d15
--- /dev/null
+++ b/docs/source/support/iroha.md
@@ -0,0 +1,17 @@
+Iroha Support
+-----------------
+
+```{note}
+The deployContract feature is not yet implemented, since Iroha lacked full smart contract support during the initial development stage of the Iroha connector plugin.
+```
+
+
+ Hyperledger Cactus v0.9.0
+
+ | Iroha version | runTransaction |
+ | --- | :---: |
+ | Iroha 1.2.0 and Postgres 9.5 | ✅ [test](https://github.com/hyperledger/cactus/blob/v0.9.0/packages/cactus-plugin-ledger-connector-iroha/src/test/typescript/integration/run-transaction-endpoint-v1.test.ts) |
+
+
+
+
\ No newline at end of file
diff --git a/package.json b/package.json
index f951f4fc535..14ef3cafb34 100644
--- a/package.json
+++ b/package.json
@@ -45,6 +45,7 @@
"build:dev:plugin-ledger-connector-quorum": "lerna exec --stream --scope '*/*connector-quorum' -- 'del-cli dist/** && tsc --project ./tsconfig.json && webpack --env=dev --target=node --config ../../webpack.config.js'",
"test:plugin-ledger-connector-quorum": "tap --ts --jobs=1 --timeout=60 \"packages/cactus-*-quorum/src/test/typescript/{unit,integration}/\"",
"build:dev:plugin-ledger-connector-fabric": "lerna exec --stream --scope '*/*connector-fabric' -- 'del-cli dist/** && tsc --project ./tsconfig.json && webpack --env=dev --target=node --config ../../webpack.config.js'",
+ "test:plugin-ledger-connector-iroha": "tap --ts --jobs=1 --timeout=600 \"packages/cactus-*-iroha/src/test/typescript/{unit,integration}/\"",
"build:dev:plugin-htlc-eth-besu": "lerna exec --stream --scope 'extensions/htlc/besu-eth' -- 'del-cli dist/** && tsc --project ./tsconfig.json && webpack --env=dev --target=node --config ../../webpack.config.js'",
"test:plugin-htlc-besu": "tap --jobs=1 --timeout=600 \"packages/*htlc-eth-besu/src/test/typescript/{integration}/\"",
"build:dev:plugin-consortium-manual": "lerna exec --stream --scope '*/*manual-consortium' -- 'del-cli dist/** && tsc --project ./tsconfig.json && webpack --env=dev --target=node --config ../../webpack.config.js'",
diff --git a/packages/cactus-common/src/main/typescript/http/http-status-code-errors.ts b/packages/cactus-common/src/main/typescript/http/http-status-code-errors.ts
new file mode 100644
index 00000000000..700a538700e
--- /dev/null
+++ b/packages/cactus-common/src/main/typescript/http/http-status-code-errors.ts
@@ -0,0 +1 @@
+export class Http405NotAllowedError extends Error {}
diff --git a/packages/cactus-common/src/main/typescript/public-api.ts b/packages/cactus-common/src/main/typescript/public-api.ts
index 168497b6af0..209b91f6dcc 100755
--- a/packages/cactus-common/src/main/typescript/public-api.ts
+++ b/packages/cactus-common/src/main/typescript/public-api.ts
@@ -19,3 +19,4 @@ export { ISignerKeyPair } from "./signer-key-pair";
export { Secp256k1Keys } from "./secp256k1-keys";
export { KeyFormat, KeyConverter } from "./key-converter";
export { IAsyncProvider } from "./i-async-provider";
+export { Http405NotAllowedError } from "./http/http-status-code-errors";
diff --git a/packages/cactus-plugin-ledger-connector-iroha/Dockerfile b/packages/cactus-plugin-ledger-connector-iroha/Dockerfile
new file mode 100644
index 00000000000..3fc79988f1e
--- /dev/null
+++ b/packages/cactus-plugin-ledger-connector-iroha/Dockerfile
@@ -0,0 +1,5 @@
+FROM ghcr.io/hyperledger/cactus-cmd-api-server:2021-08-15--refactor-1222
+
+ARG NPM_PKG_VERSION=latest
+
+RUN npm i @hyperledger/cactus-plugin-ledger-connector-iroha@${NPM_PKG_VERSION} --production
diff --git a/packages/cactus-plugin-ledger-connector-iroha/README.md b/packages/cactus-plugin-ledger-connector-iroha/README.md
new file mode 100644
index 00000000000..c7ed49ad7d0
--- /dev/null
+++ b/packages/cactus-plugin-ledger-connector-iroha/README.md
@@ -0,0 +1,235 @@
+# `@hyperledger/cactus-plugin-ledger-connector-iroha`
+
+This plugin provides `Cactus` a way to interact with Iroha networks. Using this plugin you can:
+* Run various Iroha ledger commands and queries.
+* Build and sign transactions using any arbitrary credential.
+## Summary
+
+ - [Getting Started](#getting-started)
+ - [Architecture](#architecture)
+ - [Usage](#usage)
+ - [Running the tests](#running-the-tests)
+ - [Built With](#built-with)
+ - [Contributing](#contributing)
+ - [License](#license)
+ - [Acknowledgments](#acknowledgments)
+
+## Getting Started
+
+Clone the git repository on your local machine. Follow these instructions to get a copy of the project up and running on your local machine for development and testing purposes.
+
+### Prerequisites
+
+In the root of the project, execute the command to install the dependencies:
+```sh
+yarn run configure
+```
+
+### Compiling
+
+In the project root folder, run this command to compile the plugin and create the dist directory:
+```sh
+# For one off builds:
+yarn run build:dev:backend
+```
+
+```sh
+# For continuous watching of the source code with
+# automatic recompilation (more convenient)
+yarn run watch
+```
+
+### Architecture
+The sequence diagrams for the various endpoints are shown below.
+
+#### run-transaction-endpoint
+![run-transaction-endpoint sequence diagram](docs/architecture/images/run-transaction-endpoint.png)
+
+The above diagram shows the sequence diagram of run-transaction-endpoint. User A (one of the many users) interacts with the API Client which, in turn, calls the API Server. The API Server then executes the `transact()` method, which is explained in detail in the subsequent diagram.
+
+![run-transaction-endpoint transact() method](docs/architecture/images/run-transaction-endpoint-transact.png)
+
+The above diagram shows the sequence diagram of the `transact()` method of the `PluginLedgerConnectorIroha` class. The caller of this function (the API Server in the diagram above) passes a `RunTransactionRequestV1` object as an argument to the `transact()` method. Based on the command name in the request, the corresponding `RunTransactionResponse` is sent back to the caller.
+
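+In code, the branching described above can be sketched roughly as follows (an illustrative sketch only, not the connector's actual implementation; it assumes the generated `IrohaCommand`/`IrohaQuery` enums and the `RunTransactionRequestV1` type are re-exported from this package's public API):
+
+```typescript
+import {
+  IrohaCommand,
+  IrohaQuery,
+  RunTransactionRequestV1,
+} from "@hyperledger/cactus-plugin-ledger-connector-iroha";
+
+// Decides whether a request names an Iroha command or an Iroha query, mirroring
+// the alt branches of the sequence diagram. The real transact() method then
+// builds the corresponding Iroha commandOptions or queryOptions and submits
+// them to the ledger before assembling the RunTransactionResponse.
+function classifyCommandName(req: RunTransactionRequestV1): "command" | "query" {
+  if ((Object.values(IrohaCommand) as string[]).includes(req.commandName)) {
+    return "command"; // generate Iroha commandOptions
+  }
+  if ((Object.values(IrohaQuery) as string[]).includes(req.commandName)) {
+    return "query"; // generate Iroha queryOptions
+  }
+  // The diagram names this RuntimeError; a plain Error is used here for brevity.
+  throw new Error("command or query does not exist");
+}
+```
+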
+### Usage
+
+To use this plugin, import the public-api and create a new **PluginFactoryLedgerConnector**. Then use it to create a connector.
+```typescript
+ const factory = new PluginFactoryLedgerConnector({
+ pluginImportType: PluginImportType.Local,
+ });
+
+ const connector: PluginLedgerConnectorIroha = await factory.create({
+ rpcToriiPortHost,
+ instanceId: uuidv4(),
+ pluginRegistry: new PluginRegistry(),
+ });
+```
+You can make calls through the connector to the plugin API:
+
+```typescript
+async transact(req: RunTransactionRequestV1): Promise<RunTransactionResponse>;
+```
+
+Call example to run an Iroha create account command (parameter values are illustrative):
+```typescript
+ const req = {
+ commandName: IrohaCommand.CreateAccount,
+ baseConfig: {
+ irohaHost: "localhost",
+ irohaPort: 50051,
+ creatorAccountId: "admin@test",
+ privKey: ["f101537e319568c765b2cc89698325604991dca57b9716b58016b253506cab70"],
+ quorum: 1,
+ timeoutLimit: 5000,
+ },
+ params: ["user1", "test", "313a07e6384776ed95447710d15e59148473ccfc052a681317a72a69f2a49910"], // accountName, domainId, publicKey (illustrative values)
+ };
+ const res = await apiClient.runTransactionV1(req);
+```
+Call example to run an Iroha get account query:
+```typescript
+ const req = {
+ commandName: IrohaQuery.GetAccount,
+ baseConfig: {
+ irohaHost: "localhost",
+ irohaPort: 50051,
+ creatorAccountId: "admin@test",
+ privKey: ["f101537e319568c765b2cc89698325604991dca57b9716b58016b253506cab70"],
+ quorum: 1,
+ timeoutLimit: 5000,
+ },
+ params: ["admin@test"],
+ };
+ const res = await apiClient.runTransactionV1(req);
+```
+> Extensive documentation and examples in the [readthedocs](https://readthedocs.org/projects/hyperledger-cactus/) (WIP)
+
+
+### Building/running the container image locally
+
+In the Cactus project root say:
+
+```sh
+DOCKER_BUILDKIT=1 docker build -f ./packages/cactus-plugin-ledger-connector-iroha/Dockerfile . -t cplcb
+```
+
+Build with a specific version of the npm package:
+```sh
+DOCKER_BUILDKIT=1 docker build --build-arg NPM_PKG_VERSION=latest -f ./packages/cactus-plugin-ledger-connector-iroha/Dockerfile . -t cplcb
+```
+
+#### Running the container
+
+Launch container with plugin configuration as an **environment variable**:
+```sh
+docker run \
+ --rm \
+ --publish 3000:3000 \
+ --publish 4000:4000 \
+ --publish 5000:5000 \
+ --env PLUGINS='[{"packageName": "@hyperledger/cactus-plugin-ledger-connector-iroha", "type": "org.hyperledger.cactus.plugin_import_type.LOCAL", "options": {"rpcApiHttpHost": "http://localhost:8545", "instanceId": "some-unique-iroha-connector-instance-id"}}]' \
+ cplcb
+```
+
+Launch container with plugin configuration as a **CLI argument**:
+```sh
+docker run \
+ --rm \
+ --publish 3000:3000 \
+ --publish 4000:4000 \
+ --publish 5000:5000 \
+ cplcb \
+ ./node_modules/.bin/cactusapi \
+ --plugins='[{"packageName": "@hyperledger/cactus-plugin-ledger-connector-iroha", "type": "org.hyperledger.cactus.plugin_import_type.LOCAL", "options": {"rpcApiHttpHost": "http://localhost:8545", "instanceId": "some-unique-iroha-connector-instance-id"}}]'
+```
+
+Launch container with **configuration file** mounted from host machine:
+```sh
+
+echo '[{"packageName": "@hyperledger/cactus-plugin-ledger-connector-iroha", "type": "org.hyperledger.cactus.plugin_import_type.LOCAL", "options": {"rpcApiHttpHost": "http://localhost:8545", "instanceId": "some-unique-iroha-connector-instance-id"}}]' > cactus.json
+
+docker run \
+ --rm \
+ --publish 3000:3000 \
+ --publish 4000:4000 \
+ --publish 5000:5000 \
+ --mount type=bind,source="$(pwd)"/cactus.json,target=/cactus.json \
+ cplcb \
+ ./node_modules/.bin/cactusapi \
+ --config-file=/cactus.json
+```
+
+#### Testing API calls with the container
+
+Don't have an Iroha network on hand to test with? Test or develop against our Iroha All-In-One dockerfile!
+
+**Terminal Window 1 (Ledger)**
+```sh
+docker run -p 0.0.0.0:8545:8545/tcp -p 0.0.0.0:8546:8546/tcp -p 0.0.0.0:8888:8888/tcp -p 0.0.0.0:9001:9001/tcp -p 0.0.0.0:9545:9545/tcp hyperledger/cactus-iroha-all-in-one:latest
+```
+
+**Terminal Window 2 (Cactus API Server)**
+```sh
+docker run \
+ --network host \
+ --rm \
+ --publish 3000:3000 \
+ --publish 4000:4000 \
+ --publish 5000:5000 \
+ --env PLUGINS='[{"packageName": "@hyperledger/cactus-plugin-ledger-connector-iroha", "type": "org.hyperledger.cactus.plugin_import_type.LOCAL", "options": {"rpcApiHttpHost": "http://localhost:8545", "instanceId": "some-unique-iroha-connector-instance-id"}}]' \
+ cplcb
+```
+
+**Terminal Window 3 (curl - replace transaction request as needed)**
+```sh
+curl --location --request POST 'http://127.0.0.1:4000/api/v1/plugins/@hyperledger/cactus-plugin-ledger-connector-iroha/run-transaction' \
+--header 'Content-Type: application/json' \
+--data-raw '{
+  "commandName": "createAsset",
+  "baseConfig": {
+    "irohaHost": "localhost",
+    "irohaPort": 50051,
+    "creatorAccountId": "admin@test",
+    "privKey": [
+      "f101537e319568c765b2cc89698325604991dca57b9716b58016b253506cab70"
+    ],
+    "quorum": 1,
+    "timeoutLimit": 5000
+  },
+  "params": ["coolcoin", "test", 3]
+}'
+```
+
+The above should produce a response that looks similar to this:
+
+```json
+{
+ "success": true,
+ "data": {
+ "transactionReceipt": {
+ "txHash": "c3ffd772f26950243aa357ab4f21b9703d5172490b66ddc285355230d6df60b8",
+ "status": "COMMITTED"
+ }
+ }
+}
+```
+
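+The same request can also be issued from TypeScript with the API client generated in this package instead of `curl` (a minimal sketch; it assumes the API server from Terminal Window 2 is reachable on port 4000 and that the generated client classes are re-exported from this package's public API):
+
+```typescript
+import {
+  Configuration,
+  DefaultApi,
+  IrohaCommand,
+} from "@hyperledger/cactus-plugin-ledger-connector-iroha";
+
+async function main(): Promise<void> {
+  // Point the generated client at the API server started in Terminal Window 2.
+  const apiClient = new DefaultApi(
+    new Configuration({ basePath: "http://127.0.0.1:4000" }),
+  );
+  const res = await apiClient.runTransactionV1({
+    commandName: IrohaCommand.CreateAsset,
+    baseConfig: {
+      irohaHost: "localhost",
+      irohaPort: 50051,
+      creatorAccountId: "admin@test",
+      privKey: [
+        "f101537e319568c765b2cc89698325604991dca57b9716b58016b253506cab70",
+      ],
+      quorum: 1,
+      timeoutLimit: 5000,
+    },
+    params: ["coolcoin", "test", 3], // assetName, domainId, precision
+  });
+  console.log(res.data.transactionReceipt);
+}
+
+main().catch((ex) => {
+  console.error(ex);
+  process.exit(1);
+});
+```
+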
+## Running the tests
+
+To check that all has been installed correctly and that the plugin has no errors, run the tests:
+
+* Run this command at the project's root:
+```sh
+yarn run test:plugin-ledger-connector-iroha
+```
+
+## Contributing
+
+We welcome contributions to Hyperledger Cactus in many forms, and there’s always plenty to do!
+
+Please review [CONTRIBUTING.md](../../CONTRIBUTING.md) to get started.
+
+## License
+
+This distribution is published under the Apache License Version 2.0 found in the [LICENSE](../../LICENSE) file.
+
+## Acknowledgments
diff --git a/packages/cactus-plugin-ledger-connector-iroha/docs/architecture/images/run-transaction-endpoint-transact.png b/packages/cactus-plugin-ledger-connector-iroha/docs/architecture/images/run-transaction-endpoint-transact.png
new file mode 100644
index 00000000000..70dcf461860
Binary files /dev/null and b/packages/cactus-plugin-ledger-connector-iroha/docs/architecture/images/run-transaction-endpoint-transact.png differ
diff --git a/packages/cactus-plugin-ledger-connector-iroha/docs/architecture/images/run-transaction-endpoint.png b/packages/cactus-plugin-ledger-connector-iroha/docs/architecture/images/run-transaction-endpoint.png
new file mode 100644
index 00000000000..d25d7b01939
Binary files /dev/null and b/packages/cactus-plugin-ledger-connector-iroha/docs/architecture/images/run-transaction-endpoint.png differ
diff --git a/packages/cactus-plugin-ledger-connector-iroha/docs/architecture/run-transaction-endpoint-transact.puml b/packages/cactus-plugin-ledger-connector-iroha/docs/architecture/run-transaction-endpoint-transact.puml
new file mode 100644
index 00000000000..65575463604
--- /dev/null
+++ b/packages/cactus-plugin-ledger-connector-iroha/docs/architecture/run-transaction-endpoint-transact.puml
@@ -0,0 +1,28 @@
+@startuml
+title Hyperledger Cactus\nSequence Diagram\nRun Transaction Endpoint\ntransact() method
+
+skinparam sequenceArrowThickness 2
+skinparam roundcorner 20
+skinparam maxmessagesize 120
+skinparam sequenceParticipant underline
+
+actor "Caller" as caller
+participant "PluginLedgerConnectorIroha" as t << (C,#ADD1B2) class >>
+
+autoactivate on
+
+activate caller
+caller -> t: transact(RunTransactionRequest)
+
+alt #LightBlue commandName is an Iroha command
+ t -> t: generate Iroha commandOptions
+ return RunTransactionResponse
+ t --> caller: return RunTransactionResponse
+else #LightGreen commandName is an Iroha query
+ t -> t: generate Iroha queryOptions
+ return RunTransactionResponse
+ t --> caller: return RunTransactionResponse
+else #LightCoral default
+ t --> caller: throw RuntimeError("command or query does not exist")
+end
+@enduml
\ No newline at end of file
diff --git a/packages/cactus-plugin-ledger-connector-iroha/docs/architecture/run-transaction-endpoint.puml b/packages/cactus-plugin-ledger-connector-iroha/docs/architecture/run-transaction-endpoint.puml
new file mode 100644
index 00000000000..91c3724d8ea
--- /dev/null
+++ b/packages/cactus-plugin-ledger-connector-iroha/docs/architecture/run-transaction-endpoint.puml
@@ -0,0 +1,29 @@
+@startuml Sequence Diagram - Transaction
+
+title Hyperledger Cactus\nSequence Diagram\nRun Transaction Endpoint
+
+skinparam sequenceArrowThickness 2
+skinparam roundcorner 20
+skinparam maxmessagesize 120
+skinparam sequenceParticipant underline
+
+box "Users" #LightBlue
+actor "User A" as a
+end box
+
+box "Hyperledger Cactus" #LightGray
+entity "API Client" as apic
+entity "API Server" as apis
+end box
+
+box "Ledger Connector" #LightGreen
+database "Iroha" as irohacon
+end box
+
+a --> apic : Tx Iroha Ledger
+apic --> apis: Request
+apis --> irohacon: transact()
+irohacon --> apis: Response
+apis --> apic: Formatted Response
+apic --> a: RunTransactionResponse
+@enduml
diff --git a/packages/cactus-plugin-ledger-connector-iroha/openapitools.json b/packages/cactus-plugin-ledger-connector-iroha/openapitools.json
new file mode 100644
index 00000000000..d2fdbae832d
--- /dev/null
+++ b/packages/cactus-plugin-ledger-connector-iroha/openapitools.json
@@ -0,0 +1,7 @@
+{
+ "$schema": "node_modules/@openapitools/openapi-generator-cli/config.schema.json",
+ "spaces": 2,
+ "generator-cli": {
+ "version": "5.1.1"
+ }
+}
diff --git a/packages/cactus-plugin-ledger-connector-iroha/package.json b/packages/cactus-plugin-ledger-connector-iroha/package.json
new file mode 100644
index 00000000000..fada22b48bc
--- /dev/null
+++ b/packages/cactus-plugin-ledger-connector-iroha/package.json
@@ -0,0 +1,93 @@
+{
+ "name": "@hyperledger/cactus-plugin-ledger-connector-iroha",
+ "version": "0.8.0",
+ "description": "Allows Cactus nodes to connect to an Iroha ledger.",
+ "main": "dist/lib/main/typescript/index.js",
+ "mainMinified": "dist/cactus-plugin-ledger-connector-iroha.node.umd.min.js",
+ "browser": "dist/cactus-plugin-ledger-connector-iroha.web.umd.js",
+ "browserMinified": "dist/cactus-plugin-ledger-connector-iroha.web.umd.min.js",
+ "module": "dist/lib/main/typescript/index.js",
+ "types": "dist/types/main/typescript/index.d.ts",
+ "files": [
+ "dist/*"
+ ],
+ "scripts": {
+ "generate-sdk": "openapi-generator-cli generate -i ./src/main/json/openapi.json -g typescript-axios -o ./src/main/typescript/generated/openapi/typescript-axios/ --reserved-words-mappings protected=protected",
+ "codegen:openapi": "npm run generate-sdk",
+ "codegen": "run-p 'codegen:*'",
+ "watch": "npm-watch",
+ "webpack": "npm-run-all webpack:dev webpack:prod",
+ "webpack:dev": "npm-run-all webpack:dev:node webpack:dev:web",
+ "webpack:dev:web": "webpack --env=dev --target=web --config ../../webpack.config.js",
+ "webpack:dev:node": "webpack --env=dev --target=node --config ../../webpack.config.js",
+ "webpack:prod": "npm-run-all webpack:prod:node webpack:prod:web",
+ "webpack:prod:web": "webpack --env=prod --target=web --config ../../webpack.config.js",
+ "webpack:prod:node": "webpack --env=prod --target=node --config ../../webpack.config.js"
+ },
+ "watch": {
+ "codegen:openapi": {
+ "patterns": [
+ "./src/main/json/openapi.json"
+ ]
+ }
+ },
+ "publishConfig": {
+ "access": "public"
+ },
+ "engines": {
+ "node": ">=10",
+ "npm": ">=6"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/hyperledger/cactus.git"
+ },
+ "keywords": [
+ "Hyperledger",
+ "Cactus",
+ "Iroha",
+ "Integration",
+ "Blockchain",
+ "Distributed Ledger Technology"
+ ],
+ "author": {
+ "name": "Hyperledger Cactus Contributors",
+ "email": "cactus@lists.hyperledger.org",
+ "url": "https://www.hyperledger.org/use/cactus"
+ },
+ "contributors": [
+ {
+ "name": "Peter Somogyvari",
+ "email": "peter.somogyvari@accenture.com",
+ "url": "https://accenture.com"
+ },
+ {
+ "name": "Han Xu",
+ "email": "hanxu8@illinois.edu",
+ "url": "https://github.com/hxlaf"
+ }
+ ],
+ "license": "Apache-2.0",
+ "bugs": {
+ "url": "https://github.com/hyperledger/cactus/issues"
+ },
+ "homepage": "https://github.com/hyperledger/cactus#readme",
+ "dependencies": {
+ "@hyperledger/cactus-common": "0.8.0",
+ "@hyperledger/cactus-core": "0.8.0",
+ "@hyperledger/cactus-core-api": "0.8.0",
+ "@types/google-protobuf": "3.15.3",
+ "axios": "0.21.1",
+ "express": "4.17.1",
+ "grpc": "1.24.11",
+ "iroha-helpers-ts": "0.9.25-ss",
+ "openapi-types": "7.0.1",
+ "prom-client": "13.1.0",
+ "typescript-optional": "2.0.1"
+ },
+ "devDependencies": {
+ "@hyperledger/cactus-plugin-keychain-memory": "0.8.0",
+ "@hyperledger/cactus-test-tooling": "0.8.0",
+ "@types/express": "4.17.8"
+ }
+}
diff --git a/packages/cactus-plugin-ledger-connector-iroha/src/main/json/openapi.json b/packages/cactus-plugin-ledger-connector-iroha/src/main/json/openapi.json
new file mode 100644
index 00000000000..2b99054d8e0
--- /dev/null
+++ b/packages/cactus-plugin-ledger-connector-iroha/src/main/json/openapi.json
@@ -0,0 +1,359 @@
+{
+ "openapi": "3.0.3",
+ "info": {
+ "title": "Hyperledger Cactus Plugin - Connector Iroha",
+ "description": "Can perform basic tasks on an Iroha ledger",
+ "version": "0.0.1",
+ "license": {
+ "name": "Apache 2.0",
+ "url": "https://www.apache.org/licenses/LICENSE-2.0.html"
+ }
+ },
+ "servers": [
+ {
+ "url": "https://www.cactus.stream/{basePath}",
+ "description": "Public test instance",
+ "variables": {
+ "basePath": {
+ "default": ""
+ }
+ }
+ },
+ {
+ "url": "http://localhost:4000/{basePath}",
+ "description": "Local test instance",
+ "variables": {
+ "basePath": {
+ "default": ""
+ }
+ }
+ }
+ ],
+ "components": {
+ "schemas": {
+ "IrohaCommand": {
+ "type": "string",
+ "enum": [
+ "createAccount",
+ "setAccountDetail",
+ "setAccountQuorum",
+ "compareAndSetAccountDetail",
+ "createAsset",
+ "addAssetQuantity",
+ "subtractAssetQuantity",
+ "transferAsset",
+ "createDomain",
+ "createRole",
+ "detachRole",
+ "appendRole",
+ "addSignatory",
+ "removeSignatory",
+ "grantPermission",
+ "revokePermission",
+ "addPeer",
+ "removePeer",
+ "setSettingValue",
+ "callEngine"
+ ],
+ "x-enum-descriptions": [
+ "Make entity in the system, capable of sending transactions or queries, storing signatories, personal data and identifiers.",
+ "Set key-value information for a given account.",
+ "Set the number of signatories required to confirm the identity of a user, who creates the transaction.",
+ "Set key-value information for a given account if the old value matches the value passed.",
+ "Create a new type of asset, unique in a domain. An asset is a countable representation of a commodity.",
+ "Increase the quantity of an asset on account of transaction creator.",
+ "Decrease the number of assets on account of transaction creator.",
+ "Share assets within the account in peer network: in the way that source account transfers assets to the target account.",
+ "Make new domain in Iroha network, which is a group of accounts.",
+ "Create a new role in the system from the set of permissions.",
+ "Detach a role from the set of roles of an account.",
+ "Promote an account to some created role in the system, where a role is a set of permissions account has to perform an action (command or query).",
+ "Add an identifier to the account. Such identifier is a public key of another device or a public key of another user.",
+ "Remove a public key, associated with an identity, from an account",
+ "Give another account rights to perform actions on the account of transaction sender (give someone right to do something with my account).",
+ "Revoke or dismiss given granted permission from another account in the network.",
+ "Write into ledger the fact of peer addition into the peer network.",
+ "Write into ledger the fact of peer removal from the network.",
+ "This command is not available for use; it was added for backward compatibility with Iroha.",
+ "This command is not available for use because it is related to smart contracts."
+ ],
+ "x-enum-varnames": [
+ "CreateAccount",
+ "SetAccountDetail",
+ "SetAccountQuorum",
+ "CompareAndSetAccountDetail",
+ "CreateAsset",
+ "AddAssetQuantity",
+ "SubtractAssetQuantity",
+ "TransferAsset",
+ "CreateDomain",
+ "CreateRole",
+ "DetachRole",
+ "AppendRole",
+ "AddSignatory",
+ "RemoveSignatory",
+ "GrantPermission",
+ "RevokePermission",
+ "AddPeer",
+ "RemovePeer",
+ "SetSettingValue",
+ "CallEngine"
+ ]
+ },
+ "IrohaQuery": {
+ "type": "string",
+ "enum": [
+ "getAccount",
+ "getAccountDetail",
+ "getAssetInfo",
+ "getAccountAssets",
+ "getTransactions",
+ "getPendingTransactions",
+ "getAccountTransactions",
+ "getAccountAssetTransactions",
+ "getRoles",
+ "getSignatories",
+ "getRolePermissions",
+ "getBlock",
+ "getEngineReceipts",
+ "fetchCommits",
+ "getPeers"
+ ],
+ "x-enum-descriptions": [
+ "To get the state of an account",
+ "To get details of the account.",
+ "To get information on the given asset (as for now - its precision).",
+ "To get the state of all assets in an account (a balance).",
+ "To retrieve information about transactions, based on their hashes.",
+ "To retrieve a list of pending (not fully signed) multisignature transactions or batches of transactions issued by account of query creator.",
+ "To retrieve a list of transactions per account.",
+ "To retrieve all transactions associated with given account and asset.",
+ "To get existing roles in the system.",
+ "To get signatories, which act as an identity of the account.",
+ "To get available permissions per role in the system.",
+ "To get a specific block, using its height as an identifier.",
+ "To retrieve a receipt of a CallEngine command. Allows to access the event log created during computations inside the EVM.",
+ "To get new blocks as soon as they are committed, a user can invoke FetchCommits RPC call to Iroha network.",
+ "A query that returns a list of peers in Iroha network."
+ ],
+ "x-enum-varnames": [
+ "GetAccount",
+ "GetAccountDetail",
+ "GetAssetInfo",
+ "GetAccountAssets",
+ "GetTransactions",
+ "GetPendingTransactions",
+ "GetAccountTransactions",
+ "GetAccountAssetTransactions",
+ "GetRoles",
+ "GetSignatories",
+ "GetRolePermissions",
+ "GetBlock",
+ "GetEngineReceipts",
+ "FetchCommits",
+ "GetPeers"
+ ]
+ },
+ "KeyPair": {
+ "type": "object",
+ "required": ["publicKey", "privateKey"],
+ "properties": {
+ "publicKey": {
+ "description": "SHA-3 ed25519 public keys of length 64 are recommended.",
+ "example": "313a07e6384776ed95447710d15e59148473ccfc052a681317a72a69f2a49910",
+ "type": "string",
+ "nullable": false
+ },
+ "privateKey": {
+ "description": "SHA-3 ed25519 private keys of length 64 are recommended.",
+ "example": "f101537e319568c765b2cc89698325604991dca57b9716b58016b253506cab70",
+ "type": "string",
+ "nullable": false
+ }
+ }
+ },
+ "RunTransactionRequestV1": {
+ "type": "object",
+ "required": ["commandName", "params"],
+ "properties": {
+ "commandName": {
+ "type": "string",
+ "nullable": false
+ },
+ "baseConfig": {
+ "$ref": "#/components/schemas/IrohaBaseConfig",
+ "nullable": false
+ },
+ "params": {
+ "description": "The list of arguments to pass in to the transaction request.",
+ "type": "array",
+ "default": [],
+ "items": {}
+ }
+ }
+ },
+ "IrohaBaseConfig": {
+ "type": "object",
+ "additionalProperties": true,
+ "properties": {
+ "irohaHost": {
+ "type": "string",
+ "nullable": false
+ },
+ "irohaPort": {
+ "type": "number",
+ "nullable": false
+ },
+ "creatorAccountId": {
+ "type": "string",
+ "nullable": false
+ },
+ "privKey": {
+ "type": "array",
+ "items": {},
+ "default": [],
+ "nullable": false
+ },
+ "quorum": {
+ "type": "number",
+ "nullable": false
+ },
+ "timeoutLimit": {
+ "type": "number",
+ "nullable": false
+ },
+ "tls": {
+ "type": "boolean",
+ "nullable": false,
+ "description": "Can only be set to false for an insecure grpc connection."
+ }
+ }
+ },
+ "RunTransactionResponse": {
+ "type": "object",
+ "required": ["transactionReceipt"],
+ "properties": {
+ "transactionReceipt": {}
+ }
+ },
+ "InvokeContractV1Request": {
+ "type": "object",
+ "properties": {
+ "contractName": {}
+ }
+ },
+ "InvokeContractV1Response": {
+ "type": "object",
+ "required": ["success"],
+ "properties": {}
+ },
+ "PrometheusExporterMetricsResponse": {
+ "type": "string",
+ "nullable": false
+ }
+ }
+ },
+ "paths": {
+ "/api/v1/plugins/@hyperledger/cactus-plugin-ledger-connector-iroha/run-transaction": {
+ "post": {
+ "x-hyperledger-cactus": {
+ "http": {
+ "verbLowerCase": "post",
+ "path": "/api/v1/plugins/@hyperledger/cactus-plugin-ledger-connector-iroha/run-transaction"
+ }
+ },
+ "operationId": "runTransactionV1",
+ "summary": "Executes a transaction on an Iroha ledger",
+ "parameters": [],
+ "requestBody": {
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/RunTransactionRequestV1"
+ }
+ }
+ }
+ },
+ "responses": {
+ "200": {
+ "description": "OK",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/RunTransactionResponse"
+ }
+ }
+ }
+ }
+ }
+ }
+ },
+ "/api/v1/plugins/@hyperledger/cactus-plugin-ledger-connector-iroha/invoke-contract": {
+ "post": {
+ "x-hyperledger-cactus": {
+ "http": {
+ "verbLowerCase": "post",
+ "path": "/api/v1/plugins/@hyperledger/cactus-plugin-ledger-connector-iroha/invoke-contract"
+ }
+ },
+ "operationId": "invokeContractV1",
+ "summary": "Invokes a contract on an Iroha ledger",
+ "parameters": [],
+ "requestBody": {
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/InvokeContractV1Request"
+ }
+ }
+ }
+ },
+ "responses": {
+ "501": {
+ "description": "Not implemented",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "properties": {
+ "message": {
+ "type": "string",
+ "nullable": false,
+ "minLength": 1,
+ "maxLength": 2048
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ },
+ "/api/v1/plugins/@hyperledger/cactus-plugin-ledger-connector-iroha/get-prometheus-exporter-metrics": {
+ "get": {
+ "x-hyperledger-cactus": {
+ "http": {
+ "verbLowerCase": "get",
+ "path": "/api/v1/plugins/@hyperledger/cactus-plugin-ledger-connector-iroha/get-prometheus-exporter-metrics"
+ }
+ },
+ "operationId": "getPrometheusMetricsV1",
+ "summary": "Get the Prometheus Metrics",
+ "parameters": [],
+ "responses": {
+ "200": {
+ "description": "OK",
+ "content": {
+ "text/plain": {
+ "schema": {
+ "$ref": "#/components/schemas/PrometheusExporterMetricsResponse"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/generated/openapi/typescript-axios/.openapi-generator-ignore b/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/generated/openapi/typescript-axios/.openapi-generator-ignore
new file mode 100644
index 00000000000..57cdd7b74b9
--- /dev/null
+++ b/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/generated/openapi/typescript-axios/.openapi-generator-ignore
@@ -0,0 +1,27 @@
+# OpenAPI Generator Ignore
+# Generated by openapi-generator https://github.com/openapitools/openapi-generator
+
+# Use this file to prevent files from being overwritten by the generator.
+# The patterns follow closely to .gitignore or .dockerignore.
+
+# As an example, the C# client generator defines ApiClient.cs.
+# You can make changes and tell OpenAPI Generator to ignore just this file by uncommenting the following line:
+#ApiClient.cs
+
+# You can match any string of characters against a directory, file or extension with a single asterisk (*):
+#foo/*/qux
+# The above matches foo/bar/qux and foo/baz/qux, but not foo/bar/baz/qux
+
+# You can recursively match patterns against a directory, file or extension with a double asterisk (**):
+#foo/**/qux
+# This matches foo/bar/qux, foo/baz/qux, and foo/bar/baz/qux
+
+# You can also negate patterns with an exclamation (!).
+# For example, you can ignore all files in a docs folder with the file extension .md:
+#docs/*.md
+# Then explicitly reverse the ignore rule for a single file:
+#!docs/README.md
+
+git_push.sh
+.npmignore
+.gitignore
diff --git a/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/generated/openapi/typescript-axios/.openapi-generator/FILES b/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/generated/openapi/typescript-axios/.openapi-generator/FILES
new file mode 100644
index 00000000000..53250c02696
--- /dev/null
+++ b/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/generated/openapi/typescript-axios/.openapi-generator/FILES
@@ -0,0 +1,5 @@
+api.ts
+base.ts
+common.ts
+configuration.ts
+index.ts
diff --git a/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/generated/openapi/typescript-axios/.openapi-generator/VERSION b/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/generated/openapi/typescript-axios/.openapi-generator/VERSION
new file mode 100644
index 00000000000..3bff059174b
--- /dev/null
+++ b/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/generated/openapi/typescript-axios/.openapi-generator/VERSION
@@ -0,0 +1 @@
+5.1.1
\ No newline at end of file
diff --git a/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/generated/openapi/typescript-axios/api.ts b/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/generated/openapi/typescript-axios/api.ts
new file mode 100644
index 00000000000..2fe0aa53887
--- /dev/null
+++ b/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/generated/openapi/typescript-axios/api.ts
@@ -0,0 +1,546 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Hyperledger Cactus Plugin - Connector Iroha
+ * Can perform basic tasks on an Iroha ledger
+ *
+ * The version of the OpenAPI document: 0.0.1
+ *
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+
+import { Configuration } from './configuration';
+import globalAxios, { AxiosPromise, AxiosInstance } from 'axios';
+// Some imports not used depending on template conditions
+// @ts-ignore
+import { DUMMY_BASE_URL, assertParamExists, setApiKeyToObject, setBasicAuthToObject, setBearerAuthToObject, setOAuthToObject, setSearchParams, serializeDataIfNeeded, toPathString, createRequestFunction } from './common';
+// @ts-ignore
+import { BASE_PATH, COLLECTION_FORMATS, RequestArgs, BaseAPI, RequiredError } from './base';
+
+/**
+ *
+ * @export
+ * @interface InlineResponse501
+ */
+export interface InlineResponse501 {
+ /**
+ *
+ * @type {string}
+ * @memberof InlineResponse501
+ */
+ message?: string;
+}
+/**
+ *
+ * @export
+ * @interface InvokeContractV1Request
+ */
+export interface InvokeContractV1Request {
+ /**
+ *
+ * @type {any}
+ * @memberof InvokeContractV1Request
+ */
+ contractName?: any | null;
+}
+/**
+ *
+ * @export
+ * @interface IrohaBaseConfig
+ */
+export interface IrohaBaseConfig {
+ [key: string]: object | any;
+
+ /**
+ *
+ * @type {string}
+ * @memberof IrohaBaseConfig
+ */
+ irohaHost?: string;
+ /**
+ *
+ * @type {number}
+ * @memberof IrohaBaseConfig
+ */
+ irohaPort?: number;
+ /**
+ *
+ * @type {string}
+ * @memberof IrohaBaseConfig
+ */
+ creatorAccountId?: string;
+ /**
+ *
+ * @type {Array}
+ * @memberof IrohaBaseConfig
+ */
+ privKey?: Array<any>;
+ /**
+ *
+ * @type {number}
+ * @memberof IrohaBaseConfig
+ */
+ quorum?: number;
+ /**
+ *
+ * @type {number}
+ * @memberof IrohaBaseConfig
+ */
+ timeoutLimit?: number;
+ /**
+ * Can only be set to false for an insecure grpc connection.
+ * @type {boolean}
+ * @memberof IrohaBaseConfig
+ */
+ tls?: boolean;
+}
+/**
+ *
+ * @export
+ * @enum {string}
+ */
+export enum IrohaCommand {
+ /**
+ * Make entity in the system, capable of sending transactions or queries, storing signatories, personal data and identifiers.
+ */
+ CreateAccount = 'createAccount',
+ /**
+ * Set key-value information for a given account.
+ */
+ SetAccountDetail = 'setAccountDetail',
+ /**
+ * Set the number of signatories required to confirm the identity of a user, who creates the transaction.
+ */
+ SetAccountQuorum = 'setAccountQuorum',
+ /**
+ * Set key-value information for a given account if the old value matches the value passed.
+ */
+ CompareAndSetAccountDetail = 'compareAndSetAccountDetail',
+ /**
+ * Create a new type of asset, unique in a domain. An asset is a countable representation of a commodity.
+ */
+ CreateAsset = 'createAsset',
+ /**
+ * Increase the quantity of an asset on account of transaction creator.
+ */
+ AddAssetQuantity = 'addAssetQuantity',
+ /**
+ * Decrease the number of assets on account of transaction creator.
+ */
+ SubtractAssetQuantity = 'subtractAssetQuantity',
+ /**
+ * Share assets within the account in peer network: in the way that source account transfers assets to the target account.
+ */
+ TransferAsset = 'transferAsset',
+ /**
+ * Make new domain in Iroha network, which is a group of accounts.
+ */
+ CreateDomain = 'createDomain',
+ /**
+ * Create a new role in the system from the set of permissions.
+ */
+ CreateRole = 'createRole',
+ /**
+ * Detach a role from the set of roles of an account.
+ */
+ DetachRole = 'detachRole',
+ /**
+ * Promote an account to some created role in the system, where a role is a set of permissions account has to perform an action (command or query).
+ */
+ AppendRole = 'appendRole',
+ /**
+ * Add an identifier to the account. Such identifier is a public key of another device or a public key of another user.
+ */
+ AddSignatory = 'addSignatory',
+ /**
+ * Remove a public key, associated with an identity, from an account
+ */
+ RemoveSignatory = 'removeSignatory',
+ /**
+ * Give another account rights to perform actions on the account of transaction sender (give someone right to do something with my account).
+ */
+ GrantPermission = 'grantPermission',
+ /**
+ * Revoke or dismiss given granted permission from another account in the network.
+ */
+ RevokePermission = 'revokePermission',
+ /**
+ * Write into ledger the fact of peer addition into the peer network.
+ */
+ AddPeer = 'addPeer',
+ /**
+ * Write into ledger the fact of peer removal from the network.
+ */
+ RemovePeer = 'removePeer',
+ /**
+ * This command is not available for use; it was added for backward compatibility with Iroha.
+ */
+ SetSettingValue = 'setSettingValue',
+ /**
+ * This command is not available for use because it is related to smart contracts.
+ */
+ CallEngine = 'callEngine'
+}
+
+/**
+ *
+ * @export
+ * @enum {string}
+ */
+export enum IrohaQuery {
+ /**
+ * To get the state of an account
+ */
+ GetAccount = 'getAccount',
+ /**
+ * To get details of the account.
+ */
+ GetAccountDetail = 'getAccountDetail',
+ /**
+ * To get information on the given asset (as for now - its precision).
+ */
+ GetAssetInfo = 'getAssetInfo',
+ /**
+ * To get the state of all assets in an account (a balance).
+ */
+ GetAccountAssets = 'getAccountAssets',
+ /**
+ * To retrieve information about transactions, based on their hashes.
+ */
+ GetTransactions = 'getTransactions',
+ /**
+ * To retrieve a list of pending (not fully signed) multisignature transactions or batches of transactions issued by account of query creator.
+ */
+ GetPendingTransactions = 'getPendingTransactions',
+ /**
+ * To retrieve a list of transactions per account.
+ */
+ GetAccountTransactions = 'getAccountTransactions',
+ /**
+ * To retrieve all transactions associated with given account and asset.
+ */
+ GetAccountAssetTransactions = 'getAccountAssetTransactions',
+ /**
+ * To get existing roles in the system.
+ */
+ GetRoles = 'getRoles',
+ /**
+ * To get signatories, which act as an identity of the account.
+ */
+ GetSignatories = 'getSignatories',
+ /**
+ * To get available permissions per role in the system.
+ */
+ GetRolePermissions = 'getRolePermissions',
+ /**
+ * To get a specific block, using its height as an identifier.
+ */
+ GetBlock = 'getBlock',
+ /**
+ * To retrieve a receipt of a CallEngine command. Allows to access the event log created during computations inside the EVM.
+ */
+ GetEngineReceipts = 'getEngineReceipts',
+ /**
+ * To get new blocks as soon as they are committed, a user can invoke FetchCommits RPC call to Iroha network.
+ */
+ FetchCommits = 'fetchCommits',
+ /**
+ * A query that returns a list of peers in Iroha network.
+ */
+ GetPeers = 'getPeers'
+}
+
+/**
+ *
+ * @export
+ * @interface KeyPair
+ */
+export interface KeyPair {
+ /**
+ * SHA-3 ed25519 public keys of length 64 are recommended.
+ * @type {string}
+ * @memberof KeyPair
+ */
+ publicKey: string;
+ /**
+ * SHA-3 ed25519 private keys of length 64 are recommended.
+ * @type {string}
+ * @memberof KeyPair
+ */
+ privateKey: string;
+}
+/**
+ *
+ * @export
+ * @interface RunTransactionRequestV1
+ */
+export interface RunTransactionRequestV1 {
+ /**
+ *
+ * @type {string}
+ * @memberof RunTransactionRequestV1
+ */
+ commandName: string;
+ /**
+ *
+ * @type {IrohaBaseConfig}
+ * @memberof RunTransactionRequestV1
+ */
+ baseConfig?: IrohaBaseConfig;
+ /**
+ * The list of arguments to pass in to the transaction request.
+ * @type {Array}
+ * @memberof RunTransactionRequestV1
+ */
+ params: Array<any>;
+}
+/**
+ *
+ * @export
+ * @interface RunTransactionResponse
+ */
+export interface RunTransactionResponse {
+ /**
+ *
+ * @type {any}
+ * @memberof RunTransactionResponse
+ */
+ transactionReceipt: any | null;
+}
+
+/**
+ * DefaultApi - axios parameter creator
+ * @export
+ */
+export const DefaultApiAxiosParamCreator = function (configuration?: Configuration) {
+ return {
+ /**
+ *
+ * @summary Get the Prometheus Metrics
+ * @param {*} [options] Override http request option.
+ * @throws {RequiredError}
+ */
+ getPrometheusMetricsV1: async (options: any = {}): Promise<RequestArgs> => {
+ const localVarPath = `/api/v1/plugins/@hyperledger/cactus-plugin-ledger-connector-iroha/get-prometheus-exporter-metrics`;
+ // use dummy base URL string because the URL constructor only accepts absolute URLs.
+ const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
+ let baseOptions;
+ if (configuration) {
+ baseOptions = configuration.baseOptions;
+ }
+
+ const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options};
+ const localVarHeaderParameter = {} as any;
+ const localVarQueryParameter = {} as any;
+
+
+
+ setSearchParams(localVarUrlObj, localVarQueryParameter, options.query);
+ let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
+ localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
+
+ return {
+ url: toPathString(localVarUrlObj),
+ options: localVarRequestOptions,
+ };
+ },
+ /**
+ *
+ * @summary Invokes a contract on an Iroha ledger
+ * @param {InvokeContractV1Request} [invokeContractV1Request]
+ * @param {*} [options] Override http request option.
+ * @throws {RequiredError}
+ */
+ invokeContractV1: async (invokeContractV1Request?: InvokeContractV1Request, options: any = {}): Promise<RequestArgs> => {
+ const localVarPath = `/api/v1/plugins/@hyperledger/cactus-plugin-ledger-connector-iroha/invoke-contract`;
+ // use dummy base URL string because the URL constructor only accepts absolute URLs.
+ const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
+ let baseOptions;
+ if (configuration) {
+ baseOptions = configuration.baseOptions;
+ }
+
+ const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options};
+ const localVarHeaderParameter = {} as any;
+ const localVarQueryParameter = {} as any;
+
+
+
+ localVarHeaderParameter['Content-Type'] = 'application/json';
+
+ setSearchParams(localVarUrlObj, localVarQueryParameter, options.query);
+ let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
+ localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
+ localVarRequestOptions.data = serializeDataIfNeeded(invokeContractV1Request, localVarRequestOptions, configuration)
+
+ return {
+ url: toPathString(localVarUrlObj),
+ options: localVarRequestOptions,
+ };
+ },
+ /**
+ *
+ * @summary Executes a transaction on an Iroha ledger
+ * @param {RunTransactionRequestV1} [runTransactionRequestV1]
+ * @param {*} [options] Override http request option.
+ * @throws {RequiredError}
+ */
+ runTransactionV1: async (runTransactionRequestV1?: RunTransactionRequestV1, options: any = {}): Promise<RequestArgs> => {
+ const localVarPath = `/api/v1/plugins/@hyperledger/cactus-plugin-ledger-connector-iroha/run-transaction`;
+ // use dummy base URL string because the URL constructor only accepts absolute URLs.
+ const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
+ let baseOptions;
+ if (configuration) {
+ baseOptions = configuration.baseOptions;
+ }
+
+ const localVarRequestOptions = { method: 'POST', ...baseOptions, ...options};
+ const localVarHeaderParameter = {} as any;
+ const localVarQueryParameter = {} as any;
+
+
+
+ localVarHeaderParameter['Content-Type'] = 'application/json';
+
+ setSearchParams(localVarUrlObj, localVarQueryParameter, options.query);
+ let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
+ localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};
+ localVarRequestOptions.data = serializeDataIfNeeded(runTransactionRequestV1, localVarRequestOptions, configuration)
+
+ return {
+ url: toPathString(localVarUrlObj),
+ options: localVarRequestOptions,
+ };
+ },
+ }
+};
+
+/**
+ * DefaultApi - functional programming interface
+ * @export
+ */
+export const DefaultApiFp = function(configuration?: Configuration) {
+ const localVarAxiosParamCreator = DefaultApiAxiosParamCreator(configuration)
+ return {
+ /**
+ *
+ * @summary Get the Prometheus Metrics
+ * @param {*} [options] Override http request option.
+ * @throws {RequiredError}
+ */
+ async getPrometheusMetricsV1(options?: any): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<string>> {
+ const localVarAxiosArgs = await localVarAxiosParamCreator.getPrometheusMetricsV1(options);
+ return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration);
+ },
+ /**
+ *
+ * @summary Invokes a contract on an Iroha ledger
+ * @param {InvokeContractV1Request} [invokeContractV1Request]
+ * @param {*} [options] Override http request option.
+ * @throws {RequiredError}
+ */
+ async invokeContractV1(invokeContractV1Request?: InvokeContractV1Request, options?: any): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<void>> {
+ const localVarAxiosArgs = await localVarAxiosParamCreator.invokeContractV1(invokeContractV1Request, options);
+ return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration);
+ },
+ /**
+ *
+ * @summary Executes a transaction on an Iroha ledger
+ * @param {RunTransactionRequestV1} [runTransactionRequestV1]
+ * @param {*} [options] Override http request option.
+ * @throws {RequiredError}
+ */
+ async runTransactionV1(runTransactionRequestV1?: RunTransactionRequestV1, options?: any): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<RunTransactionResponse>> {
+ const localVarAxiosArgs = await localVarAxiosParamCreator.runTransactionV1(runTransactionRequestV1, options);
+ return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration);
+ },
+ }
+};
+
+/**
+ * DefaultApi - factory interface
+ * @export
+ */
+export const DefaultApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) {
+ const localVarFp = DefaultApiFp(configuration)
+ return {
+ /**
+ *
+ * @summary Get the Prometheus Metrics
+ * @param {*} [options] Override http request option.
+ * @throws {RequiredError}
+ */
+ getPrometheusMetricsV1(options?: any): AxiosPromise<string> {
+ return localVarFp.getPrometheusMetricsV1(options).then((request) => request(axios, basePath));
+ },
+ /**
+ *
+ * @summary Invokes a contract on an Iroha ledger
+ * @param {InvokeContractV1Request} [invokeContractV1Request]
+ * @param {*} [options] Override http request option.
+ * @throws {RequiredError}
+ */
+ invokeContractV1(invokeContractV1Request?: InvokeContractV1Request, options?: any): AxiosPromise<void> {
+ return localVarFp.invokeContractV1(invokeContractV1Request, options).then((request) => request(axios, basePath));
+ },
+ /**
+ *
+ * @summary Executes a transaction on an Iroha ledger
+ * @param {RunTransactionRequestV1} [runTransactionRequestV1]
+ * @param {*} [options] Override http request option.
+ * @throws {RequiredError}
+ */
+ runTransactionV1(runTransactionRequestV1?: RunTransactionRequestV1, options?: any): AxiosPromise<RunTransactionResponse> {
+ return localVarFp.runTransactionV1(runTransactionRequestV1, options).then((request) => request(axios, basePath));
+ },
+ };
+};
+
+/**
+ * DefaultApi - object-oriented interface
+ * @export
+ * @class DefaultApi
+ * @extends {BaseAPI}
+ */
+export class DefaultApi extends BaseAPI {
+ /**
+ *
+ * @summary Get the Prometheus Metrics
+ * @param {*} [options] Override http request option.
+ * @throws {RequiredError}
+ * @memberof DefaultApi
+ */
+ public getPrometheusMetricsV1(options?: any) {
+ return DefaultApiFp(this.configuration).getPrometheusMetricsV1(options).then((request) => request(this.axios, this.basePath));
+ }
+
+ /**
+ *
+ * @summary Invokes a contract on an Iroha ledger
+ * @param {InvokeContractV1Request} [invokeContractV1Request]
+ * @param {*} [options] Override http request option.
+ * @throws {RequiredError}
+ * @memberof DefaultApi
+ */
+ public invokeContractV1(invokeContractV1Request?: InvokeContractV1Request, options?: any) {
+ return DefaultApiFp(this.configuration).invokeContractV1(invokeContractV1Request, options).then((request) => request(this.axios, this.basePath));
+ }
+
+ /**
+ *
+ * @summary Executes a transaction on an Iroha ledger
+ * @param {RunTransactionRequestV1} [runTransactionRequestV1]
+ * @param {*} [options] Override http request option.
+ * @throws {RequiredError}
+ * @memberof DefaultApi
+ */
+ public runTransactionV1(runTransactionRequestV1?: RunTransactionRequestV1, options?: any) {
+ return DefaultApiFp(this.configuration).runTransactionV1(runTransactionRequestV1, options).then((request) => request(this.axios, this.basePath));
+ }
+}
+
+
diff --git a/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/generated/openapi/typescript-axios/base.ts b/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/generated/openapi/typescript-axios/base.ts
new file mode 100644
index 00000000000..e859f2286a1
--- /dev/null
+++ b/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/generated/openapi/typescript-axios/base.ts
@@ -0,0 +1,71 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Hyperledger Cactus Plugin - Connector Iroha
+ * Can perform basic tasks on an Iroha ledger
+ *
+ * The version of the OpenAPI document: 0.0.1
+ *
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+
+import { Configuration } from "./configuration";
+// Some imports not used depending on template conditions
+// @ts-ignore
+import globalAxios, { AxiosPromise, AxiosInstance } from 'axios';
+
+export const BASE_PATH = "https://www.cactus.stream".replace(/\/+$/, "");
+
+/**
+ *
+ * @export
+ */
+export const COLLECTION_FORMATS = {
+ csv: ",",
+ ssv: " ",
+ tsv: "\t",
+ pipes: "|",
+};
+
+/**
+ *
+ * @export
+ * @interface RequestArgs
+ */
+export interface RequestArgs {
+ url: string;
+ options: any;
+}
+
+/**
+ *
+ * @export
+ * @class BaseAPI
+ */
+export class BaseAPI {
+ protected configuration: Configuration | undefined;
+
+ constructor(configuration?: Configuration, protected basePath: string = BASE_PATH, protected axios: AxiosInstance = globalAxios) {
+ if (configuration) {
+ this.configuration = configuration;
+ this.basePath = configuration.basePath || this.basePath;
+ }
+ }
+};
+
+/**
+ *
+ * @export
+ * @class RequiredError
+ * @extends {Error}
+ */
+export class RequiredError extends Error {
+ name: "RequiredError" = "RequiredError";
+ constructor(public field: string, msg?: string) {
+ super(msg);
+ }
+}
diff --git a/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/generated/openapi/typescript-axios/common.ts b/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/generated/openapi/typescript-axios/common.ts
new file mode 100644
index 00000000000..7be7eb826f2
--- /dev/null
+++ b/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/generated/openapi/typescript-axios/common.ts
@@ -0,0 +1,138 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Hyperledger Cactus Plugin - Connector Iroha
+ * Can perform basic tasks on an Iroha ledger
+ *
+ * The version of the OpenAPI document: 0.0.1
+ *
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+
+import { Configuration } from "./configuration";
+import { RequiredError, RequestArgs } from "./base";
+import { AxiosInstance } from 'axios';
+
+/**
+ *
+ * @export
+ */
+export const DUMMY_BASE_URL = 'https://example.com'
+
+/**
+ *
+ * @throws {RequiredError}
+ * @export
+ */
+export const assertParamExists = function (functionName: string, paramName: string, paramValue: unknown) {
+ if (paramValue === null || paramValue === undefined) {
+ throw new RequiredError(paramName, `Required parameter ${paramName} was null or undefined when calling ${functionName}.`);
+ }
+}
+
+/**
+ *
+ * @export
+ */
+export const setApiKeyToObject = async function (object: any, keyParamName: string, configuration?: Configuration) {
+ if (configuration && configuration.apiKey) {
+ const localVarApiKeyValue = typeof configuration.apiKey === 'function'
+ ? await configuration.apiKey(keyParamName)
+ : await configuration.apiKey;
+ object[keyParamName] = localVarApiKeyValue;
+ }
+}
+
+/**
+ *
+ * @export
+ */
+export const setBasicAuthToObject = function (object: any, configuration?: Configuration) {
+ if (configuration && (configuration.username || configuration.password)) {
+ object["auth"] = { username: configuration.username, password: configuration.password };
+ }
+}
+
+/**
+ *
+ * @export
+ */
+export const setBearerAuthToObject = async function (object: any, configuration?: Configuration) {
+ if (configuration && configuration.accessToken) {
+ const accessToken = typeof configuration.accessToken === 'function'
+ ? await configuration.accessToken()
+ : await configuration.accessToken;
+ object["Authorization"] = "Bearer " + accessToken;
+ }
+}
+
+/**
+ *
+ * @export
+ */
+export const setOAuthToObject = async function (object: any, name: string, scopes: string[], configuration?: Configuration) {
+ if (configuration && configuration.accessToken) {
+ const localVarAccessTokenValue = typeof configuration.accessToken === 'function'
+ ? await configuration.accessToken(name, scopes)
+ : await configuration.accessToken;
+ object["Authorization"] = "Bearer " + localVarAccessTokenValue;
+ }
+}
+
+/**
+ *
+ * @export
+ */
+export const setSearchParams = function (url: URL, ...objects: any[]) {
+ const searchParams = new URLSearchParams(url.search);
+ for (const object of objects) {
+ for (const key in object) {
+ if (Array.isArray(object[key])) {
+ searchParams.delete(key);
+ for (const item of object[key]) {
+ searchParams.append(key, item);
+ }
+ } else {
+ searchParams.set(key, object[key]);
+ }
+ }
+ }
+ url.search = searchParams.toString();
+}
+
+/**
+ *
+ * @export
+ */
+export const serializeDataIfNeeded = function (value: any, requestOptions: any, configuration?: Configuration) {
+ const nonString = typeof value !== 'string';
+ const needsSerialization = nonString && configuration && configuration.isJsonMime
+ ? configuration.isJsonMime(requestOptions.headers['Content-Type'])
+ : nonString;
+ return needsSerialization
+ ? JSON.stringify(value !== undefined ? value : {})
+ : (value || "");
+}
+
+/**
+ *
+ * @export
+ */
+export const toPathString = function (url: URL) {
+ return url.pathname + url.search + url.hash
+}
+
+/**
+ *
+ * @export
+ */
+export const createRequestFunction = function (axiosArgs: RequestArgs, globalAxios: AxiosInstance, BASE_PATH: string, configuration?: Configuration) {
+ return (axios: AxiosInstance = globalAxios, basePath: string = BASE_PATH) => {
+ const axiosRequestArgs = {...axiosArgs.options, url: (configuration?.basePath || basePath) + axiosArgs.url};
+ return axios.request(axiosRequestArgs);
+ };
+}
diff --git a/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/generated/openapi/typescript-axios/configuration.ts b/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/generated/openapi/typescript-axios/configuration.ts
new file mode 100644
index 00000000000..416649445c5
--- /dev/null
+++ b/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/generated/openapi/typescript-axios/configuration.ts
@@ -0,0 +1,101 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Hyperledger Cactus Plugin - Connector Iroha
+ * Can perform basic tasks on an Iroha ledger
+ *
+ * The version of the OpenAPI document: 0.0.1
+ *
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+
+export interface ConfigurationParameters {
+ apiKey?: string | Promise<string> | ((name: string) => string) | ((name: string) => Promise<string>);
+ username?: string;
+ password?: string;
+ accessToken?: string | Promise<string> | ((name?: string, scopes?: string[]) => string) | ((name?: string, scopes?: string[]) => Promise<string>);
+ basePath?: string;
+ baseOptions?: any;
+ formDataCtor?: new () => any;
+}
+
+export class Configuration {
+ /**
+ * parameter for apiKey security
+ * @param name security name
+ * @memberof Configuration
+ */
+ apiKey?: string | Promise<string> | ((name: string) => string) | ((name: string) => Promise<string>);
+ /**
+ * parameter for basic security
+ *
+ * @type {string}
+ * @memberof Configuration
+ */
+ username?: string;
+ /**
+ * parameter for basic security
+ *
+ * @type {string}
+ * @memberof Configuration
+ */
+ password?: string;
+ /**
+ * parameter for oauth2 security
+ * @param name security name
+ * @param scopes oauth2 scope
+ * @memberof Configuration
+ */
+ accessToken?: string | Promise<string> | ((name?: string, scopes?: string[]) => string) | ((name?: string, scopes?: string[]) => Promise<string>);
+ /**
+ * override base path
+ *
+ * @type {string}
+ * @memberof Configuration
+ */
+ basePath?: string;
+ /**
+ * base options for axios calls
+ *
+ * @type {any}
+ * @memberof Configuration
+ */
+ baseOptions?: any;
+ /**
+ * The FormData constructor that will be used to create multipart form data
+ * requests. You can inject this here so that execution environments that
+ * do not support the FormData class can still run the generated client.
+ *
+ * @type {new () => FormData}
+ */
+ formDataCtor?: new () => any;
+
+ constructor(param: ConfigurationParameters = {}) {
+ this.apiKey = param.apiKey;
+ this.username = param.username;
+ this.password = param.password;
+ this.accessToken = param.accessToken;
+ this.basePath = param.basePath;
+ this.baseOptions = param.baseOptions;
+ this.formDataCtor = param.formDataCtor;
+ }
+
+ /**
+ * Check if the given MIME is a JSON MIME.
+ * JSON MIME examples:
+ * application/json
+ * application/json; charset=UTF8
+ * APPLICATION/JSON
+ * application/vnd.company+json
+ * @param mime - MIME (Multipurpose Internet Mail Extensions)
+ * @return True if the given MIME is JSON, false otherwise.
+ */
+ public isJsonMime(mime: string): boolean {
+ const jsonMime: RegExp = new RegExp('^(application\/json|[^;/ \t]+\/[^;/ \t]+[+]json)[ \t]*(;.*)?$', 'i');
+ return mime !== null && (jsonMime.test(mime) || mime.toLowerCase() === 'application/json-patch+json');
+ }
+}
diff --git a/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/generated/openapi/typescript-axios/index.ts b/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/generated/openapi/typescript-axios/index.ts
new file mode 100644
index 00000000000..56c46477436
--- /dev/null
+++ b/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/generated/openapi/typescript-axios/index.ts
@@ -0,0 +1,18 @@
+/* tslint:disable */
+/* eslint-disable */
+/**
+ * Hyperledger Cactus Plugin - Connector Iroha
+ * Can perform basic tasks on an Iroha ledger
+ *
+ * The version of the OpenAPI document: 0.0.1
+ *
+ *
+ * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
+ * https://openapi-generator.tech
+ * Do not edit the class manually.
+ */
+
+
+export * from "./api";
+export * from "./configuration";
+
diff --git a/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/index.ts b/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/index.ts
new file mode 100755
index 00000000000..87cb558397c
--- /dev/null
+++ b/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/index.ts
@@ -0,0 +1 @@
+export * from "./public-api";
diff --git a/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/index.web.ts b/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/index.web.ts
new file mode 100755
index 00000000000..bdf54028d23
--- /dev/null
+++ b/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/index.web.ts
@@ -0,0 +1 @@
+export * from "./generated/openapi/typescript-axios/index";
diff --git a/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/plugin-factory-ledger-connector.ts b/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/plugin-factory-ledger-connector.ts
new file mode 100644
index 00000000000..7734d3d514b
--- /dev/null
+++ b/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/plugin-factory-ledger-connector.ts
@@ -0,0 +1,20 @@
+import {
+ IPluginFactoryOptions,
+ PluginFactory,
+} from "@hyperledger/cactus-core-api";
+import {
+ IPluginLedgerConnectorIrohaOptions,
+ PluginLedgerConnectorIroha,
+} from "./plugin-ledger-connector-iroha";
+
+export class PluginFactoryLedgerConnector extends PluginFactory<
+ PluginLedgerConnectorIroha,
+ IPluginLedgerConnectorIrohaOptions,
+ IPluginFactoryOptions
+> {
+ async create(
+ pluginOptions: IPluginLedgerConnectorIrohaOptions,
+ ): Promise<PluginLedgerConnectorIroha> {
+ return new PluginLedgerConnectorIroha(pluginOptions);
+ }
+}
diff --git a/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/plugin-ledger-connector-iroha.ts b/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/plugin-ledger-connector-iroha.ts
new file mode 100644
index 00000000000..7e1a03dc6c5
--- /dev/null
+++ b/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/plugin-ledger-connector-iroha.ts
@@ -0,0 +1,636 @@
+import { Server } from "http";
+import * as grpc from "grpc";
+import { Server as SecureServer } from "https";
+import { CommandService_v1Client as CommandService } from "iroha-helpers-ts/lib/proto/endpoint_grpc_pb";
+import { QueryService_v1Client as QueryService } from "iroha-helpers-ts/lib/proto/endpoint_grpc_pb";
+import commands from "iroha-helpers-ts/lib/commands/index";
+import queries from "iroha-helpers-ts/lib/queries";
+import type { Express } from "express";
+import { promisify } from "util";
+import { Optional } from "typescript-optional";
+import {
+ GrantablePermission,
+ GrantablePermissionMap,
+} from "iroha-helpers-ts/lib/proto/primitive_pb";
+
+import {
+ ConsensusAlgorithmFamily,
+ IPluginLedgerConnector,
+ IWebServiceEndpoint,
+ IPluginWebService,
+ ICactusPlugin,
+ ICactusPluginOptions,
+} from "@hyperledger/cactus-core-api";
+
+import {
+ PluginRegistry,
+ consensusHasTransactionFinality,
+} from "@hyperledger/cactus-core";
+
+import {
+ Checks,
+ Logger,
+ LoggerProvider,
+ LogLevelDesc,
+ Http405NotAllowedError,
+} from "@hyperledger/cactus-common";
+import { RuntimeError } from "run-time-error";
+import {
+ IrohaCommand,
+ IrohaQuery,
+ RunTransactionRequestV1,
+ RunTransactionResponse,
+} from "./generated/openapi/typescript-axios";
+
+import { RunTransactionEndpoint } from "./web-services/run-transaction-endpoint";
+import { PrometheusExporter } from "./prometheus-exporter/prometheus-exporter";
+import {
+ GetPrometheusExporterMetricsEndpointV1,
+ IGetPrometheusExporterMetricsEndpointV1Options,
+} from "./web-services/get-prometheus-exporter-metrics-endpoint-v1";
+
+export const E_KEYCHAIN_NOT_FOUND = "cactus.connector.iroha.keychain_not_found";
+
+export interface IPluginLedgerConnectorIrohaOptions
+ extends ICactusPluginOptions {
+ rpcToriiPortHost: string;
+ pluginRegistry: PluginRegistry;
+ prometheusExporter?: PrometheusExporter;
+ logLevel?: LogLevelDesc;
+}
+
+export class PluginLedgerConnectorIroha
+ implements
+ IPluginLedgerConnector<
+ never,
+ never,
+ RunTransactionRequestV1,
+ RunTransactionResponse
+ >,
+ ICactusPlugin,
+ IPluginWebService {
+ private readonly instanceId: string;
+ public prometheusExporter: PrometheusExporter;
+ private readonly log: Logger;
+ private readonly pluginRegistry: PluginRegistry;
+
+ private endpoints: IWebServiceEndpoint[] | undefined;
+ private httpServer: Server | SecureServer | null = null;
+
+ public static readonly CLASS_NAME = "PluginLedgerConnectorIroha";
+
+ public get className(): string {
+ return PluginLedgerConnectorIroha.CLASS_NAME;
+ }
+
+ constructor(public readonly options: IPluginLedgerConnectorIrohaOptions) {
+ const fnTag = `${this.className}#constructor()`;
+ Checks.truthy(options, `${fnTag} arg options`);
+ Checks.truthy(
+ options.rpcToriiPortHost,
+ `${fnTag} options.rpcToriiPortHost`,
+ );
+ Checks.truthy(options.pluginRegistry, `${fnTag} options.pluginRegistry`);
+ Checks.truthy(options.instanceId, `${fnTag} options.instanceId`);
+
+ const level = this.options.logLevel || "INFO";
+ const label = this.className;
+ this.log = LoggerProvider.getOrCreate({ level, label });
+
+ this.instanceId = options.instanceId;
+ this.pluginRegistry = options.pluginRegistry;
+ this.prometheusExporter =
+ options.prometheusExporter ||
+ new PrometheusExporter({ pollingIntervalInMin: 1 });
+ Checks.truthy(
+ this.prometheusExporter,
+ `${fnTag} options.prometheusExporter`,
+ );
+
+ this.prometheusExporter.startMetricsCollection();
+ }
+
+ deployContract(): Promise<never> {
+ throw new RuntimeError("Method not implemented.");
+ }
+
+ public getPrometheusExporter(): PrometheusExporter {
+ return this.prometheusExporter;
+ }
+
+ public async getPrometheusExporterMetrics(): Promise<string> {
+ const res: string = await this.prometheusExporter.getPrometheusMetrics();
+ this.log.debug(`getPrometheusExporterMetrics() response: %o`, res);
+ return res;
+ }
+
+ public getInstanceId(): string {
+ return this.instanceId;
+ }
+
+ public async onPluginInit(): Promise<unknown> {
+ return;
+ }
+
+ public getHttpServer(): Optional<Server | SecureServer> {
+ return Optional.ofNullable(this.httpServer);
+ }
+
+ public async shutdown(): Promise<void> {
+ const serverMaybe = this.getHttpServer();
+ if (serverMaybe.isPresent()) {
+ const server = serverMaybe.get();
+ await promisify(server.close.bind(server))();
+ }
+ }
+
+ async registerWebServices(app: Express): Promise<IWebServiceEndpoint[]> {
+ const webServices = await this.getOrCreateWebServices();
+ await Promise.all(webServices.map((ws) => ws.registerExpress(app)));
+ return webServices;
+ }
+
+ public async getOrCreateWebServices(): Promise<IWebServiceEndpoint[]> {
+ if (Array.isArray(this.endpoints)) {
+ return this.endpoints;
+ }
+ const endpoints: IWebServiceEndpoint[] = [];
+ {
+ const endpoint = new RunTransactionEndpoint({
+ connector: this,
+ logLevel: this.options.logLevel,
+ });
+ endpoints.push(endpoint);
+ }
+ {
+ const opts: IGetPrometheusExporterMetricsEndpointV1Options = {
+ connector: this,
+ logLevel: this.options.logLevel,
+ };
+ const endpoint = new GetPrometheusExporterMetricsEndpointV1(opts);
+ endpoints.push(endpoint);
+ }
+ this.endpoints = endpoints;
+ return endpoints;
+ }
+
+ public getPackageName(): string {
+ return `@hyperledger/cactus-plugin-ledger-connector-iroha`;
+ }
+
+ public async getConsensusAlgorithmFamily(): Promise<
+ ConsensusAlgorithmFamily
+ > {
+ return ConsensusAlgorithmFamily.Authority;
+ }
+ public async hasTransactionFinality(): Promise<boolean> {
+ const currentConsensusAlgorithmFamily = await this.getConsensusAlgorithmFamily();
+
+ return consensusHasTransactionFinality(currentConsensusAlgorithmFamily);
+ }
+
+ public async transact(
+ req: RunTransactionRequestV1,
+ ): Promise<RunTransactionResponse> {
+ const { baseConfig } = req;
+ if (
+ !baseConfig ||
+ !baseConfig.privKey ||
+ !baseConfig.creatorAccountId ||
+ !baseConfig.irohaHost ||
+ !baseConfig.irohaPort ||
+ !baseConfig.quorum ||
+ !baseConfig.timeoutLimit
+ ) {
+ this.log.debug(
+ "Certain field within the Iroha basic configuration is missing!",
+ );
+ throw new RuntimeError("Some fields in baseConfig is undefined");
+ }
+ const irohaHostPort = `${baseConfig.irohaHost}:${baseConfig.irohaPort}`;
+
+ let grpcCredentials;
+ if (baseConfig.tls) {
+ throw new RuntimeError("TLS option is not supported");
+ } else {
+ grpcCredentials = grpc.credentials.createInsecure();
+ }
+ const commandService = new CommandService(
+ irohaHostPort,
+ // TODO: supply secure gRPC credentials in a production environment
+ grpcCredentials,
+ );
+ const queryService = new QueryService(irohaHostPort, grpcCredentials);
+ const commandOptions = {
+ privateKeys: baseConfig.privKey, // commands need an array of private keys
+ creatorAccountId: baseConfig.creatorAccountId,
+ quorum: baseConfig.quorum,
+ commandService: commandService,
+ timeoutLimit: baseConfig.timeoutLimit,
+ };
+ const queryOptions = {
+ privateKey: baseConfig.privKey[0], // queries only need a single private key
+ creatorAccountId: baseConfig.creatorAccountId as string,
+ queryService: queryService,
+ timeoutLimit: baseConfig.timeoutLimit,
+ };
+
+ switch (req.commandName) {
+ case IrohaCommand.CreateAccount: {
+ try {
+ const response = await commands.createAccount(commandOptions, {
+ accountName: req.params[0],
+ domainId: req.params[1],
+ publicKey: req.params[2],
+ });
+ return { transactionReceipt: response };
+ } catch (err) {
+ throw new RuntimeError(err);
+ }
+ }
+ case IrohaCommand.SetAccountDetail: {
+ try {
+ const response = await commands.setAccountDetail(commandOptions, {
+ accountId: req.params[0],
+ key: req.params[1],
+ value: req.params[2],
+ });
+ return { transactionReceipt: response };
+ } catch (err) {
+ throw new RuntimeError(err);
+ }
+ }
+ case IrohaCommand.CompareAndSetAccountDetail: {
+ try {
+ const response = await commands.compareAndSetAccountDetail(
+ commandOptions,
+ {
+ accountId: req.params[0],
+ key: req.params[1],
+ value: req.params[2],
+ oldValue: req.params[3],
+ },
+ );
+ return { transactionReceipt: response };
+ } catch (err) {
+ throw new RuntimeError(err);
+ }
+ }
+ case IrohaCommand.CreateAsset: {
+ try {
+ const response = await commands // (coolcoin#test; precision:3)
+ .createAsset(commandOptions, {
+ assetName: req.params[0],
+ domainId: req.params[1],
+ precision: req.params[2],
+ });
+ return { transactionReceipt: response };
+ } catch (err) {
+ throw new RuntimeError(err);
+ }
+ }
+ case IrohaCommand.CreateDomain: {
+ try {
+ const response = await commands.createDomain(commandOptions, {
+ domainId: req.params[0],
+ defaultRole: req.params[1],
+ });
+ return { transactionReceipt: response };
+ } catch (err) {
+ throw new RuntimeError(err);
+ }
+ }
+ case IrohaCommand.SetAccountQuorum: {
+ try {
+ const response = await commands.setAccountQuorum(commandOptions, {
+ accountId: req.params[0],
+ quorum: req.params[1],
+ });
+ return { transactionReceipt: response };
+ } catch (err) {
+ throw new RuntimeError(err);
+ }
+ }
+ case IrohaCommand.AddAssetQuantity: {
+ try {
+ const response = await commands.addAssetQuantity(commandOptions, {
+ assetId: req.params[0],
+ amount: req.params[1],
+ });
+ return { transactionReceipt: response };
+ } catch (err) {
+ throw new RuntimeError(err);
+ }
+ }
+ case IrohaCommand.SubtractAssetQuantity: {
+ try {
+ const response = await commands.subtractAssetQuantity(
+ commandOptions,
+ {
+ assetId: req.params[0],
+ amount: req.params[1],
+ },
+ );
+ return { transactionReceipt: response };
+ } catch (err) {
+ throw new RuntimeError(err);
+ }
+ }
+ case IrohaCommand.TransferAsset: {
+ try {
+ const response = await commands.transferAsset(commandOptions, {
+ srcAccountId: req.params[0],
+ destAccountId: req.params[1],
+ assetId: req.params[2],
+ description: req.params[3],
+ amount: req.params[4],
+ });
+ return { transactionReceipt: response };
+ } catch (err) {
+ throw new RuntimeError(err);
+ }
+ }
+ case IrohaQuery.GetSignatories: {
+ try {
+ const queryRes = await queries.getSignatories(queryOptions, {
+ accountId: req.params[0],
+ });
+ return { transactionReceipt: queryRes };
+ } catch (err) {
+ throw new RuntimeError(err);
+ }
+ }
+ case IrohaQuery.GetAccount: {
+ try {
+ const queryRes = await queries.getAccount(queryOptions, {
+ accountId: req.params[0],
+ });
+ return { transactionReceipt: queryRes };
+ } catch (err) {
+ throw new RuntimeError(err);
+ }
+ }
+ case IrohaQuery.GetAccountDetail: {
+ try {
+ const queryRes = await queries.getAccountDetail(queryOptions, {
+ accountId: req.params[0],
+ key: req.params[1],
+ writer: req.params[2],
+ pageSize: req.params[3],
+ paginationKey: req.params[4],
+ paginationWriter: req.params[5],
+ });
+ return { transactionReceipt: queryRes };
+ } catch (err) {
+ throw new RuntimeError(err);
+ }
+ }
+ case IrohaQuery.GetAssetInfo: {
+ try {
+ const queryRes = await queries.getAssetInfo(queryOptions, {
+ assetId: req.params[0],
+ });
+ return { transactionReceipt: queryRes };
+ } catch (err) {
+ throw new RuntimeError(err);
+ }
+ }
+ case IrohaQuery.GetAccountAssets: {
+ try {
+ const queryRes = await queries.getAccountAssets(queryOptions, {
+ accountId: req.params[0],
+ pageSize: req.params[1],
+ firstAssetId: req.params[2],
+ });
+ return { transactionReceipt: queryRes };
+ } catch (err) {
+ throw new RuntimeError(err);
+ }
+ }
+ case IrohaCommand.AddSignatory: {
+ try {
+ const response = await commands.addSignatory(commandOptions, {
+ accountId: req.params[0],
+ publicKey: req.params[1],
+ });
+ return { transactionReceipt: response };
+ } catch (err) {
+ throw new RuntimeError(err);
+ }
+ }
+ case IrohaCommand.RemoveSignatory: {
+ try {
+ const response = await commands.removeSignatory(commandOptions, {
+ accountId: req.params[0],
+ publicKey: req.params[1],
+ });
+ return { transactionReceipt: response };
+ } catch (err) {
+ throw new RuntimeError(err);
+ }
+ }
+ case IrohaQuery.GetRoles: {
+ try {
+ const response = await queries.getRoles(queryOptions);
+ return { transactionReceipt: response };
+ } catch (err) {
+ throw new RuntimeError(err);
+ }
+ }
+ case IrohaCommand.CreateRole: {
+ try {
+ const response = await commands.createRole(commandOptions, {
+ roleName: req.params[0],
+ permissionsList: req.params[1],
+ });
+ return { transactionReceipt: response };
+ } catch (err) {
+ throw new RuntimeError(err);
+ }
+ }
+ case IrohaCommand.AppendRole: {
+ try {
+ const response = await commands.appendRole(commandOptions, {
+ accountId: req.params[0],
+ roleName: req.params[1],
+ });
+ return { transactionReceipt: response };
+ } catch (err) {
+ throw new RuntimeError(err);
+ }
+ }
+ case IrohaCommand.DetachRole: {
+ try {
+ const response = await commands.detachRole(commandOptions, {
+ accountId: req.params[0],
+ roleName: req.params[1],
+ });
+ return { transactionReceipt: response };
+ } catch (err) {
+ throw new RuntimeError(err);
+ }
+ }
+ case IrohaQuery.GetRolePermissions: {
+ try {
+ const response = await queries.getRolePermissions(queryOptions, {
+ roleId: req.params[0],
+ });
+ return { transactionReceipt: response };
+ } catch (err) {
+ throw new RuntimeError(err);
+ }
+ }
+ case IrohaCommand.GrantPermission: {
+ try {
+ type permission = keyof GrantablePermissionMap;
+ const response = await commands.grantPermission(commandOptions, {
+ accountId: req.params[0],
+ permission: GrantablePermission[req.params[1] as permission],
+ });
+ return { transactionReceipt: response };
+ } catch (err) {
+ throw new RuntimeError(err);
+ }
+ }
+ case IrohaCommand.RevokePermission: {
+ try {
+ type permission = keyof GrantablePermissionMap;
+ const response = await commands.revokePermission(commandOptions, {
+ accountId: req.params[0],
+ permission: GrantablePermission[req.params[1] as permission],
+ });
+ return { transactionReceipt: response };
+ } catch (err) {
+ throw new RuntimeError(err);
+ }
+ }
+ case IrohaCommand.SetSettingValue: {
+ throw new Http405NotAllowedError("SetSettingValue is not supported.");
+ }
+ case IrohaQuery.GetTransactions: {
+ try {
+ const response = await queries.getTransactions(queryOptions, {
+ txHashesList: req.params[0],
+ });
+ return { transactionReceipt: response };
+ } catch (err) {
+ throw new RuntimeError(err);
+ }
+ }
+ case IrohaQuery.GetPendingTransactions: {
+ try {
+ const response = await queries.getPendingTransactions(queryOptions, {
+ pageSize: req.params[0],
+ firstTxHash: req.params[1],
+ });
+ return { transactionReceipt: response };
+ } catch (err) {
+ throw new RuntimeError(err);
+ }
+ }
+ case IrohaQuery.GetAccountTransactions: {
+ try {
+ const response = await queries.getAccountTransactions(queryOptions, {
+ accountId: req.params[0],
+ pageSize: req.params[1],
+ firstTxHash: req.params[2],
+ });
+ return { transactionReceipt: response };
+ } catch (err) {
+ throw new RuntimeError(err);
+ }
+ }
+ case IrohaQuery.GetAccountAssetTransactions: {
+ try {
+ const response = await queries.getAccountAssetTransactions(
+ queryOptions,
+ {
+ accountId: req.params[0],
+ assetId: req.params[1],
+ pageSize: req.params[2],
+ firstTxHash: req.params[3],
+ },
+ );
+ return { transactionReceipt: response };
+ } catch (err) {
+ throw new RuntimeError(err);
+ }
+ }
+ case IrohaQuery.GetBlock: {
+ try {
+ const response = await queries.getBlock(queryOptions, {
+ height: req.params[0],
+ });
+ return { transactionReceipt: response };
+ } catch (err) {
+ throw new RuntimeError(err);
+ }
+ }
+ case IrohaCommand.CallEngine: {
+ try {
+ const response = await commands.callEngine(commandOptions, {
+ type: req.params[0],
+ caller: req.params[1],
+ callee: req.params[2],
+ input: req.params[3],
+ });
+ return { transactionReceipt: response };
+ } catch (err) {
+ throw new RuntimeError(err);
+ }
+ }
+ case IrohaQuery.GetEngineReceipts: {
+ try {
+ const response = await queries.getEngineReceipts(queryOptions, {
+ txHash: req.params[0],
+ });
+ return { transactionReceipt: response };
+ } catch (err) {
+ throw new RuntimeError(err);
+ }
+ }
+ case IrohaQuery.FetchCommits: {
+ try {
+ const response = await queries.fetchCommits(queryOptions);
+ return { transactionReceipt: response };
+ } catch (err) {
+ throw new RuntimeError(err);
+ }
+ }
+ case IrohaCommand.AddPeer: {
+ try {
+ const response = await commands.addPeer(commandOptions, {
+ address: req.params[0],
+ peerKey: req.params[1],
+ });
+ return { transactionReceipt: response };
+ } catch (err) {
+ throw new RuntimeError(err);
+ }
+ }
+ case IrohaCommand.RemovePeer: {
+ try {
+ const response = await commands.removePeer(commandOptions, {
+ publicKey: req.params[0],
+ });
+ return { transactionReceipt: response };
+ } catch (err) {
+ throw new RuntimeError(err);
+ }
+ }
+ case IrohaQuery.GetPeers: {
+ try {
+ const response = await queries.getPeers(queryOptions);
+ return { transactionReceipt: response };
+ } catch (err) {
+ throw new RuntimeError(err);
+ }
+ }
+ default: {
+ throw new RuntimeError(
+ "command or query does not exist, or is not supported in current version",
+ );
+ }
+ }
+ }
+}
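
For context, a minimal sketch of driving the `transact()` dispatch above directly (without going through the HTTP endpoint), assuming a reachable Iroha Torii endpoint; all host, port, account and key values below are illustrative placeholders, not part of the change set:

```typescript
import { v4 as uuidv4 } from "uuid";
import { PluginRegistry } from "@hyperledger/cactus-core";
import {
  PluginLedgerConnectorIroha,
  IrohaQuery,
} from "@hyperledger/cactus-plugin-ledger-connector-iroha";

async function queryAdminAccount(): Promise<void> {
  const connector = new PluginLedgerConnectorIroha({
    instanceId: uuidv4(),
    rpcToriiPortHost: "localhost:50051", // placeholder Torii address
    pluginRegistry: new PluginRegistry(),
  });

  // Requests carry the base connection config plus positional params;
  // IrohaQuery.GetAccount expects params[0] to be the account id.
  const res = await connector.transact({
    commandName: IrohaQuery.GetAccount,
    baseConfig: {
      irohaHost: "localhost", // placeholder
      irohaPort: 50051, // placeholder
      creatorAccountId: "admin@test",
      privKey: ["f101...placeholder-private-key-hex"],
      quorum: 1,
      timeoutLimit: 5000,
      tls: false,
    },
    params: ["admin@test"],
  });
  console.log(res.transactionReceipt);
}

void queryAdminAccount();
```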
diff --git a/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/prometheus-exporter/data-fetcher.ts b/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/prometheus-exporter/data-fetcher.ts
new file mode 100644
index 00000000000..fc752190f8d
--- /dev/null
+++ b/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/prometheus-exporter/data-fetcher.ts
@@ -0,0 +1,10 @@
+import { Transactions } from "./response.type";
+
+import { totalTxCount, K_CACTUS_IROHA_TOTAL_TX_COUNT } from "./metrics";
+
+export async function collectMetrics(
+ transactions: Transactions,
+ ): Promise<void> {
+ transactions.counter++;
+ totalTxCount.labels(K_CACTUS_IROHA_TOTAL_TX_COUNT).set(transactions.counter);
+}
diff --git a/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/prometheus-exporter/metrics.ts b/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/prometheus-exporter/metrics.ts
new file mode 100644
index 00000000000..05f055d5aef
--- /dev/null
+++ b/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/prometheus-exporter/metrics.ts
@@ -0,0 +1,10 @@
+import { Gauge } from "prom-client";
+
+export const K_CACTUS_IROHA_TOTAL_TX_COUNT = "cactus_iroha_total_tx_count";
+
+export const totalTxCount = new Gauge({
+ registers: [],
+ name: "cactus_iroha_total_tx_count",
+ help: "Total transactions executed",
+ labelNames: ["type"],
+});
diff --git a/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/prometheus-exporter/prometheus-exporter.ts b/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/prometheus-exporter/prometheus-exporter.ts
new file mode 100644
index 00000000000..c4d2db9bc12
--- /dev/null
+++ b/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/prometheus-exporter/prometheus-exporter.ts
@@ -0,0 +1,39 @@
+import promClient, { Registry } from "prom-client";
+import { Transactions } from "./response.type";
+import { collectMetrics } from "./data-fetcher";
+import { K_CACTUS_IROHA_TOTAL_TX_COUNT } from "./metrics";
+import { totalTxCount } from "./metrics";
+
+export interface IPrometheusExporterOptions {
+ pollingIntervalInMin?: number;
+}
+
+export class PrometheusExporter {
+ public readonly metricsPollingIntervalInMin: number;
+ public readonly transactions: Transactions = { counter: 0 };
+ public readonly registry: Registry;
+
+ constructor(
+ public readonly prometheusExporterOptions: IPrometheusExporterOptions,
+ ) {
+ this.metricsPollingIntervalInMin =
+ prometheusExporterOptions.pollingIntervalInMin || 1;
+ this.registry = new Registry();
+ }
+
+ public addCurrentTransaction(): void {
+ collectMetrics(this.transactions);
+ }
+
+ public async getPrometheusMetrics(): Promise<string> {
+ const result = await this.registry.getSingleMetricAsString(
+ K_CACTUS_IROHA_TOTAL_TX_COUNT,
+ );
+ return result;
+ }
+
+ public startMetricsCollection(): void {
+ this.registry.registerMetric(totalTxCount);
+ promClient.collectDefaultMetrics({ register: this.registry });
+ }
+}
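
For context, a minimal sketch of the exporter lifecycle defined above (register the gauge, count transactions, scrape the exposition text), assuming the file is imported locally:

```typescript
import { PrometheusExporter } from "./prometheus-exporter";

async function demoMetrics(): Promise<void> {
  const exporter = new PrometheusExporter({ pollingIntervalInMin: 1 });
  exporter.startMetricsCollection(); // registers cactus_iroha_total_tx_count

  exporter.addCurrentTransaction(); // counter -> 1
  exporter.addCurrentTransaction(); // counter -> 2

  const metrics = await exporter.getPrometheusMetrics();
  // The returned exposition text contains a line similar to:
  // cactus_iroha_total_tx_count{type="cactus_iroha_total_tx_count"} 2
  console.log(metrics);
}

void demoMetrics();
```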
diff --git a/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/prometheus-exporter/response.type.ts b/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/prometheus-exporter/response.type.ts
new file mode 100644
index 00000000000..3f1bc7f4911
--- /dev/null
+++ b/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/prometheus-exporter/response.type.ts
@@ -0,0 +1,3 @@
+export type Transactions = {
+ counter: number;
+};
diff --git a/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/public-api.ts b/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/public-api.ts
new file mode 100755
index 00000000000..a36b7654813
--- /dev/null
+++ b/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/public-api.ts
@@ -0,0 +1,17 @@
+export {
+ E_KEYCHAIN_NOT_FOUND,
+ IPluginLedgerConnectorIrohaOptions,
+ PluginLedgerConnectorIroha,
+} from "./plugin-ledger-connector-iroha";
+export { PluginFactoryLedgerConnector } from "./plugin-factory-ledger-connector";
+
+import { IPluginFactoryOptions } from "@hyperledger/cactus-core-api";
+import { PluginFactoryLedgerConnector } from "./plugin-factory-ledger-connector";
+
+export * from "./generated/openapi/typescript-axios/api";
+
+export async function createPluginFactory(
+ pluginFactoryOptions: IPluginFactoryOptions,
+ ): Promise<PluginFactoryLedgerConnector> {
+ return new PluginFactoryLedgerConnector(pluginFactoryOptions);
+}
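
For context, a minimal sketch of using the `createPluginFactory()` helper exported above to obtain a connector instance, assuming a locally imported plugin; the Torii address is a placeholder:

```typescript
import { v4 as uuidv4 } from "uuid";
import { PluginRegistry } from "@hyperledger/cactus-core";
import { PluginImportType } from "@hyperledger/cactus-core-api";
import { createPluginFactory } from "@hyperledger/cactus-plugin-ledger-connector-iroha";

async function instantiateConnector(): Promise<void> {
  const factory = await createPluginFactory({
    pluginImportType: PluginImportType.Local,
  });
  const connector = await factory.create({
    rpcToriiPortHost: "localhost:50051", // placeholder
    instanceId: uuidv4(),
    pluginRegistry: new PluginRegistry(),
  });
  console.log(connector.getPackageName());
}

void instantiateConnector();
```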
diff --git a/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/web-services/get-prometheus-exporter-metrics-endpoint-v1.ts b/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/web-services/get-prometheus-exporter-metrics-endpoint-v1.ts
new file mode 100644
index 00000000000..f1cbeb82661
--- /dev/null
+++ b/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/web-services/get-prometheus-exporter-metrics-endpoint-v1.ts
@@ -0,0 +1,94 @@
+import type { Express, Request, Response } from "express";
+
+import { registerWebServiceEndpoint } from "@hyperledger/cactus-core";
+
+import OAS from "../../json/openapi.json";
+
+import {
+ IWebServiceEndpoint,
+ IExpressRequestHandler,
+ IEndpointAuthzOptions,
+} from "@hyperledger/cactus-core-api";
+
+import {
+ LogLevelDesc,
+ Logger,
+ LoggerProvider,
+ Checks,
+ IAsyncProvider,
+} from "@hyperledger/cactus-common";
+
+import { PluginLedgerConnectorIroha } from "../plugin-ledger-connector-iroha";
+
+export interface IGetPrometheusExporterMetricsEndpointV1Options {
+ connector: PluginLedgerConnectorIroha;
+ logLevel?: LogLevelDesc;
+}
+
+export class GetPrometheusExporterMetricsEndpointV1
+ implements IWebServiceEndpoint {
+ private readonly log: Logger;
+
+ constructor(
+ public readonly options: IGetPrometheusExporterMetricsEndpointV1Options,
+ ) {
+ const fnTag = "GetPrometheusExporterMetricsEndpointV1#constructor()";
+
+ Checks.truthy(options, `${fnTag} options`);
+ Checks.truthy(options.connector, `${fnTag} options.connector`);
+
+ const label = "get-prometheus-exporter-metrics-endpoint";
+ const level = options.logLevel || "INFO";
+ this.log = LoggerProvider.getOrCreate({ label, level });
+ }
+
+ getAuthorizationOptionsProvider(): IAsyncProvider<IEndpointAuthzOptions> {
+ // TODO: make this an injectable dependency in the constructor
+ return {
+ get: async () => ({
+ isProtected: true,
+ requiredRoles: [],
+ }),
+ };
+ }
+
+ public getExpressRequestHandler(): IExpressRequestHandler {
+ return this.handleRequest.bind(this);
+ }
+
+ getPath(): string {
+ return OAS.paths[
+ "/api/v1/plugins/@hyperledger/cactus-plugin-ledger-connector-iroha/get-prometheus-exporter-metrics"
+ ].get["x-hyperledger-cactus"].http.path;
+ }
+
+ getVerbLowerCase(): string {
+ return OAS.paths[
+ "/api/v1/plugins/@hyperledger/cactus-plugin-ledger-connector-iroha/get-prometheus-exporter-metrics"
+ ].get["x-hyperledger-cactus"].http.verbLowerCase;
+ }
+
+ public async registerExpress(
+ expressApp: Express,
+ ): Promise<IWebServiceEndpoint> {
+ await registerWebServiceEndpoint(expressApp, this);
+ return this;
+ }
+
+ async handleRequest(req: Request, res: Response): Promise<void> {
+ const fnTag = "GetPrometheusExporterMetrics#handleRequest()";
+ const verbUpper = this.getVerbLowerCase().toUpperCase();
+ this.log.debug(`${verbUpper} ${this.getPath()}`);
+
+ try {
+ const resBody = await this.options.connector.getPrometheusExporterMetrics();
+ res.status(200);
+ res.send(resBody);
+ } catch (ex) {
+ this.log.error(`${fnTag} failed to serve request`, ex);
+ res.status(500);
+ res.statusMessage = ex.message;
+ res.json({ error: ex.stack });
+ }
+ }
+}
diff --git a/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/web-services/run-transaction-endpoint.ts b/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/web-services/run-transaction-endpoint.ts
new file mode 100644
index 00000000000..53519852c42
--- /dev/null
+++ b/packages/cactus-plugin-ledger-connector-iroha/src/main/typescript/web-services/run-transaction-endpoint.ts
@@ -0,0 +1,119 @@
+import type { Express, Request, Response } from "express";
+
+import {
+ Logger,
+ Checks,
+ LogLevelDesc,
+ LoggerProvider,
+ IAsyncProvider,
+ Http405NotAllowedError,
+} from "@hyperledger/cactus-common";
+import {
+ IEndpointAuthzOptions,
+ IExpressRequestHandler,
+ IWebServiceEndpoint,
+} from "@hyperledger/cactus-core-api";
+import { registerWebServiceEndpoint } from "@hyperledger/cactus-core";
+
+import { PluginLedgerConnectorIroha } from "../plugin-ledger-connector-iroha";
+
+import OAS from "../../json/openapi.json";
+
+export interface IRunTransactionEndpointOptions {
+ logLevel?: LogLevelDesc;
+ connector: PluginLedgerConnectorIroha;
+}
+
+export class RunTransactionEndpoint implements IWebServiceEndpoint {
+ public static readonly CLASS_NAME = "RunTransactionEndpoint";
+
+ private readonly log: Logger;
+
+ public get className(): string {
+ return RunTransactionEndpoint.CLASS_NAME;
+ }
+
+ constructor(public readonly options: IRunTransactionEndpointOptions) {
+ const fnTag = `${this.className}#constructor()`;
+ Checks.truthy(options, `${fnTag} arg options`);
+ Checks.truthy(options.connector, `${fnTag} arg options.connector`);
+
+ const level = this.options.logLevel || "INFO";
+ const label = this.className;
+ this.log = LoggerProvider.getOrCreate({ level, label });
+ }
+
+ public getOasPath() {
+ return OAS.paths[
+ "/api/v1/plugins/@hyperledger/cactus-plugin-ledger-connector-iroha/run-transaction"
+ ];
+ }
+
+ public getPath(): string {
+ const apiPath = this.getOasPath();
+ return apiPath.post["x-hyperledger-cactus"].http.path;
+ }
+
+ public getVerbLowerCase(): string {
+ const apiPath = this.getOasPath();
+ return apiPath.post["x-hyperledger-cactus"].http.verbLowerCase;
+ }
+
+ public getOperationId(): string {
+ return this.getOasPath().post.operationId;
+ }
+
+ getAuthorizationOptionsProvider(): IAsyncProvider<IEndpointAuthzOptions> {
+ // TODO: make this an injectable dependency in the constructor
+ return {
+ get: async () => ({
+ isProtected: true,
+ requiredRoles: [],
+ }),
+ };
+ }
+
+ public async registerExpress(
+ expressApp: Express,
+ ): Promise<IWebServiceEndpoint> {
+ await registerWebServiceEndpoint(expressApp, this);
+ return this;
+ }
+
+ public getExpressRequestHandler(): IExpressRequestHandler {
+ return this.handleRequest.bind(this);
+ }
+
+ public async handleRequest(req: Request, res: Response): Promise<void> {
+ const reqTag = `${this.getVerbLowerCase()} - ${this.getPath()}`;
+ this.log.debug(reqTag);
+ const reqBody = req.body;
+ try {
+ const resBody = await this.options.connector.transact(reqBody);
+ res.json(resBody);
+ } catch (ex) {
+ if (ex instanceof Http405NotAllowedError) {
+ this.log.debug("Sending back HTTP405 Method Not Allowed error.");
+ res.status(405);
+ res.json(ex);
+ return;
+ }
+ /**
+ * An example output of the error message looks like:
+ * "Error: Error: Command response error: expected=COMMITTED, actual=REJECTED"
+ * @see https://iroha.readthedocs.io/en/main/develop/api/commands.html?highlight=CallEngine#id18
+ */
+ if (ex.message.includes("Error: Command response error")) {
+ this.log.debug("Sending back HTTP400 Bad Request error.");
+ res.status(400);
+ res.json(ex);
+ return;
+ }
+ this.log.error(`Crash while serving ${reqTag}`, ex);
+ res.status(500).json({
+ message: "Internal Server Error",
+ error: ex?.stack || ex?.message,
+ });
+ }
+ }
+}
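
For context, a minimal sketch of how a client might observe the status-code mapping implemented above (405 for disallowed commands, 400 for Iroha rejections, 500 otherwise) through the generated axios client; the API host and credential values are placeholders:

```typescript
import axios from "axios";
import { Configuration } from "@hyperledger/cactus-core-api";
import {
  DefaultApi as IrohaApi,
  IrohaCommand,
} from "@hyperledger/cactus-plugin-ledger-connector-iroha";

async function tryUnsupportedCommand(apiHost: string): Promise<void> {
  const apiClient = new IrohaApi(new Configuration({ basePath: apiHost }));
  try {
    await apiClient.runTransactionV1({
      commandName: IrohaCommand.SetSettingValue, // mapped to HTTP 405 above
      baseConfig: {
        irohaHost: "localhost", // placeholder
        irohaPort: 50051, // placeholder
        creatorAccountId: "admin@test",
        privKey: ["f101...placeholder-private-key-hex"],
        quorum: 1,
        timeoutLimit: 5000,
        tls: false,
      },
      params: [],
    });
  } catch (ex) {
    if (axios.isAxiosError(ex)) {
      // 405: method not allowed, 400: Iroha rejected the command, 500: crash
      console.log(ex.response?.status);
    }
  }
}
```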
diff --git a/packages/cactus-plugin-ledger-connector-iroha/src/test/typescript/integration/api-surface.test.ts b/packages/cactus-plugin-ledger-connector-iroha/src/test/typescript/integration/api-surface.test.ts
new file mode 100644
index 00000000000..a77b09a8292
--- /dev/null
+++ b/packages/cactus-plugin-ledger-connector-iroha/src/test/typescript/integration/api-surface.test.ts
@@ -0,0 +1,8 @@
+import test, { Test } from "tape-promise/tape";
+
+import * as apiSurface from "../../../main/typescript/public-api";
+
+test("Library can be loaded", (t: Test) => {
+ t.ok(apiSurface, "apiSurface truthy OK");
+ t.end();
+});
diff --git a/packages/cactus-plugin-ledger-connector-iroha/src/test/typescript/integration/iroha-iroha-transfer-example.test.ts b/packages/cactus-plugin-ledger-connector-iroha/src/test/typescript/integration/iroha-iroha-transfer-example.test.ts
new file mode 100644
index 00000000000..0d6b7c8f308
--- /dev/null
+++ b/packages/cactus-plugin-ledger-connector-iroha/src/test/typescript/integration/iroha-iroha-transfer-example.test.ts
@@ -0,0 +1,353 @@
+import http from "http";
+import { AddressInfo } from "net";
+import test, { Test } from "tape-promise/tape";
+import { v4 as uuidv4 } from "uuid";
+import { v4 as internalIpV4 } from "internal-ip";
+import bodyParser from "body-parser";
+import express from "express";
+import {
+ Containers,
+ pruneDockerAllIfGithubAction,
+ PostgresTestContainer,
+ IrohaTestLedger,
+} from "@hyperledger/cactus-test-tooling";
+import { PluginRegistry } from "@hyperledger/cactus-core";
+import { PluginImportType } from "@hyperledger/cactus-core-api";
+
+import {
+ IListenOptions,
+ LogLevelDesc,
+ Servers,
+} from "@hyperledger/cactus-common";
+import { RuntimeError } from "run-time-error";
+import {
+ PluginLedgerConnectorIroha,
+ DefaultApi as IrohaApi,
+ PluginFactoryLedgerConnector,
+} from "../../../main/typescript/public-api";
+
+import { Configuration } from "@hyperledger/cactus-core-api";
+
+import {
+ IrohaCommand,
+ IrohaQuery,
+ KeyPair,
+} from "../../../main/typescript/generated/openapi/typescript-axios";
+import cryptoHelper from "iroha-helpers-ts/lib/cryptoHelper";
+
+const testCase = "runs tx on an Iroha v1.2.0 ledger";
+const logLevel: LogLevelDesc = "ERROR";
+
+test.onFailure(async () => {
+ await Containers.logDiagnostics({ logLevel });
+});
+
+test("BEFORE " + testCase, async (t: Test) => {
+ const pruning = pruneDockerAllIfGithubAction({ logLevel });
+ await t.doesNotReject(pruning, "Pruning didn't throw OK");
+ t.end();
+});
+
+//Start Postgres databases.
+test(testCase, async (t: Test) => {
+ const postgres1 = new PostgresTestContainer({ logLevel });
+ const postgres2 = new PostgresTestContainer({ logLevel });
+ test.onFinish(async () => {
+ await postgres1.stop();
+ await postgres2.stop();
+ });
+
+ await postgres1.start();
+ await postgres2.start();
+ const postgresHost1 = await internalIpV4();
+ const postgresPort1 = await postgres1.getPostgresPort();
+ const postgresHost2 = await internalIpV4();
+ const postgresPort2 = await postgres2.getPostgresPort();
+ if (!postgresHost1 || !postgresHost2) {
+ throw new RuntimeError("Could not determine the internal IPV4 address.");
+ }
+
+ //Start the 1st Iroha ledger with default priv/pub key pairs.
+ const iroha1 = new IrohaTestLedger({
+ postgresHost: postgresHost1,
+ postgresPort: postgresPort1,
+ logLevel: logLevel,
+ });
+
+ //Start the 2nd Iroha ledger with randomly generated priv/pub key pairs.
+ const keyPairA: KeyPair = cryptoHelper.generateKeyPair();
+ const adminPriv2 = keyPairA.privateKey;
+ const adminPub2 = keyPairA.publicKey;
+ const keyPairB: KeyPair = cryptoHelper.generateKeyPair();
+ const nodePriv2 = keyPairB.privateKey;
+ const nodePub2 = keyPairB.publicKey;
+ const iroha2 = new IrohaTestLedger({
+ adminPriv: adminPriv2,
+ adminPub: adminPub2,
+ nodePriv: nodePriv2,
+ nodePub: nodePub2,
+ postgresHost: postgresHost2,
+ postgresPort: postgresPort2,
+ logLevel: logLevel,
+ });
+
+ test.onFinish(async () => {
+ await iroha1.stop();
+ await iroha2.stop();
+ });
+ await iroha1.start();
+ await iroha2.start();
+ const irohaHost1 = await internalIpV4();
+ const irohaHost2 = await internalIpV4();
+ if (!irohaHost1 || !irohaHost2) {
+ throw new RuntimeError("Could not determine the internal IPV4 address.");
+ }
+ const irohaPort1 = await iroha1.getRpcToriiPort();
+ const rpcToriiPortHost1 = await iroha1.getRpcToriiPortHost();
+ const irohaPort2 = await iroha2.getRpcToriiPort();
+ const rpcToriiPortHost2 = await iroha2.getRpcToriiPortHost();
+
+ //Start 2 connectors for 2 Iroha ledgers.
+ const factory1 = new PluginFactoryLedgerConnector({
+ pluginImportType: PluginImportType.Local,
+ });
+ const connector1: PluginLedgerConnectorIroha = await factory1.create({
+ rpcToriiPortHost: rpcToriiPortHost1,
+ instanceId: uuidv4(),
+ pluginRegistry: new PluginRegistry(),
+ });
+ const factory2 = new PluginFactoryLedgerConnector({
+ pluginImportType: PluginImportType.Local,
+ });
+ const connector2: PluginLedgerConnectorIroha = await factory2.create({
+ rpcToriiPortHost: rpcToriiPortHost2,
+ instanceId: uuidv4(),
+ pluginRegistry: new PluginRegistry(),
+ });
+ //register the 2 connectors with 2 express services
+ const expressApp1 = express();
+ expressApp1.use(bodyParser.json({ limit: "250mb" }));
+ const server1 = http.createServer(expressApp1);
+ const listenOptions1: IListenOptions = {
+ hostname: "0.0.0.0",
+ port: 0,
+ server: server1,
+ };
+ const addressInfo1 = (await Servers.listen(listenOptions1)) as AddressInfo;
+ test.onFinish(async () => await Servers.shutdown(server1));
+ const apiHost1 = `http://${addressInfo1.address}:${addressInfo1.port}`;
+ const apiConfig1 = new Configuration({ basePath: apiHost1 });
+ const apiClient1 = new IrohaApi(apiConfig1);
+
+ const expressApp2 = express();
+ expressApp2.use(bodyParser.json({ limit: "250mb" }));
+ const server2 = http.createServer(expressApp2);
+ const listenOptions2: IListenOptions = {
+ hostname: "0.0.0.0",
+ port: 0,
+ server: server2,
+ };
+ const addressInfo2 = (await Servers.listen(listenOptions2)) as AddressInfo;
+ test.onFinish(async () => await Servers.shutdown(server2));
+ const apiHost2 = `http://${addressInfo2.address}:${addressInfo2.port}`;
+ const apiConfig2 = new Configuration({ basePath: apiHost2 });
+ const apiClient2 = new IrohaApi(apiConfig2);
+
+ await connector1.getOrCreateWebServices();
+ await connector1.registerWebServices(expressApp1);
+ await connector2.getOrCreateWebServices();
+ await connector2.registerWebServices(expressApp2);
+
+ const adminPriv1 = await iroha1.getGenesisAccountPrivKey();
+ const admin1 = iroha1.getDefaultAdminAccount();
+ const domain1 = iroha1.getDefaultDomain();
+ const adminID1 = `${admin1}@${domain1}`;
+ const admin2 = iroha2.getDefaultAdminAccount();
+ const domain2 = iroha2.getDefaultDomain();
+ const adminID2 = `${admin2}@${domain2}`;
+
+ //Setup: create coolcoin#test for Iroha1
+ const asset = "coolcoin";
+ const assetID1 = `${asset}#${domain1}`;
+ const assetID2 = `${asset}#${domain1}`;
+ {
+ const req = {
+ commandName: IrohaCommand.CreateAsset,
+ baseConfig: {
+ irohaHost: irohaHost1,
+ irohaPort: irohaPort1,
+ creatorAccountId: adminID1,
+ privKey: [adminPriv1],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [asset, domain1, 3],
+ };
+ const res = await apiClient1.runTransactionV1(req);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
+ t.equal(res.data.transactionReceipt.status, "COMMITTED");
+ }
+
+ //Verify the generated priv/pub keys are equivalent to those pulled from the ledger.
+ {
+ const adminPriv2_ = await iroha2.getGenesisAccountPrivKey();
+ const adminPub2_ = await iroha2.getGenesisAccountPubKey();
+ const { publicKey, privateKey } = await iroha2.getNodeKeyPair();
+ t.equal(adminPriv2, adminPriv2_);
+ t.equal(adminPub2, adminPub2_);
+ t.equal(nodePriv2, privateKey);
+ t.equal(nodePub2, publicKey);
+ }
+
+ //Setup: create coolcoin#test for Iroha2
+ {
+ const req = {
+ commandName: IrohaCommand.CreateAsset,
+ baseConfig: {
+ irohaHost: irohaHost2,
+ irohaPort: irohaPort2,
+ creatorAccountId: adminID2,
+ privKey: [adminPriv2],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [asset, domain2, 3],
+ };
+ const res = await apiClient2.runTransactionV1(req);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
+ t.equal(res.data.transactionReceipt.status, "COMMITTED");
+ }
+ //Iroha1's admin is initialized with 100 (coolcoin#test).
+ {
+ const req = {
+ commandName: IrohaCommand.AddAssetQuantity,
+ baseConfig: {
+ irohaHost: irohaHost1,
+ irohaPort: irohaPort1,
+ creatorAccountId: adminID1,
+ privKey: [adminPriv1],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [assetID1, "100.000"],
+ };
+ const res = await apiClient1.runTransactionV1(req);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
+ t.equal(res.data.transactionReceipt.status, "COMMITTED");
+ }
+
+ // Iroha1's admin transfers 30 (coolcoin#test) to Iroha2's admin.
+ // i.e., Iroha1's admin subtracts 30 (coolcoin#test).
+ {
+ const req = {
+ commandName: IrohaCommand.SubtractAssetQuantity,
+ baseConfig: {
+ irohaHost: irohaHost1,
+ irohaPort: irohaPort1,
+ creatorAccountId: adminID1,
+ privKey: [adminPriv1],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [assetID1, "30.000"],
+ };
+ const res = await apiClient1.runTransactionV1(req);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
+ t.equal(res.data.transactionReceipt.status, "COMMITTED");
+ }
+ //i.e., Iroha2's admin adds 30 (coolcoin#test).
+ {
+ const req = {
+ commandName: IrohaCommand.AddAssetQuantity,
+ baseConfig: {
+ irohaHost: irohaHost2,
+ irohaPort: irohaPort2,
+ creatorAccountId: adminID2,
+ privKey: [adminPriv2],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [assetID2, "30.000"],
+ };
+ const res = await apiClient2.runTransactionV1(req);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
+ t.equal(res.data.transactionReceipt.status, "COMMITTED");
+ }
+ //Verification: iroha1's admin has 70 (coolcoin#test).
+ {
+ const req = {
+ commandName: IrohaQuery.GetAccountAssets,
+ baseConfig: {
+ irohaHost: irohaHost1,
+ irohaPort: irohaPort1,
+ creatorAccountId: adminID1,
+ privKey: [adminPriv1],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [adminID1, 10, assetID1],
+ };
+ const res = await apiClient1.runTransactionV1(req);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
+ t.deepEqual(res.data.transactionReceipt, [
+ {
+ assetId: assetID1,
+ accountId: adminID1,
+ balance: "70.000",
+ },
+ ]);
+ }
+ //Verification: iroha2's admin has 30 (coolcoin#test).
+ {
+ const req = {
+ commandName: IrohaQuery.GetAccountAssets,
+ baseConfig: {
+ irohaHost: irohaHost2,
+ irohaPort: irohaPort2,
+ creatorAccountId: adminID2,
+ privKey: [adminPriv2],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [adminID2, 10, assetID2],
+ };
+ const res = await apiClient2.runTransactionV1(req);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
+ t.deepEqual(res.data.transactionReceipt, [
+ {
+ assetId: assetID2,
+ accountId: adminID2,
+ balance: "30.000",
+ },
+ ]);
+ }
+
+ t.end();
+});
+
+test("AFTER " + testCase, async (t: Test) => {
+ const pruning = pruneDockerAllIfGithubAction({ logLevel });
+ await t.doesNotReject(pruning, "Pruning didn't throw OK");
+ t.end();
+});
diff --git a/packages/cactus-plugin-ledger-connector-iroha/src/test/typescript/integration/run-transaction-endpoint-v1.test.ts b/packages/cactus-plugin-ledger-connector-iroha/src/test/typescript/integration/run-transaction-endpoint-v1.test.ts
new file mode 100644
index 00000000000..47f41395511
--- /dev/null
+++ b/packages/cactus-plugin-ledger-connector-iroha/src/test/typescript/integration/run-transaction-endpoint-v1.test.ts
@@ -0,0 +1,1148 @@
+import http from "http";
+import { AddressInfo } from "net";
+import test, { Test } from "tape-promise/tape";
+import { v4 as uuidv4 } from "uuid";
+import { v4 as internalIpV4 } from "internal-ip";
+import bodyParser from "body-parser";
+import express from "express";
+
+import {
+ Containers,
+ pruneDockerAllIfGithubAction,
+ PostgresTestContainer,
+ IrohaTestLedger,
+} from "@hyperledger/cactus-test-tooling";
+import { PluginRegistry } from "@hyperledger/cactus-core";
+import { PluginImportType } from "@hyperledger/cactus-core-api";
+
+import {
+ IListenOptions,
+ LogLevelDesc,
+ Servers,
+} from "@hyperledger/cactus-common";
+import { RuntimeError } from "run-time-error";
+import {
+ PluginLedgerConnectorIroha,
+ DefaultApi as IrohaApi,
+ PluginFactoryLedgerConnector,
+} from "../../../main/typescript/public-api";
+
+import { Configuration } from "@hyperledger/cactus-core-api";
+
+import {
+ IrohaCommand,
+ IrohaQuery,
+ KeyPair,
+} from "../../../main/typescript/generated/openapi/typescript-axios";
+import cryptoHelper from "iroha-helpers-ts/lib/cryptoHelper";
+
+const testCase = "runs tx on an Iroha v1.2.0 ledger";
+const logLevel: LogLevelDesc = "INFO";
+
+test.onFailure(async () => {
+ await Containers.logDiagnostics({ logLevel });
+});
+
+test("BEFORE " + testCase, async (t: Test) => {
+ const pruning = pruneDockerAllIfGithubAction({ logLevel });
+ await t.doesNotReject(pruning, "Pruning didn't throw OK");
+ t.end();
+});
+
+test(testCase, async (t: Test) => {
+ const postgres = new PostgresTestContainer({ logLevel });
+
+ test.onFinish(async () => {
+ await postgres.stop();
+ });
+
+ await postgres.start();
+ const postgresHost = await internalIpV4();
+ const postgresPort = await postgres.getPostgresPort();
+ const irohaHost = await internalIpV4();
+ if (!postgresHost || !irohaHost) {
+ throw new RuntimeError("Could not determine the internal IPV4 address.");
+ }
+
+ const keyPair1: KeyPair = cryptoHelper.generateKeyPair();
+ const adminPriv = keyPair1.privateKey;
+ const adminPubA = keyPair1.publicKey;
+ const keyPair2: KeyPair = cryptoHelper.generateKeyPair();
+ const nodePrivA = keyPair2.privateKey;
+ const nodePubA = keyPair2.publicKey;
+ const keyPair3: KeyPair = cryptoHelper.generateKeyPair();
+ const userPub = keyPair3.publicKey;
+ const iroha = new IrohaTestLedger({
+ adminPriv: adminPriv,
+ adminPub: adminPubA,
+ nodePriv: nodePrivA,
+ nodePub: nodePubA,
+ postgresHost: postgresHost,
+ postgresPort: postgresPort,
+ logLevel: logLevel,
+ });
+
+ test.onFinish(async () => {
+ await iroha.stop();
+ });
+ await iroha.start();
+ const irohaPort = await iroha.getRpcToriiPort();
+ const rpcToriiPortHost = await iroha.getRpcToriiPortHost();
+ const internalAddr = iroha.getInternalAddr();
+ const factory = new PluginFactoryLedgerConnector({
+ pluginImportType: PluginImportType.Local,
+ });
+
+ const connector: PluginLedgerConnectorIroha = await factory.create({
+ rpcToriiPortHost,
+ instanceId: uuidv4(),
+ pluginRegistry: new PluginRegistry(),
+ });
+
+ const expressApp = express();
+ expressApp.use(bodyParser.json({ limit: "250mb" }));
+ const server = http.createServer(expressApp);
+ const listenOptions: IListenOptions = {
+ hostname: "0.0.0.0",
+ port: 0,
+ server,
+ };
+ const addressInfo = (await Servers.listen(listenOptions)) as AddressInfo;
+ test.onFinish(async () => await Servers.shutdown(server));
+ const { address, port } = addressInfo;
+ const apiHost = `http://${address}:${port}`;
+ const apiConfig = new Configuration({ basePath: apiHost });
+ const apiClient = new IrohaApi(apiConfig);
+
+ await connector.getOrCreateWebServices();
+ await connector.registerWebServices(expressApp);
+
+ let firstTxHash;
+ const admin = iroha.getDefaultAdminAccount();
+ const domain = iroha.getDefaultDomain();
+ const adminID = `${admin}@${domain}`;
+ const user = uuidv4().substring(0, 5);
+ /**
+ * An account in Iroha ledger is formatted as: `account_name@domain_id`
+ * @see https://iroha.readthedocs.io/en/main/concepts_architecture/er_model.html?highlight=%3Casset_name%3E%23%3Cdomain_id%3E#account
+ */
+ const userID = `${user}@${domain}`;
+ {
+ const req = {
+ commandName: IrohaCommand.CreateAccount,
+ baseConfig: {
+ irohaHost: irohaHost,
+ irohaPort: irohaPort,
+ creatorAccountId: adminID,
+ privKey: [adminPriv],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [user, domain, userPub],
+ };
+ const res = await apiClient.runTransactionV1(req);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
+ t.equal(res.data.transactionReceipt.status, "COMMITTED");
+ }
+
+ {
+ const req = {
+ commandName: IrohaQuery.GetAccount,
+ baseConfig: {
+ irohaHost: irohaHost,
+ irohaPort: irohaPort,
+ creatorAccountId: adminID,
+ privKey: [adminPriv],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [adminID],
+ };
+ const res = await apiClient.runTransactionV1(req);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
+ t.deepEqual(res.data.transactionReceipt, {
+ accountId: adminID,
+ domainId: domain,
+ quorum: 1,
+ jsonData: "{}",
+ });
+ }
+
+ const moneyCreatorRole = "money_creator";
+ const newDomain = "test2";
+ {
+ const req = {
+ commandName: IrohaCommand.CreateDomain,
+ baseConfig: {
+ irohaHost: irohaHost,
+ irohaPort: irohaPort,
+ creatorAccountId: adminID,
+ privKey: [adminPriv],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [newDomain, moneyCreatorRole],
+ };
+ const res = await apiClient.runTransactionV1(req);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
+ t.equal(res.data.transactionReceipt.status, "COMMITTED");
+ }
+
+ const asset = "coolcoin";
+ /**
+ * An asset in Iroha ledger is formatted as: `asset_name#domain_id`
+ * @see https://iroha.readthedocs.io/en/main/concepts_architecture/er_model.html?highlight=%3Casset_name%3E%23%3Cdomain_id%3E#asset
+ */
+ const assetID = `${asset}#${domain}`;
+ {
+ const req = {
+ commandName: IrohaCommand.CreateAsset,
+ baseConfig: {
+ irohaHost: irohaHost,
+ irohaPort: irohaPort,
+ creatorAccountId: adminID,
+ privKey: [adminPriv],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [asset, domain, 3],
+ };
+ const res = await apiClient.runTransactionV1(req);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
+ t.equal(res.data.transactionReceipt.status, "COMMITTED");
+ }
+
+ {
+ const req = {
+ commandName: IrohaQuery.GetAssetInfo,
+ baseConfig: {
+ irohaHost: irohaHost,
+ irohaPort: irohaPort,
+ creatorAccountId: adminID,
+ privKey: [adminPriv],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [assetID],
+ };
+ const res = await apiClient.runTransactionV1(req);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
+ t.deepEqual(res.data.transactionReceipt, {
+ assetId: assetID,
+ domainId: domain,
+ precision: 3,
+ });
+ }
+
+ {
+ const req = {
+ commandName: IrohaCommand.AddAssetQuantity,
+ baseConfig: {
+ irohaHost: irohaHost,
+ irohaPort: irohaPort,
+ creatorAccountId: adminID,
+ privKey: [adminPriv],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [assetID, "123.123"],
+ };
+ const res = await apiClient.runTransactionV1(req);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
+ t.equal(res.data.transactionReceipt.status, "COMMITTED");
+ }
+
+ const txDescription = uuidv4().substring(0, 5) + Date.now();
+ {
+ const req = {
+ commandName: IrohaCommand.TransferAsset,
+ baseConfig: {
+ irohaHost: irohaHost,
+ irohaPort: irohaPort,
+ creatorAccountId: adminID,
+ privKey: [adminPriv],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [adminID, userID, assetID, txDescription, "57.75"],
+ };
+ const res = await apiClient.runTransactionV1(req);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
+ t.equal(res.data.transactionReceipt.status, "COMMITTED");
+ firstTxHash = res.data.transactionReceipt.txHash;
+ console.log(firstTxHash);
+ }
+
+ {
+ const req = {
+ commandName: IrohaQuery.GetAccountAssets,
+ baseConfig: {
+ irohaHost: irohaHost,
+ irohaPort: irohaPort,
+ creatorAccountId: adminID,
+ privKey: [adminPriv],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [adminID, 100, assetID],
+ };
+ const res = await apiClient.runTransactionV1(req);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
+ t.deepEqual(res.data.transactionReceipt, [
+ {
+ assetId: assetID,
+ accountId: adminID,
+ balance: "65.373",
+ },
+ ]);
+ }
+
+ {
+ const req = {
+ commandName: IrohaQuery.GetAccountAssets,
+ baseConfig: {
+ irohaHost: irohaHost,
+ irohaPort: irohaPort,
+ creatorAccountId: adminID,
+ privKey: [adminPriv],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [userID, 100, assetID],
+ };
+ const res = await apiClient.runTransactionV1(req);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
+ t.deepEqual(res.data.transactionReceipt, [
+ {
+ assetId: assetID,
+ accountId: userID,
+ balance: "57.75",
+ },
+ ]);
+ }
+
+ {
+ const req = {
+ commandName: IrohaCommand.SubtractAssetQuantity,
+ baseConfig: {
+ irohaHost: irohaHost,
+ irohaPort: irohaPort,
+ creatorAccountId: adminID,
+ privKey: [adminPriv],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [assetID, "30.123"],
+ };
+ const res = await apiClient.runTransactionV1(req);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
+ t.equal(res.data.transactionReceipt.status, "COMMITTED");
+ }
+
+ {
+ const req = {
+ commandName: IrohaQuery.GetAccountAssets,
+ baseConfig: {
+ irohaHost: irohaHost,
+ irohaPort: irohaPort,
+ creatorAccountId: adminID,
+ privKey: [adminPriv],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [adminID, 100, assetID],
+ };
+ const res = await apiClient.runTransactionV1(req);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
+ t.deepEqual(res.data.transactionReceipt, [
+ {
+ assetId: assetID,
+ accountId: adminID,
+ balance: "35.250",
+ },
+ ]);
+ }
+
+ {
+ const req = {
+ commandName: IrohaQuery.GetSignatories,
+ baseConfig: {
+ irohaHost: irohaHost,
+ irohaPort: irohaPort,
+ creatorAccountId: adminID,
+ privKey: [adminPriv],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [adminID],
+ };
+ const res = await apiClient.runTransactionV1(req);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
+ t.deepEquals(res.data.transactionReceipt, [adminPubA]);
+ }
+
+ const keyPair4: KeyPair = cryptoHelper.generateKeyPair();
+ const adminPubB = keyPair4.publicKey;
+ {
+ const req = {
+ commandName: IrohaCommand.AddSignatory,
+ baseConfig: {
+ irohaHost: irohaHost,
+ irohaPort: irohaPort,
+ creatorAccountId: adminID,
+ privKey: [adminPriv],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [adminID, adminPubB],
+ };
+ const res = await apiClient.runTransactionV1(req);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
+ t.equal(res.data.transactionReceipt.status, "COMMITTED");
+ }
+
+ {
+ const req = {
+ commandName: IrohaQuery.GetSignatories,
+ baseConfig: {
+ irohaHost: irohaHost,
+ irohaPort: irohaPort,
+ creatorAccountId: adminID,
+ privKey: [adminPriv],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [adminID],
+ };
+ const res = await apiClient.runTransactionV1(req);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
+ t.true(res.data.transactionReceipt.includes(adminPubA));
+ t.true(res.data.transactionReceipt.includes(adminPubB));
+ }
+
+ {
+ const req = {
+ commandName: IrohaCommand.RemoveSignatory,
+ baseConfig: {
+ irohaHost: irohaHost,
+ irohaPort: irohaPort,
+ creatorAccountId: adminID,
+ privKey: [adminPriv],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [adminID, adminPubB],
+ };
+ const res = await apiClient.runTransactionV1(req);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
+ t.equal(res.data.transactionReceipt.status, "COMMITTED");
+ }
+
+ {
+ const req = {
+ commandName: IrohaQuery.GetSignatories,
+ baseConfig: {
+ irohaHost: irohaHost,
+ irohaPort: irohaPort,
+ creatorAccountId: adminID,
+ privKey: [adminPriv],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [adminID],
+ };
+ const res = await apiClient.runTransactionV1(req);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
+ t.deepEqual(res.data.transactionReceipt, [adminPubA]);
+ }
+
+ {
+ const req = {
+ commandName: IrohaQuery.GetRoles,
+ baseConfig: {
+ irohaHost: irohaHost,
+ irohaPort: irohaPort,
+ creatorAccountId: adminID,
+ privKey: [adminPriv],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [],
+ };
+ const res = await apiClient.runTransactionV1(req);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
+ t.deepEqual(res.data.transactionReceipt, [
+ "cactus_test",
+ "cactus_test_full",
+ "admin",
+ "user",
+ moneyCreatorRole,
+ ]);
+ }
+
+ {
+ const req = {
+ commandName: IrohaQuery.GetRolePermissions,
+ baseConfig: {
+ irohaHost: irohaHost,
+ irohaPort: irohaPort,
+ creatorAccountId: adminID,
+ privKey: [adminPriv],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [moneyCreatorRole],
+ };
+ const res = await apiClient.runTransactionV1(req);
+ console.log(res.data.transactionReceipt);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
+ /**
+ * Iroha Javascript SDK maps each permission to an index number
+ * @see https://github.com/hyperledger/iroha-javascript/blob/master/src/proto/primitive_pb.d.ts#L193-L247
+ */
+ const permissionArr = [3, 11, 12, 13];
+ t.deepEqual(res.data.transactionReceipt, permissionArr);
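+
+    /**
+     * Illustrative sketch (not part of the original test): assuming the enum ordering in
+     * the linked primitive_pb.d.ts, the indices above can be decoded back into permission
+     * names, which for [3, 11, 12, 13] would be the typical money-creator permissions:
+     *
+     *   const rolePermissionNames: Record<number, string> = {
+     *     3: "can_add_asset_qty",
+     *     11: "can_create_asset",
+     *     12: "can_transfer",
+     *     13: "can_receive",
+     *   };
+     *   const decoded = permissionArr.map((i) => rolePermissionNames[i]);
+     */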
+ }
+
+ {
+ const req = {
+ commandName: IrohaQuery.GetTransactions,
+ baseConfig: {
+ irohaHost: irohaHost,
+ irohaPort: irohaPort,
+ creatorAccountId: adminID,
+ privKey: [adminPriv],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ /**
+       * params[0] needs to be an array of transaction hashes.
+ * Example: [[TxHash1, TxHash2, TxHash3]]
+ * @see https://iroha.readthedocs.io/en/main/develop/api/queries.html?highlight=GetTransactions#id25
+ */
+ params: [[firstTxHash]],
+ };
+ const res = await apiClient.runTransactionV1(req);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
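+    // Note (added for clarity): the receipt appears to be the raw protobuf response in its
+    // nested "array" form; the chained indexing below drills down to the last command of the
+    // returned transaction, compared as [srcAccountId, destAccountId, assetId, description, amount].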
+ t.deepEqual(
+ res.data.transactionReceipt.array[0][0][0][0][0][0].slice(-1)[0],
+ [adminID, userID, assetID, txDescription, "57.75"],
+ );
+ }
+
+ {
+ const req = {
+ commandName: IrohaQuery.GetAccountTransactions,
+ baseConfig: {
+ irohaHost: irohaHost,
+ irohaPort: irohaPort,
+ creatorAccountId: adminID,
+ privKey: [adminPriv],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [adminID, 100, firstTxHash],
+ };
+ const res = await apiClient.runTransactionV1(req);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
+ t.deepEqual(
+ res.data.transactionReceipt.transactionsList[0].payload.reducedPayload
+ .commandsList,
+ [
+ {
+ transferAsset: {
+ srcAccountId: adminID,
+ destAccountId: userID,
+ assetId: assetID,
+ description: txDescription,
+ amount: "57.75",
+ },
+ },
+ ],
+ );
+ t.equal(
+ res.data.transactionReceipt.transactionsList[0].signaturesList[0]
+ .publicKey,
+ adminPubA,
+ );
+ }
+
+ {
+ const req = {
+ commandName: IrohaQuery.GetAccountAssetTransactions,
+ baseConfig: {
+ irohaHost: irohaHost,
+ irohaPort: irohaPort,
+ creatorAccountId: adminID,
+ privKey: [adminPriv],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [adminID, assetID, 100, undefined],
+ };
+ const res = await apiClient.runTransactionV1(req);
+ t.deepEqual(
+ res.data.transactionReceipt.transactionsList[0].payload.reducedPayload
+ .commandsList,
+ [
+ {
+ transferAsset: {
+ srcAccountId: adminID,
+ destAccountId: userID,
+ assetId: assetID,
+ description: txDescription,
+ amount: "57.75",
+ },
+ },
+ ],
+ );
+ t.equal(
+ res.data.transactionReceipt.transactionsList[0].signaturesList[0]
+ .publicKey,
+ adminPubA,
+ );
+ }
+
+ {
+ const req = {
+ commandName: IrohaQuery.GetPeers,
+ baseConfig: {
+ irohaHost: irohaHost,
+ irohaPort: irohaPort,
+ creatorAccountId: adminID,
+ privKey: [adminPriv],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [],
+ };
+ const res = await apiClient.runTransactionV1(req);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
+ t.deepEqual(res.data.transactionReceipt, [
+ {
+ address: internalAddr,
+ peerKey: nodePubA,
+ tlsCertificate: "",
+ },
+ ]);
+ }
+
+ {
+ const req = {
+ commandName: IrohaQuery.GetBlock,
+ baseConfig: {
+ irohaHost: irohaHost,
+ irohaPort: irohaPort,
+ creatorAccountId: adminID,
+ privKey: [adminPriv],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [1],
+ };
+ const res = await apiClient.runTransactionV1(req);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
+ t.deepEqual(
+ res.data.transactionReceipt.payload.transactionsList[0].payload
+ .reducedPayload.commandsList[0].addPeer.peer,
+ {
+ address: internalAddr,
+ peerKey: nodePubA,
+ tlsCertificate: "",
+ },
+ );
+ }
+
+ {
+ const req = {
+ commandName: IrohaCommand.AppendRole,
+ baseConfig: {
+ irohaHost: irohaHost,
+ irohaPort: irohaPort,
+ creatorAccountId: adminID,
+ privKey: [adminPriv],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [userID, moneyCreatorRole],
+ };
+ const res = await apiClient.runTransactionV1(req);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
+ t.equal(res.data.transactionReceipt.status, "COMMITTED");
+ }
+
+ {
+ const req = {
+ commandName: IrohaCommand.DetachRole,
+ baseConfig: {
+ irohaHost: irohaHost,
+ irohaPort: irohaPort,
+ creatorAccountId: adminID,
+ privKey: [adminPriv],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [userID, moneyCreatorRole],
+ };
+ const res = await apiClient.runTransactionV1(req);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
+ t.equal(res.data.transactionReceipt.status, "COMMITTED");
+ }
+
+ const testRole = uuidv4().substring(0, 5);
+ {
+ const req = {
+ commandName: IrohaCommand.CreateRole,
+ baseConfig: {
+ irohaHost: irohaHost,
+ irohaPort: irohaPort,
+ creatorAccountId: adminID,
+ privKey: [adminPriv],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [testRole, [6, 7]],
+ };
+ const res = await apiClient.runTransactionV1(req);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
+ t.equal(res.data.transactionReceipt.status, "COMMITTED");
+ }
+
+ {
+ const req = {
+ commandName: IrohaCommand.GrantPermission,
+ baseConfig: {
+ irohaHost: irohaHost,
+ irohaPort: irohaPort,
+ creatorAccountId: adminID,
+ privKey: [adminPriv],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [userID, "CAN_CALL_ENGINE_ON_MY_BEHALF"],
+ };
+ const res = await apiClient.runTransactionV1(req);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
+ t.equal(res.data.transactionReceipt.status, "COMMITTED");
+ }
+
+ {
+ const req = {
+ commandName: IrohaCommand.RevokePermission,
+ baseConfig: {
+ irohaHost: irohaHost,
+ irohaPort: irohaPort,
+ creatorAccountId: adminID,
+ privKey: [adminPriv],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [userID, "CAN_CALL_ENGINE_ON_MY_BEHALF"],
+ };
+ const res = await apiClient.runTransactionV1(req);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
+ t.equal(res.data.transactionReceipt.status, "COMMITTED");
+ }
+
+ {
+ const req = {
+ commandName: IrohaCommand.SetAccountDetail,
+ baseConfig: {
+ irohaHost: irohaHost,
+ irohaPort: irohaPort,
+ creatorAccountId: adminID,
+ privKey: [adminPriv],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [userID, "age", "18"],
+ };
+ const res = await apiClient.runTransactionV1(req);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
+ t.equal(res.data.transactionReceipt.status, "COMMITTED");
+ }
+
+ {
+ const req = {
+ commandName: IrohaQuery.GetAccountDetail,
+ baseConfig: {
+ irohaHost: irohaHost,
+ irohaPort: irohaPort,
+ creatorAccountId: adminID,
+ privKey: [adminPriv],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [userID, "age", adminID, 1, "age", adminID],
+ };
+ const res = await apiClient.runTransactionV1(req);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
+ t.deepEqual(res.data.transactionReceipt, {
+ "admin@test": { age: "18" },
+ });
+ }
+
+ {
+ const req = {
+ commandName: IrohaCommand.CompareAndSetAccountDetail,
+ baseConfig: {
+ irohaHost: irohaHost,
+ irohaPort: irohaPort,
+ creatorAccountId: adminID,
+ privKey: [adminPriv],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [userID, "age", "118", "18"], //change age from 18 to 118
+ };
+ const res = await apiClient.runTransactionV1(req);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
+ t.equal(res.data.transactionReceipt.status, "COMMITTED");
+ }
+
+ {
+ const req = {
+ commandName: IrohaQuery.GetAccountDetail,
+ baseConfig: {
+ irohaHost: irohaHost,
+ irohaPort: irohaPort,
+ creatorAccountId: adminID,
+ privKey: [adminPriv],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [userID, "age", adminID, 1, "age", adminID],
+ };
+ const res = await apiClient.runTransactionV1(req);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
+ t.deepEqual(res.data.transactionReceipt, {
+ "admin@test": { age: "118" },
+ });
+ }
+
+ {
+ const req = {
+ commandName: IrohaQuery.GetEngineReceipts,
+ baseConfig: {
+ irohaHost: irohaHost,
+ irohaPort: irohaPort,
+ creatorAccountId: adminID,
+ privKey: [adminPriv],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [firstTxHash],
+ };
+ const res = await apiClient.runTransactionV1(req);
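+    // Note (added for clarity): firstTxHash points at a plain TransferAsset transaction rather
+    // than a CallEngine one, so no EVM engine receipts are expected and the unwrapped array is empty.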
+ t.deepEqual(res.data.transactionReceipt.array, [[]]);
+ }
+
+ const key = uuidv4().substring(0, 5) + Date.now();
+ const value = uuidv4().substring(0, 5) + Date.now();
+ {
+ const req = {
+ commandName: IrohaCommand.SetSettingValue,
+ baseConfig: {
+ irohaHost: irohaHost,
+ irohaPort: irohaPort,
+ creatorAccountId: adminID,
+ privKey: [adminPriv],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [key, value],
+ };
+ await t.rejects(
+ apiClient.runTransactionV1(req),
+ /[\s\S]*/,
+ "SetSettingValue transaction is rejected OK",
+ );
+ }
+
+ /**
+   * The callee and input values are taken from the example in the Iroha documentation.
+   * At some point, we should generate them on our own.
+ * @see https://iroha.readthedocs.io/en/main/develop/api/commands.html?highlight=CallEngine#id18
+ */
+ const callee = "7C370993FD90AF204FD582004E2E54E6A94F2651";
+ const input =
+ "40c10f19000000000000000000000000969453762b0c739dd285b31635efa00e24c2562800000000000000000000000000000000000000000000000000000000000004d2";
+ {
+ const req = {
+ commandName: IrohaCommand.CallEngine,
+ baseConfig: {
+ irohaHost: irohaHost,
+ irohaPort: irohaPort,
+ creatorAccountId: adminID,
+ privKey: [adminPriv],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [undefined, adminID, callee, input],
+ };
+ await t.rejects(
+ apiClient.runTransactionV1(req),
+ /[\s\S]*/,
+ "CallEngine transaction is rejected OK",
+ );
+ }
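+
+  /**
+   * Illustrative sketch (not part of the original test): the hard-coded `input` above looks like
+   * an ABI-encoded mint(address,uint256) call - selector 0x40c10f19, recipient
+   * 0x969453762b0c739dd285b31635efa00e24c25628, amount 1234 (0x4d2). Assuming an ABI encoder such
+   * as web3-eth-abi were available as a dev dependency, the value could be generated instead of copied:
+   *
+   *   import Web3EthAbi from "web3-eth-abi";
+   *   const generatedInput = Web3EthAbi.encodeFunctionCall(
+   *     {
+   *       name: "mint",
+   *       type: "function",
+   *       inputs: [
+   *         { type: "address", name: "to" },
+   *         { type: "uint256", name: "amount" },
+   *       ],
+   *     },
+   *     ["0x969453762b0c739dd285b31635efa00e24c25628", "1234"],
+   *   ).slice(2); // drop the "0x" prefix to match the format used above
+   */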
+
+ /**
+ * FIXME - the Iroha Javascript SDK does not give any output if we try to produce a pending transaction
+ * This results in an infinite loop and thus the following code cannot be executed.
+   * Once the Iroha Javascript SDK is fixed, we can safely produce a pending transaction.
+ * @see https://github.com/hyperledger/iroha-javascript/issues/66
+   * Dealing with it now would make the test suite fail, so we only test the empty pending transaction case.
+ */
+ {
+ const req = {
+ commandName: IrohaQuery.GetPendingTransactions,
+ baseConfig: {
+ irohaHost: irohaHost,
+ irohaPort: irohaPort,
+ creatorAccountId: adminID,
+ privKey: [adminPriv],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [5, undefined],
+ };
+ const res = await apiClient.runTransactionV1(req);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
+ t.deepEqual(res.data.transactionReceipt, []);
+ }
+
+ const keyPair5: KeyPair = cryptoHelper.generateKeyPair();
+ const adminPubC = keyPair5.publicKey;
+ const adminPrivC = keyPair5.privateKey;
+ {
+ const req = {
+ commandName: IrohaCommand.AddSignatory,
+ baseConfig: {
+ irohaHost: irohaHost,
+ irohaPort: irohaPort,
+ creatorAccountId: adminID,
+ privKey: [adminPriv],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [adminID, adminPubC],
+ };
+ const res = await apiClient.runTransactionV1(req);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
+ t.equal(res.data.transactionReceipt.status, "COMMITTED");
+ }
+
+ {
+ const req = {
+ commandName: IrohaCommand.SetAccountQuorum,
+ baseConfig: {
+ irohaHost: irohaHost,
+ irohaPort: irohaPort,
+ creatorAccountId: adminID,
+ privKey: [adminPriv],
+ quorum: 1,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [adminID, 2],
+ };
+ const res = await apiClient.runTransactionV1(req);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
+ console.log(res.data.transactionReceipt);
+ t.equal(res.data.transactionReceipt.status, "COMMITTED");
+ }
+
+ const keyPair6: KeyPair = cryptoHelper.generateKeyPair();
+ const nodePubB = keyPair6.publicKey;
+ /**
+   * Use the Postgres container's address as a fake peer address,
+   * since it differs from the existing Iroha node's address.
+ */
+ const peerAddr = `${postgresHost}:${postgresPort}`;
+ {
+ const req = {
+ commandName: IrohaCommand.AddPeer,
+ baseConfig: {
+ irohaHost: irohaHost,
+ irohaPort: irohaPort,
+ creatorAccountId: adminID,
+ privKey: [adminPriv, adminPrivC],
+ quorum: 2,
+ timeoutLimit: 5000,
+ tls: false,
+ },
+ params: [peerAddr, nodePubB],
+ };
+ const res = await apiClient.runTransactionV1(req);
+ t.ok(res);
+ t.ok(res.data);
+ t.equal(res.status, 200);
+ t.equal(res.data.transactionReceipt.status, "COMMITTED");
+ }
+
+ // // Use Promise.race to cancel the promise
+ // {
+ // const req1 = {
+ // commandName: "producePendingTx",
+ // params: [],
+ // };
+ // Promise.race([
+ // //FIXME - the Iroha Javascript SDK does not give any output if we try to produce a pending transaction
+ // // This results in an infinite loop and thus the following code cannot be executed.
+ // // This fix is not perfect. It cancels the request with a timeout, but will result in grpc "Error: 14 UNAVAILABLE: GOAWAY received
+ // Once the Iroha Javascript SDK is fixed, we can safely produce a pending transaction.
+ // apiClient.runTransactionV1(req1),
+ // new Promise((resolve) => setTimeout(resolve, 1000)),
+ // ]);
+ // }
+
+ // use bluebird to cancel Promise
+ // {
+ // const req1 = {
+ // commandName: "producePendingTx",
+ // params: [],
+ // };
+ // const promise = apiClient.runTransactionV1(req1);
+ // const p2 = new Promise((onCancel) => {
+ // promise;
+ // onCancel(() => console.log("p2 canceled"));
+ // });
+ // p2.cancel();
+ // }
+
+ // // {
+ // // const req = {
+ // // commandName: "removePeer",
+ // // params: [
+ // // "0000000000000000000000000000000000000000000000000000000000000002",
+ // // ],
+ // // };
+ // // const res = await apiClient.runTransactionV1(req);
+ // // console.log(res.data.transactionReceipt);
+ // // }
+
+ // // {
+ // // const req = {
+ // // commandName: "fetchCommits",
+ // // params: [],
+ // // };
+ // // const res = await apiClient.runTransactionV1(req);
+ // // t.ok(res);
+ // // t.ok(res.data);
+ // // t.equal(res.status, 200);
+ // // console.log(res.data.transactionReceipt);
+ // // }
+ t.end();
+});
+
+test("AFTER " + testCase, async (t: Test) => {
+ const pruning = pruneDockerAllIfGithubAction({ logLevel });
+ await t.doesNotReject(pruning, "Pruning didn't throw OK");
+ t.end();
+});
diff --git a/packages/cactus-plugin-ledger-connector-iroha/src/test/typescript/unit/api-surface.test.ts b/packages/cactus-plugin-ledger-connector-iroha/src/test/typescript/unit/api-surface.test.ts
new file mode 100644
index 00000000000..a77b09a8292
--- /dev/null
+++ b/packages/cactus-plugin-ledger-connector-iroha/src/test/typescript/unit/api-surface.test.ts
@@ -0,0 +1,8 @@
+import test, { Test } from "tape-promise/tape";
+
+import * as apiSurface from "../../../main/typescript/public-api";
+
+test("Library can be loaded", (t: Test) => {
+ t.ok(apiSurface, "apiSurface truthy OK");
+ t.end();
+});
diff --git a/packages/cactus-plugin-ledger-connector-iroha/src/test/typescript/unit/iroha-test-ledger-parameters.test.ts b/packages/cactus-plugin-ledger-connector-iroha/src/test/typescript/unit/iroha-test-ledger-parameters.test.ts
new file mode 100644
index 00000000000..e066d2fb2c9
--- /dev/null
+++ b/packages/cactus-plugin-ledger-connector-iroha/src/test/typescript/unit/iroha-test-ledger-parameters.test.ts
@@ -0,0 +1,35 @@
+import test, { Test } from "tape";
+import { IrohaTestLedger } from "@hyperledger/cactus-test-tooling";
+
+test("constructor does not throw with the default config", async (t: Test) => {
+ t.plan(1);
+
+ // No options
+ const irohaTestLedger = new IrohaTestLedger({
+ postgresHost: "127.0.0.1",
+ postgresPort: 5432,
+ });
+
+ t.ok(irohaTestLedger);
+ t.end();
+});
+
+test("Iroha environment variables passed correctly", async (t: Test) => {
+ t.plan(2);
+ const simpleEnvVars = [
+ "IROHA_POSTGRES_USER=postgres",
+ "IROHA_POSTGRES_PASSWORD=mysecretpassword",
+ "KEY=node0",
+ ];
+
+ const irohaOptions = {
+ postgresHost: "localhost",
+ postgresPort: 5432,
+ envVars: simpleEnvVars,
+ };
+ const irohaTestLedger = new IrohaTestLedger(irohaOptions);
+
+ t.equal(irohaTestLedger.envVars, simpleEnvVars);
+ t.ok(irohaTestLedger);
+ t.end();
+});
diff --git a/packages/cactus-plugin-ledger-connector-iroha/src/test/typescript/unit/postgres-test-container-parameters.test.ts b/packages/cactus-plugin-ledger-connector-iroha/src/test/typescript/unit/postgres-test-container-parameters.test.ts
new file mode 100644
index 00000000000..ad8f1241083
--- /dev/null
+++ b/packages/cactus-plugin-ledger-connector-iroha/src/test/typescript/unit/postgres-test-container-parameters.test.ts
@@ -0,0 +1,29 @@
+import test, { Test } from "tape";
+import { PostgresTestContainer } from "@hyperledger/cactus-test-tooling";
+
+test("constructor does not throw with the default config", async (t: Test) => {
+ t.plan(1);
+
+ // No options
+ const postgresTestContainer = new PostgresTestContainer();
+
+ t.ok(postgresTestContainer);
+ t.end();
+});
+
+test("Postgres environment variables passed correctly", async (t: Test) => {
+ t.plan(2);
+ const simpleEnvVars = [
+ "POSTGRES_USER=postgres",
+ "POSTGRES_PASSWORD=mysecretpassword",
+ ];
+
+ const postgresOptions = {
+ envVars: simpleEnvVars,
+ };
+ const postgresTestLedger = new PostgresTestContainer(postgresOptions);
+
+ t.equal(postgresTestLedger.envVars, simpleEnvVars);
+ t.ok(postgresTestLedger);
+ t.end();
+});
diff --git a/packages/cactus-plugin-ledger-connector-iroha/tsconfig.json b/packages/cactus-plugin-ledger-connector-iroha/tsconfig.json
new file mode 100644
index 00000000000..36b99e2d190
--- /dev/null
+++ b/packages/cactus-plugin-ledger-connector-iroha/tsconfig.json
@@ -0,0 +1,32 @@
+{
+ "extends": "../../tsconfig.base.json",
+ "compilerOptions": {
+ "composite": true,
+ "outDir": "./dist/lib/",
+ "declarationDir": "dist/types",
+ "resolveJsonModule": true,
+ "rootDir": "./src",
+ "tsBuildInfoFile": "../../.build-cache/cactus-plugin-ledger-connector-iroha.tsbuildinfo"
+ },
+ "include": [
+ "./src",
+ "src/**/*.json"
+ ],
+ "references": [
+ {
+ "path": "../cactus-common/tsconfig.json"
+ },
+ {
+ "path": "../cactus-core/tsconfig.json"
+ },
+ {
+ "path": "../cactus-core-api/tsconfig.json"
+ },
+ {
+ "path": "../cactus-plugin-keychain-memory/tsconfig.json"
+ },
+ {
+ "path": "../cactus-test-tooling/tsconfig.json"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/packages/cactus-test-tooling/src/main/typescript/iroha/iroha-test-ledger.ts b/packages/cactus-test-tooling/src/main/typescript/iroha/iroha-test-ledger.ts
new file mode 100644
index 00000000000..6eb5287abe9
--- /dev/null
+++ b/packages/cactus-test-tooling/src/main/typescript/iroha/iroha-test-ledger.ts
@@ -0,0 +1,451 @@
+import Docker, { Container, ContainerInfo } from "dockerode";
+import Joi from "joi";
+import { EventEmitter } from "events";
+import {
+ LogLevelDesc,
+ Logger,
+ LoggerProvider,
+ Bools,
+ Checks,
+} from "@hyperledger/cactus-common";
+import { ITestLedger } from "../i-test-ledger";
+import { IKeyPair } from "../i-key-pair";
+import { Containers } from "../common/containers";
+
+/*
+ * Contains options for Iroha container
+ */
+export interface IIrohaTestLedgerOptions {
+ readonly adminPriv?: string;
+ readonly adminPub?: string;
+ readonly nodePriv?: string;
+ readonly nodePub?: string;
+ readonly tlsCert?: string;
+ readonly tlsKey?: string;
+ readonly toriiTlsPort?: number;
+ readonly postgresHost: string;
+ readonly postgresPort: number;
+ readonly imageVersion?: string;
+ readonly imageName?: string;
+ readonly rpcToriiPort?: number;
+ readonly envVars?: string[];
+ readonly logLevel?: LogLevelDesc;
+ readonly emitContainerLogs?: boolean;
+}
+
+/*
+ * Provides default options for Iroha container
+ */
+export const IROHA_TEST_LEDGER_DEFAULT_OPTIONS = Object.freeze({
+ imageVersion: "2021-08-16--1183",
+ imageName: "ghcr.io/hyperledger/cactus-iroha-all-in-one",
+ adminPriv: " ",
+ adminPub: " ",
+ nodePriv: " ",
+ nodePub: " ",
+ tlsCert: " ",
+ tlsKey: " ",
+ rpcToriiPort: 50051,
+ toriiTlsPort: 55552,
+ envVars: [
+ "IROHA_POSTGRES_USER=postgres",
+ "IROHA_POSTGRES_PASSWORD=my-secret-password",
+ "KEY=node0",
+ ],
+});
+
+/*
+ * Provides validations for Iroha container's options
+ */
+export const IROHA_TEST_LEDGER_OPTIONS_JOI_SCHEMA: Joi.Schema = Joi.object().keys(
+ {
+ adminPriv: Joi.string().min(1).max(64).required(),
+ adminPub: Joi.string().min(1).max(64).required(),
+ nodePriv: Joi.string().min(1).max(64).required(),
+ nodePub: Joi.string().min(1).max(64).required(),
+ tlsCert: Joi.string().min(1).required(),
+ tlsKey: Joi.string().min(1).required(),
+ toriiTlsPort: Joi.number().port().required(),
+ postgresPort: Joi.number().port().required(),
+ postgresHost: Joi.string().hostname().required(),
+ imageVersion: Joi.string().min(5).required(),
+ imageName: Joi.string().min(1).required(),
+ rpcToriiPort: Joi.number().port().required(),
+ envVars: Joi.array().allow(null).required(),
+ },
+);
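+
+/**
+ * Illustrative usage sketch (not part of this change): a test would typically start a
+ * Postgres container first and hand its address to the Iroha test ledger, e.g.:
+ *
+ *   const postgres = new PostgresTestContainer({ logLevel: "INFO" });
+ *   await postgres.start();
+ *   const postgresHost = await postgres.getContainerIpAddress();
+ *   const postgresPort = await postgres.getPostgresPort();
+ *   const iroha = new IrohaTestLedger({ postgresHost, postgresPort });
+ *   await iroha.start();
+ *   const toriiHost = await iroha.getRpcToriiPortHost();
+ */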
+
+export class IrohaTestLedger implements ITestLedger {
+ public readonly imageVersion: string;
+ public readonly imageName: string;
+ public readonly rpcToriiPort: number;
+ public readonly envVars: string[];
+ public readonly emitContainerLogs: boolean;
+ public readonly postgresHost: string;
+ public readonly postgresPort: number;
+ public readonly adminPriv: string;
+ public readonly adminPub: string;
+ public readonly nodePriv: string;
+ public readonly nodePub: string;
+ public readonly tlsCert?: string;
+ public readonly tlsKey?: string;
+ public readonly toriiTlsPort?: number;
+
+ private readonly log: Logger;
+ private container: Container | undefined;
+ private containerId: string | undefined;
+
+ constructor(public readonly options: IIrohaTestLedgerOptions) {
+ const fnTag = `IrohaTestLedger#constructor()`;
+ if (!options) {
+ throw new TypeError(`IrohaTestLedger#ctor options was falsy.`);
+ }
+ Checks.nonBlankString(options.postgresHost, `${fnTag} postgresHost`);
+ Checks.truthy(options.postgresPort, `${fnTag} postgresPort`);
+
+ this.postgresHost = options.postgresHost;
+ this.postgresPort = options.postgresPort;
+ this.adminPriv =
+ options.adminPriv || IROHA_TEST_LEDGER_DEFAULT_OPTIONS.adminPriv;
+ this.adminPub =
+ options.adminPub || IROHA_TEST_LEDGER_DEFAULT_OPTIONS.adminPub;
+ this.nodePriv =
+ options.nodePriv || IROHA_TEST_LEDGER_DEFAULT_OPTIONS.nodePriv;
+ this.nodePub = options.nodePub || IROHA_TEST_LEDGER_DEFAULT_OPTIONS.nodePub;
+
+ this.imageVersion =
+ options.imageVersion || IROHA_TEST_LEDGER_DEFAULT_OPTIONS.imageVersion;
+ this.imageName =
+ options.imageName || IROHA_TEST_LEDGER_DEFAULT_OPTIONS.imageName;
+ this.rpcToriiPort =
+ options.rpcToriiPort || IROHA_TEST_LEDGER_DEFAULT_OPTIONS.rpcToriiPort;
+ this.envVars = options.envVars || [
+ ...IROHA_TEST_LEDGER_DEFAULT_OPTIONS.envVars,
+ ];
+ this.tlsCert = options.tlsCert || IROHA_TEST_LEDGER_DEFAULT_OPTIONS.tlsCert;
+ this.tlsKey = options.tlsKey || IROHA_TEST_LEDGER_DEFAULT_OPTIONS.tlsKey;
+ this.toriiTlsPort =
+ options.toriiTlsPort || IROHA_TEST_LEDGER_DEFAULT_OPTIONS.toriiTlsPort;
+
+ this.envVars.push(`IROHA_POSTGRES_HOST=${this.postgresHost}`);
+ this.envVars.push(`IROHA_POSTGRES_PORT=${this.postgresPort}`);
+ this.envVars.push(`ADMIN_PRIV=${this.adminPriv}`);
+ this.envVars.push(`ADMIN_PUB=${this.adminPub}`);
+ this.envVars.push(`NODE_PRIV=${this.nodePriv}`);
+ this.envVars.push(`NODE_PUB=${this.nodePub}`);
+
+ this.emitContainerLogs = Bools.isBooleanStrict(options.emitContainerLogs)
+ ? (options.emitContainerLogs as boolean)
+ : true;
+
+ this.validateConstructorOptions();
+ const label = "iroha-test-ledger";
+ const level = options.logLevel || "INFO";
+ this.log = LoggerProvider.getOrCreate({ level, label });
+ }
+
+ public getContainer(): Container {
+ const fnTag = "IrohaTestLedger#getContainer()";
+ if (!this.container) {
+ throw new Error(`${fnTag} container not yet started by this instance.`);
+ } else {
+ return this.container;
+ }
+ }
+
+ public get imageFqn(): string {
+ return `${this.imageName}:${this.imageVersion}`;
+ }
+
+ public async getRpcToriiPortHost(): Promise<string> {
+ const ipAddress = "127.0.0.1";
+ const hostPort: number = await this.getRpcToriiPort();
+ return `http://${ipAddress}:${hostPort}`;
+ }
+
+ /**
+ * Output is based on the standard Iroha Torii port number(50051).
+ *
+ * @see https://github.com/hyperledger/iroha/blob/main/example/config.docker
+ */
+ public getDefaultToriiPort(): number {
+ return 50051;
+ }
+
+ /**
+ * Output is based on the standard Iroha genesis.block content.
+ *
+ * @see https://github.com/hyperledger/iroha/blob/main/example/genesis.block
+ */
+ public getInternalAddr(): string {
+ return "127.0.0.1:10001";
+ }
+
+ /**
+ * Output is based on the standard Iroha genesis.block content.
+ *
+ * @see https://github.com/hyperledger/iroha/blob/main/example/genesis.block
+ */
+ public getDefaultAdminAccount(): string {
+ return "admin";
+ }
+
+ /**
+ * Output is based on the standard Iroha genesis.block content.
+ *
+ * @see https://github.com/hyperledger/iroha/blob/main/example/genesis.block
+ */
+ public getDefaultDomain(): string {
+ return "test";
+ }
+
+ /**
+ * Output is based on the standard Iroha admin user public key file location.
+ *
+ * @see https://github.com/hyperledger/iroha/blob/main/example/admin%40test.pub
+ * @see https://github.com/hyperledger/iroha/blob/main/example/genesis.block
+ */
+ public async getGenesisAccountPubKey(): Promise<string> {
+ const fnTag = `IrohaTestLedger#getGenesisAccountPubKey()`;
+ if (!this.container) {
+ throw new Error(`${fnTag} this.container cannot be falsy.`);
+ }
+ const publicKey = await Containers.pullFile(
+ this.container,
+ "/opt/iroha_data/admin@test.pub",
+ );
+ return publicKey;
+ }
+
+ /**
+ * Output is based on the standard Iroha admin user private key file location.
+ *
+ * @see https://github.com/hyperledger/iroha/blob/main/example/admin%40test.priv
+ */
+ public async getGenesisAccountPrivKey(): Promise<string> {
+ const fnTag = `IrohaTestLedger#getGenesisAccountPrivKey()`;
+ if (!this.container) {
+ throw new Error(`${fnTag} this.container cannot be falsy.`);
+ }
+ const privateKey = await Containers.pullFile(
+ this.container,
+ "/opt/iroha_data/admin@test.priv",
+ );
+ return privateKey;
+ }
+
+ /**
+ * Output is based on the standard Iroha node private/public keypair file location.
+ *
+ * @see https://github.com/hyperledger/iroha/blob/main/example/node0.priv
+ * @see https://github.com/hyperledger/iroha/blob/main/example/test%40test.pub
+ */
+ public async getNodeKeyPair(): Promise<IKeyPair> {
+ const fnTag = `IrohaTestLedger#getNodeKeyPair()`;
+ if (!this.container) {
+ throw new Error(`${fnTag} this.container cannot be falsy.`);
+ }
+ const publicKey = await Containers.pullFile(
+ this.container,
+ "/opt/iroha_data/node0.pub",
+ );
+ const privateKey = await Containers.pullFile(
+ this.container,
+ "/opt/iroha_data/node0.priv",
+ );
+ return { publicKey, privateKey };
+ }
+
+ public async start(omitPull = false): Promise<Container> {
+ if (this.container) {
+ await this.container.stop();
+ await this.container.remove();
+ }
+ const docker = new Docker();
+ if (!omitPull) {
+ this.log.debug(`Pulling container image ${this.imageFqn} ...`);
+ await Containers.pullImage(this.imageFqn);
+ this.log.debug(`Pulled ${this.imageFqn} OK. Starting container...`);
+ }
+
+ return new Promise<Container>((resolve, reject) => {
+ const admin = this.getDefaultAdminAccount();
+ const domain = this.getDefaultDomain();
+ const adminID = `${admin}@${domain}`;
+ const toriiPort = this.getDefaultToriiPort();
+ const eventEmitter: EventEmitter = docker.run(
+ this.imageFqn,
+ [],
+ [],
+ {
+ ExposedPorts: {
+ [`${this.rpcToriiPort}/tcp`]: {}, // Iroha RPC - Torii
+ },
+ Env: this.envVars,
+ Healthcheck: {
+ // Healthcheck script usage: python3 /healthcheck.py <accountID> <toriiPort>
+ Test: [
+ "CMD-SHELL",
+ `python3 /healthcheck.py ${adminID} ${toriiPort}`,
+ ],
+ Interval: 1000000000, // 1 second
+ Timeout: 3000000000, // 3 seconds
+ Retries: 299,
+ StartPeriod: 3000000000, // 3 seconds
+ },
+ HostConfig: {
+ PublishAllPorts: true,
+ AutoRemove: true,
+ },
+ },
+ {},
+ (err: unknown) => {
+ if (err) {
+ reject(err);
+ }
+ },
+ );
+
+ eventEmitter.once("start", async (container: Container) => {
+ this.log.debug(`Started container OK. Waiting for healthcheck...`);
+ this.container = container;
+ this.containerId = container.id;
+ if (this.emitContainerLogs) {
+ const logOptions = { follow: true, stderr: true, stdout: true };
+ const logStream = await container.logs(logOptions);
+ logStream.on("data", (data: Buffer) => {
+ this.log.debug(`[${this.imageFqn}] %o`, data.toString("utf-8"));
+ });
+ }
+ try {
+ await this.waitForHealthCheck();
+ this.log.debug(`Healthcheck passing OK.`);
+ resolve(container);
+ } catch (ex) {
+ reject(ex);
+ }
+ });
+ });
+ }
+
+ public async waitForHealthCheck(timeoutMs = 180000): Promise<void> {
+ const fnTag = "IrohaTestLedger#waitForHealthCheck()";
+ const startedAt = Date.now();
+ let isHealthy = false;
+ do {
+ if (Date.now() >= startedAt + timeoutMs) {
+ throw new Error(`${fnTag} timed out (${timeoutMs}ms)`);
+ }
+ const containerInfo = await this.getContainerInfo();
+ this.log.debug(`ContainerInfo.Status=%o`, containerInfo.Status);
+ this.log.debug(`ContainerInfo.State=%o`, containerInfo.State);
+ isHealthy = containerInfo.Status.endsWith("(healthy)");
+ if (!isHealthy) {
+ await new Promise((resolve2) => setTimeout(resolve2, 1000));
+ }
+ } while (!isHealthy);
+ }
+
+ public stop(): Promise<unknown> {
+ return Containers.stop(this.container as Container);
+ }
+
+ public destroy(): Promise<unknown> {
+ const fnTag = "IrohaTestLedger#destroy()";
+ if (this.container) {
+ return this.container.remove();
+ } else {
+ const ex = new Error(`${fnTag} Container not found, nothing to destroy.`);
+ return Promise.reject(ex);
+ }
+ }
+
+ protected async getContainerInfo(): Promise<ContainerInfo> {
+ const docker = new Docker();
+ const image = this.imageFqn;
+ const containerInfos = await docker.listContainers({});
+
+ let aContainerInfo;
+ if (this.containerId !== undefined) {
+ aContainerInfo = containerInfos.find((ci) => ci.Id === this.containerId);
+ }
+
+ if (aContainerInfo) {
+ return aContainerInfo;
+ } else {
+ throw new Error(`IrohaTestLedger#getContainerInfo() no image "${image}"`);
+ }
+ }
+
+ /**
+ * Return the randomly allocated Iroha Torii port number
+ */
+ public async getRpcToriiPort(): Promise<number> {
+ const fnTag = "IrohaTestLedger#getRpcToriiPort()";
+ const aContainerInfo = await this.getContainerInfo();
+ const { rpcToriiPort: thePort } = this;
+ const { Ports: ports } = aContainerInfo;
+
+ if (ports.length < 1) {
+ throw new Error(`${fnTag} no ports exposed or mapped at all`);
+ }
+ const mapping = ports.find((x) => x.PrivatePort === thePort);
+ if (mapping) {
+ if (!mapping.PublicPort) {
+ throw new Error(`${fnTag} port ${thePort} mapped but not public`);
+ } else if (mapping.IP !== "0.0.0.0") {
+ throw new Error(`${fnTag} port ${thePort} mapped to localhost`);
+ } else {
+ return mapping.PublicPort;
+ }
+ } else {
+ throw new Error(`${fnTag} no mapping found for ${thePort}`);
+ }
+ }
+
+ public async getContainerIpAddress(): Promise<string> {
+ const fnTag = "IrohaTestLedger#getContainerIpAddress()";
+ const aContainerInfo = await this.getContainerInfo();
+
+ if (aContainerInfo) {
+ const { NetworkSettings } = aContainerInfo;
+ const networkNames: string[] = Object.keys(NetworkSettings.Networks);
+ if (networkNames.length < 1) {
+ throw new Error(`${fnTag} container not connected to any networks`);
+ } else {
+ // return IP address of container on the first network that we found
+ // it connected to. Make this configurable?
+ return NetworkSettings.Networks[networkNames[0]].IPAddress;
+ }
+ } else {
+ throw new Error(`${fnTag} cannot find image: ${this.imageName}`);
+ }
+ }
+
+ private validateConstructorOptions(): void {
+ const validationResult = Joi.validate(
+ {
+ adminPriv: this.adminPriv,
+ adminPub: this.adminPub,
+ nodePriv: this.nodePriv,
+ nodePub: this.nodePub,
+ tlsCert: this.tlsCert,
+ tlsKey: this.tlsKey,
+ toriiTlsPort: this.toriiTlsPort,
+ postgresHost: this.postgresHost,
+ postgresPort: this.postgresPort,
+ imageVersion: this.imageVersion,
+ imageName: this.imageName,
+ rpcToriiPort: this.rpcToriiPort,
+ envVars: this.envVars,
+ },
+ IROHA_TEST_LEDGER_OPTIONS_JOI_SCHEMA,
+ );
+
+ if (validationResult.error) {
+ throw new Error(
+ `IrohaTestLedger#ctor ${validationResult.error.annotate()}`,
+ );
+ }
+ }
+}
diff --git a/packages/cactus-test-tooling/src/main/typescript/postgres/postgres-test-container.ts b/packages/cactus-test-tooling/src/main/typescript/postgres/postgres-test-container.ts
new file mode 100644
index 00000000000..ae4469beaee
--- /dev/null
+++ b/packages/cactus-test-tooling/src/main/typescript/postgres/postgres-test-container.ts
@@ -0,0 +1,329 @@
+import Docker, { Container, ContainerInfo } from "dockerode";
+import Joi from "joi";
+import tar from "tar-stream";
+import { EventEmitter } from "events";
+import {
+ LogLevelDesc,
+ Logger,
+ LoggerProvider,
+ Bools,
+} from "@hyperledger/cactus-common";
+import { ITestLedger } from "../i-test-ledger";
+import { Streams } from "../common/streams";
+import { Containers } from "../common/containers";
+
+/*
+ * Contains options for Postgres container
+ */
+export interface IPostgresTestContainerConstructorOptions {
+ readonly imageVersion?: string;
+ readonly imageName?: string;
+ readonly postgresPort?: number;
+ readonly envVars?: string[];
+ readonly logLevel?: LogLevelDesc;
+ readonly emitContainerLogs?: boolean;
+}
+
+/*
+ * Provides default options for Postgres container
+ */
+export const POSTGRES_TEST_CONTAINER_DEFAULT_OPTIONS = Object.freeze({
+ imageVersion: "9.5-alpine",
+ imageName: "postgres",
+ postgresPort: 5432,
+ envVars: ["POSTGRES_USER=postgres", "POSTGRES_PASSWORD=my-secret-password"],
+});
+
+/*
+ * Provides validations for Postgres container's options
+ */
+export const POSTGRES_TEST_CONTAINER_OPTIONS_JOI_SCHEMA: Joi.Schema = Joi.object().keys(
+ {
+ imageVersion: Joi.string().min(5).required(),
+ imageName: Joi.string().min(1).required(),
+ postgresPort: Joi.number().min(1024).max(65535).required(),
+ envVars: Joi.array().allow(null).required(),
+ },
+);
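+
+/**
+ * Illustrative sketch (not part of this change): the defaults above can be overridden per
+ * test, e.g. to pin the image version or supply the credentials used by the unit tests:
+ *
+ *   const postgres = new PostgresTestContainer({
+ *     imageVersion: "9.5-alpine",
+ *     envVars: ["POSTGRES_USER=postgres", "POSTGRES_PASSWORD=mysecretpassword"],
+ *   });
+ */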
+
+export class PostgresTestContainer implements ITestLedger {
+ public readonly imageVersion: string;
+ public readonly imageName: string;
+ public readonly postgresPort: number;
+ public readonly envVars: string[];
+ public readonly emitContainerLogs: boolean;
+
+ private readonly log: Logger;
+ private container: Container | undefined;
+ private containerId: string | undefined;
+
+ constructor(
+ public readonly options: IPostgresTestContainerConstructorOptions = {},
+ ) {
+ if (!options) {
+ throw new TypeError(`PostgresTestContainer#ctor options was falsy.`);
+ }
+ this.imageVersion =
+ options.imageVersion ||
+ POSTGRES_TEST_CONTAINER_DEFAULT_OPTIONS.imageVersion;
+ this.imageName =
+ options.imageName || POSTGRES_TEST_CONTAINER_DEFAULT_OPTIONS.imageName;
+ this.postgresPort =
+ options.postgresPort ||
+ POSTGRES_TEST_CONTAINER_DEFAULT_OPTIONS.postgresPort;
+ this.envVars =
+ options.envVars || POSTGRES_TEST_CONTAINER_DEFAULT_OPTIONS.envVars;
+
+ this.emitContainerLogs = Bools.isBooleanStrict(options.emitContainerLogs)
+ ? (options.emitContainerLogs as boolean)
+ : true;
+
+ this.validateConstructorOptions();
+ const label = "postgres-test-container";
+ const level = options.logLevel || "INFO";
+ this.log = LoggerProvider.getOrCreate({ level, label });
+ }
+
+ public getContainer(): Container {
+ const fnTag = "PostgresTestContainer#getContainer()";
+ if (!this.container) {
+ throw new Error(`${fnTag} container not yet started by this instance.`);
+ } else {
+ return this.container;
+ }
+ }
+
+ public getimageName(): string {
+ return `${this.imageName}:${this.imageVersion}`;
+ }
+
+ public async getPostgresPortHost(): Promise<string> {
+ const ipAddress = "127.0.0.1";
+ const hostPort: number = await this.getPostgresPort();
+ return `http://${ipAddress}:${hostPort}`;
+ }
+
+ public async getFileContents(filePath: string): Promise<string> {
+ const response: any = await this.getContainer().getArchive({
+ path: filePath,
+ });
+ const extract: tar.Extract = tar.extract({ autoDestroy: true });
+
+ return new Promise<string>((resolve, reject) => {
+ let fileContents = "";
+ extract.on("entry", async (header: any, stream, next) => {
+ stream.on("error", (err: Error) => {
+ reject(err);
+ });
+ const chunks: string[] = await Streams.aggregate(stream);
+ fileContents += chunks.join("");
+ stream.resume();
+ next();
+ });
+
+ extract.on("finish", () => {
+ resolve(fileContents);
+ });
+
+ response.pipe(extract);
+ });
+ }
+
+ public async start(): Promise<Container> {
+ const imageFqn = this.getimageName();
+
+ if (this.container) {
+ await this.container.stop();
+ await this.container.remove();
+ }
+ const docker = new Docker();
+
+ this.log.debug(`Pulling container image ${imageFqn} ...`);
+ await this.pullContainerImage(imageFqn);
+ this.log.debug(`Pulled ${imageFqn} OK. Starting container...`);
+
+ return new Promise<Container>((resolve, reject) => {
+ const eventEmitter: EventEmitter = docker.run(
+ imageFqn,
+ [],
+ [],
+ {
+ Env: this.envVars,
+ Healthcheck: {
+ Test: ["CMD-SHELL", "pg_isready -U postgres"],
+ Interval: 1000000000, // 1 second
+ Timeout: 3000000000, // 3 seconds
+ Retries: 299,
+ StartPeriod: 3000000000, // 3 seconds
+ },
+ HostConfig: {
+ PublishAllPorts: true,
+ AutoRemove: true,
+ },
+ },
+ {},
+ (err: unknown) => {
+ if (err) {
+ reject(err);
+ }
+ },
+ );
+
+ eventEmitter.once("start", async (container: Container) => {
+ this.log.debug(`Started container OK. Waiting for healthcheck...`);
+ this.container = container;
+ this.containerId = container.id;
+ if (this.emitContainerLogs) {
+ const logOptions = { follow: true, stderr: true, stdout: true };
+ const logStream = await container.logs(logOptions);
+ logStream.on("data", (data: Buffer) => {
+ this.log.debug(`[${imageFqn}] %o`, data.toString("utf-8"));
+ });
+ }
+ try {
+ await this.waitForHealthCheck();
+ this.log.debug(`Healthcheck passing OK.`);
+ resolve(container);
+ } catch (ex) {
+ reject(ex);
+ }
+ });
+ });
+ }
+
+ public async waitForHealthCheck(timeoutMs = 180000): Promise<void> {
+ const fnTag = "PostgresTestContainer#waitForHealthCheck()";
+ const startedAt = Date.now();
+ let isHealthy = false;
+ do {
+ if (Date.now() >= startedAt + timeoutMs) {
+ throw new Error(`${fnTag} timed out (${timeoutMs}ms)`);
+ }
+ const containerInfo = await this.getContainerInfo();
+ this.log.debug(`ContainerInfo.Status=%o`, containerInfo.Status);
+ this.log.debug(`ContainerInfo.State=%o`, containerInfo.State);
+ isHealthy = containerInfo.Status.endsWith("(healthy)");
+ if (!isHealthy) {
+ await new Promise((resolve2) => setTimeout(resolve2, 1000));
+ }
+ } while (!isHealthy);
+ }
+
+ public stop(): Promise<unknown> {
+ return Containers.stop(this.container as Container);
+ }
+
+ public destroy(): Promise<unknown> {
+ const fnTag = "PostgresTestContainer#destroy()";
+ if (this.container) {
+ return this.container.remove();
+ } else {
+ const ex = new Error(`${fnTag} Container not found, nothing to destroy.`);
+ return Promise.reject(ex);
+ }
+ }
+
+ protected async getContainerInfo(): Promise<ContainerInfo> {
+ const docker = new Docker();
+ const image = this.getimageName();
+ const containerInfos = await docker.listContainers({});
+
+ let aContainerInfo;
+ if (this.containerId !== undefined) {
+ aContainerInfo = containerInfos.find((ci) => ci.Id === this.containerId);
+ }
+
+ if (aContainerInfo) {
+ return aContainerInfo;
+ } else {
+ throw new Error(
+ `PostgresTestContainer#getContainerInfo() no image "${image}"`,
+ );
+ }
+ }
+
+ public async getPostgresPort(): Promise<number> {
+ const fnTag = "PostgresTestContainer#getPostgresPort()";
+ const aContainerInfo = await this.getContainerInfo();
+ const { postgresPort: thePort } = this;
+ const { Ports: ports } = aContainerInfo;
+
+ if (ports.length < 1) {
+ throw new Error(`${fnTag} no ports exposed or mapped at all`);
+ }
+ const mapping = ports.find((x) => x.PrivatePort === thePort);
+ if (mapping) {
+ if (!mapping.PublicPort) {
+ throw new Error(`${fnTag} port ${thePort} mapped but not public`);
+ } else if (mapping.IP !== "0.0.0.0") {
+ throw new Error(`${fnTag} port ${thePort} mapped to localhost`);
+ } else {
+ return mapping.PublicPort;
+ }
+ } else {
+ throw new Error(`${fnTag} no mapping found for ${thePort}`);
+ }
+ }
+
+ public async getContainerIpAddress(): Promise<string> {
+ const fnTag = "PostgresTestContainer#getContainerIpAddress()";
+ const aContainerInfo = await this.getContainerInfo();
+
+ if (aContainerInfo) {
+ const { NetworkSettings } = aContainerInfo;
+ const networkNames: string[] = Object.keys(NetworkSettings.Networks);
+ if (networkNames.length < 1) {
+ throw new Error(`${fnTag} container not connected to any networks`);
+ } else {
+ // return IP address of container on the first network that we found
+ // it connected to. Make this configurable?
+ return NetworkSettings.Networks[networkNames[0]].IPAddress;
+ }
+ } else {
+ throw new Error(`${fnTag} cannot find image: ${this.imageName}`);
+ }
+ }
+
+ private pullContainerImage(containerNameAndTag: string): Promise<unknown[]> {
+ return new Promise<unknown[]>((resolve, reject) => {
+ const docker = new Docker();
+ docker.pull(containerNameAndTag, (pullError: any, stream: any) => {
+ if (pullError) {
+ reject(pullError);
+ } else {
+ docker.modem.followProgress(
+ stream,
+ (progressError: any, output: any[]) => {
+ if (progressError) {
+ reject(progressError);
+ } else {
+ resolve(output);
+ }
+ },
+ );
+ }
+ });
+ });
+ }
+
+ private validateConstructorOptions(): void {
+ const validationResult = Joi.validate<
+ IPostgresTestContainerConstructorOptions
+ >(
+ {
+ imageVersion: this.imageVersion,
+ imageName: this.imageName,
+ postgresPort: this.postgresPort,
+ envVars: this.envVars,
+ },
+ POSTGRES_TEST_CONTAINER_OPTIONS_JOI_SCHEMA,
+ );
+
+ if (validationResult.error) {
+ throw new Error(
+ `PostgresTestContainer#ctor ${validationResult.error.annotate()}`,
+ );
+ }
+ }
+}
diff --git a/packages/cactus-test-tooling/src/main/typescript/public-api.ts b/packages/cactus-test-tooling/src/main/typescript/public-api.ts
index 502ab60d6d6..4370cd2181e 100755
--- a/packages/cactus-test-tooling/src/main/typescript/public-api.ts
+++ b/packages/cactus-test-tooling/src/main/typescript/public-api.ts
@@ -50,6 +50,20 @@ export {
FABRIC_TEST_LEDGER_OPTIONS_JOI_SCHEMA,
} from "./fabric/fabric-test-ledger-v1";
+export {
+ IrohaTestLedger,
+ IIrohaTestLedgerOptions,
+ IROHA_TEST_LEDGER_DEFAULT_OPTIONS,
+ IROHA_TEST_LEDGER_OPTIONS_JOI_SCHEMA,
+} from "./iroha/iroha-test-ledger";
+
+export {
+ PostgresTestContainer,
+ IPostgresTestContainerConstructorOptions,
+ POSTGRES_TEST_CONTAINER_DEFAULT_OPTIONS,
+ POSTGRES_TEST_CONTAINER_OPTIONS_JOI_SCHEMA,
+} from "./postgres/postgres-test-container";
+
export {
CactusKeychainVaultServer,
ICactusKeychainVaultServerOptions,
diff --git a/tsconfig.json b/tsconfig.json
index f76dc8a947a..ab114f4789c 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -49,6 +49,7 @@
{
"path": "./packages/cactus-plugin-ledger-connector-fabric/tsconfig.json"
},
+ { "path": "./packages/cactus-plugin-ledger-connector-iroha/tsconfig.json" },
{
"path": "./packages/cactus-plugin-ledger-connector-quorum/tsconfig.json"
},
diff --git a/webpack.prod.node.js b/webpack.prod.node.js
index a1e8b896900..d1b5c871316 100644
--- a/webpack.prod.node.js
+++ b/webpack.prod.node.js
@@ -69,6 +69,7 @@ module.exports = {
externals: {
"swarm-js": "swarm-js",
"node-ssh": "node-ssh",
+ "grpc": "grpc",
npm: "npm",
"fabric-client": "fabric-client",
"fabric-ca-client": "fabric-ca-client",