diff --git a/Jenkinsfile b/Jenkinsfile new file mode 100644 index 0000000000..53c9fabe1b --- /dev/null +++ b/Jenkinsfile @@ -0,0 +1,175 @@ +#!groovy + +// Copyright IBM Corp All Rights Reserved +// +// SPDX-License-Identifier: Apache-2.0 +// + +// Jenkinsfile get triggered when a patchset a submitted or merged +// On Verify job, pull fabric, nodeenv, javaenv images from nexus3 and +// run gulp tests. On merge job, pull above images from nexus3 and publish +// npm modules with snapshot tag and API docs after merge job is successful + +@Library("fabric-ci-lib") _ // global shared library from ci-management repository +// global shared library from ci-management repository +// https://github.com/hyperledger/ci-management/tree/master/vars (Global Shared scripts) +timestamps { // set the timestamps on the jenkins console + timeout(40) { // Build timeout set to 40 mins + if(env.NODE_ARCH != "hyp-x") { + node ('hyp-z') { // trigger jobs on s390x builds nodes + env.NODE_VER = "8.14.0" // Set node version + env.GOPATH = "$WORKSPACE/gopath" + env.PATH = "$GOPATH/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin:~/npm/bin:/home/jenkins/.nvm/versions/node/v${NODE_VER}/bin:$PATH" + buildStages() // call buildStages + } // End node + } else { + node ('hyp-x') { // trigger jobs on x86_64 builds nodes + def nodeHome = tool 'nodejs-8.14.0' + env.GOPATH = "$WORKSPACE/gopath" + env.PATH = "$GOPATH/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin:${nodeHome}/bin:$PATH" + buildStages() // call buildStages + } // end node block + } + } // end timeout block +} // end timestamps block + +def buildStages() { + try { + def ROOTDIR = pwd() // workspace dir (/w/workspace/) + def failure_stage = "none" + // set MARCH value to amd64, s390x, ppc64le + env. 
MARCH = sh(returnStdout: true, script: "uname -m | sed 's/x86_64/amd64/g'").trim() + stage('Clean Environment') { + // delete working directory + deleteDir() + // Clean build environment before start the build + fabBuildLibrary.cleanupEnv() + // Display jenkins environment details + fabBuildLibrary.envOutput() + } + + stage('Checkout SCM') { + // Get changes from gerrit + fabBuildLibrary.cloneRefSpec('fabric-sdk-node') + // Load properties from ci.properties file + props = fabBuildLibrary.loadProperties() + } + + stage("Build Artifacts") { + dir("$ROOTDIR/$BASE_DIR") { + if(props["IMAGE_SOURCE"] == "build") { + // Set PATH + env.GOROOT = "/opt/go/go" + props["GO_VER"] + ".linux." + "$MARCH" + env.GOPATH = "$GOPATH/bin" + env.PATH = "$GOROOT/bin:$GOPATH/bin:$PATH" + // call buildFabric to clone and build images + fabBuildLibrary.cloneScm('fabric', '$GERRIT_BRANCH') + fabBuildLibrary.fabBuildImages('fabric', 'docker') + // call buildFabric to clone and build images + fabBuildLibrary.cloneScm('fabric-ca', '$GERRIT_BRANCH') + fabBuildLibrary.fabBuildImages('fabric-ca', 'docker') + // Pull Docker Images from nexus3 + fabBuildLibrary.pullDockerImages(props["FAB_BASE_VERSION"], props["FAB_IMAGES_LIST"]) + // Pull Thirdparty Docker Images from hyperledger DockerHub + fabBuildLibrary.pullThirdPartyImages(props["FAB_BASEIMAGE_VERSION"], props["FAB_THIRDPARTY_IMAGES_LIST"]) + } else { + if(env.GERRIT_BRANCH == "master") { + // Pull Docker Images from nexus3 + fabBuildLibrary.pullDockerImages(props["FAB_BASE_VERSION"], props["FAB_IMAGES_LIST"]) + // Pull Thirdparty Docker Images from hyperledger DockerHub + fabBuildLibrary.pullThirdPartyImages(props["FAB_BASEIMAGE_VERSION"], props["FAB_THIRDPARTY_IMAGES_LIST"]) + } + else { + sh 'echo -e "\\033[1m SKIP PULLING IMAGES FROM NEXUS.\\033[0m"' + sh 'echo -e "\\033[1m Let gulp docker-ready pull images from DockerHub\\033[0m"' + } + } + } + } + // Run gulp tests (headless and integration tests) + stage("Headless & Integration Tests") 
{
+      wrap([$class: 'AnsiColorBuildWrapper', 'colorMapName': 'xterm']) {
+        try {
+          dir("$ROOTDIR/$PROJECT_DIR/scripts/ci_scripts") {
+            sh './ciScript.sh --sdk_e2e_Tests'
+          }
+        }
+        catch (err) {
+          failure_stage = "sdk_e2e_Tests"
+          currentBuild.result = 'FAILURE'
+          throw err
+        }
+      }
+    }
+
+// Publish npm modules only from amd64 merge jobs
+if ((env.JOB_TYPE == "merge") && (env.MARCH == "amd64")) {
+    publishNpm()
+    apiDocs()
+} else {
+    echo "Don't publish npm modules and api docs from VERIFY job"
+}
+  } finally { // post build actions
+      // Don't fail the build if there is no coverage report file
+      step([$class: 'CoberturaPublisher', autoUpdateHealth: false, autoUpdateStability: false,
+        coberturaReportFile: '**/cobertura-coverage.xml', failUnhealthy: false, failUnstable: false,
+        failNoReports: false, maxNumberOfBuilds: 0, onlyStable: false, sourceEncoding: 'ASCII',
+        zoomCoverageChart: false])
+      // Don't fail the build if there is no log file
+      archiveArtifacts allowEmptyArchive: true, artifacts: '**/*.log'
+      // Send notifications only for merge failures
+      if (env.JOB_TYPE == "merge") {
+        if (currentBuild.result == 'FAILURE') {
+          // Send notification to rocketChat channel
+          // Send merge build failure email notifications to the submitter
+          sendNotifications(currentBuild.result, props["CHANNEL_NAME"])
+        }
+      }
+      // Delete containers
+      fabBuildLibrary.deleteContainers()
+      // Delete unused docker images (none,dev,test-vp etc..)
+      fabBuildLibrary.deleteUnusedImages()
+  } // end finally block
+} // end buildStages
+
+def publishNpm() {
+  // Publish npm modules after successful merge
+  stage("Publish npm Modules") {
+    sh 'echo "-------> Publish npm Modules"'
+    withCredentials([[$class       : 'StringBinding',
+                      credentialsId: 'NPM_LOCAL',
+                      variable     : 'NPM_TOKEN']]) {
+      try {
+        dir("$ROOTDIR/$PROJECT_DIR/scripts/ci_scripts") {
+          sh './ciScript.sh --publish_NpmModules'
+        }
+      } catch (err) {
+        failure_stage = "publish_NpmModules"
+        currentBuild.result = 'FAILURE'
+        throw err
+      }
+    }
+  }
+}
+
+def apiDocs() {
+  // Publish SDK_NODE API docs after successful merge
+  stage("Publish API Docs") {
+    sh 'echo "--------> Publish API Docs"'
+    withCredentials([[$class          : 'UsernamePasswordMultiBinding',
+                      credentialsId   : 'sdk-node-credentials',
+                      usernameVariable: 'NODE_SDK_USERNAME',
+                      passwordVariable: 'NODE_SDK_PASSWORD']]) {
+      try {
+        dir("$ROOTDIR/$PROJECT_DIR/scripts/ci_scripts") {
+          sh './ciScript.sh --publish_ApiDocs'
+        }
+      }
+      catch (err) {
+        failure_stage = "publish_Api_Docs"
+        currentBuild.result = 'FAILURE'
+        throw err
+      }
+    }
+  }
+}
\ No newline at end of file
diff --git a/Jenkinsfile.x b/Jenkinsfile.x
deleted file mode 100644
index e46b96b818..0000000000
--- a/Jenkinsfile.x
+++ /dev/null
@@ -1,176 +0,0 @@
-// Copyright IBM Corp All Rights Reserved
-//
-// SPDX-License-Identifier: Apache-2.0
-//
-timeout(40) {
-node ('hyp-x') { // trigger build on x86_64 node
-  timestamps {
-    try {
-     def ROOTDIR = pwd() // workspace dir (/w/workspace/)
-     def nodeHome = tool 'nodejs-8.14.0'
-     env.VERSION = sh(returnStdout: true, script: 'curl -O https://raw.githubusercontent.com/hyperledger/fabric/master/Makefile && cat Makefile | grep "BASE_VERSION =" | cut -d "=" -f2').trim()
-     env.VERSION = "$VERSION" // BASE_VERSION from fabric Makefile
-     env.ARCH = "amd64"
-     env.IMAGE_TAG = "${ARCH}-${VERSION}-stable" // fabric latest stable version from nexus
-     env.PROJECT_VERSION = "${VERSION}-stable"
-     env.PROJECT_DIR = 
"gopath/src/github.com/hyperledger" - env.GOPATH = "$WORKSPACE/gopath" - env.PATH = "$GOPATH/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin:${nodeHome}/bin:$PATH" - def jobname = sh(returnStdout: true, script: 'echo ${JOB_NAME} | grep -q "verify" && echo patchset || echo merge').trim() - def failure_stage = "none" -// delete working directory - deleteDir() - stage("Fetch Patchset") { - try { - if (jobname == "patchset") { - println "$GERRIT_REFSPEC" - println "$GERRIT_BRANCH" - checkout([ - $class: 'GitSCM', - branches: [[name: '$GERRIT_REFSPEC']], - extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'gopath/src/github.com/hyperledger/$PROJECT'], [$class: 'CheckoutOption', timeout: 10]], - userRemoteConfigs: [[credentialsId: 'hyperledger-jobbuilder', name: 'origin', refspec: '$GERRIT_REFSPEC:$GERRIT_REFSPEC', url: '$GIT_BASE']]]) - } else { - // Clone fabric-sdk-node on merge - println "Clone $PROJECT repository" - checkout([ - $class: 'GitSCM', - branches: [[name: 'refs/heads/$GERRIT_BRANCH']], - extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'gopath/src/github.com/hyperledger/$PROJECT']], - userRemoteConfigs: [[credentialsId: 'hyperledger-jobbuilder', name: 'origin', refspec: '+refs/heads/$GERRIT_BRANCH:refs/remotes/origin/$GERRIT_BRANCH', url: '$GIT_BASE']]]) - } - dir("${ROOTDIR}/$PROJECT_DIR/$PROJECT") { - sh ''' - # Print last two commit details - echo - git log -n2 --pretty=oneline --abbrev-commit - echo - ''' - } - } - catch (err) { - failure_stage = "Fetch patchset" - throw err - } - } -// clean environment and get env data - stage("Clean Environment - Get Env Info") { - wrap([$class: 'AnsiColorBuildWrapper', 'colorMapName': 'xterm']) { - try { - dir("${ROOTDIR}/$PROJECT_DIR/fabric-sdk-node/scripts/Jenkins_Scripts") { - sh './CI_Script.sh --clean_Environment --env_Info' - } - } - catch (err) { - failure_stage = "Clean Environment - Get Env Info" - throw err - } - } - } - -// Pull 
fabric,fabric-ca and Javaenv - stage("Pull Docker Images") { - wrap([$class: 'AnsiColorBuildWrapper', 'colorMapName': 'xterm']) { - try { - dir("${ROOTDIR}/$PROJECT_DIR/fabric-sdk-node/scripts/Jenkins_Scripts") { - sh './CI_Script.sh --pull_Docker_Images' - } - } - catch (err) { - failure_stage = "Pull docker images" - currentBuild.result = 'FAILURE' - throw err - } - } - } - -// Run gulp tests (headless and integration tests) - stage("Headless & Integration Tests") { - wrap([$class: 'AnsiColorBuildWrapper', 'colorMapName': 'xterm']) { - try { - dir("${ROOTDIR}/$PROJECT_DIR/fabric-sdk-node/scripts/Jenkins_Scripts") { - // Get the testFabricVersion and thirdpartyVersion from package.json - // and Pull the DockerImages from dockerhub and run the Integration Tests - sh './CI_Script.sh --sdk_E2e_Tests' - } - } - catch (err) { - failure_stage = "sdk_E2e_Tests" - currentBuild.result = 'FAILURE' - throw err - } - } - } - -// Publish npm modules from merged job -if (env.JOB_NAME == "fabric-sdk-node-merge-x86_64") { - publishNpm() -} else { - echo "------> Don't publish npm modules from VERIFY job" - } - -// Publish API Docs from merged job only -if (env.JOB_NAME == "fabric-sdk-node-merge-x86_64") { - apiDocs() -} else { - echo "------> Don't publish API Docs from VERIFY job" - } - - } finally { // Code for coverage report - step([$class: 'CoberturaPublisher', autoUpdateHealth: false, autoUpdateStability: false, coberturaReportFile: '**/cobertura-coverage.xml', failUnhealthy: false, failUnstable: false, failNoReports: false, maxNumberOfBuilds: 0, onlyStable: false, sourceEncoding: 'ASCII', zoomCoverageChart: false]) - archiveArtifacts allowEmptyArchive: true, artifacts: '**/*.log' - if (env.JOB_NAME == "fabric-sdk-node-merge-x86_64") { - if (currentBuild.result == 'FAILURE') { // Other values: SUCCESS, UNSTABLE - // Sends merge failure notifications to Jenkins-robot RocketChat Channel - rocketSend message: "Build Notification - STATUS: *${currentBuild.result}* - BRANCH: 
*${env.GERRIT_BRANCH}* - PROJECT: *${env.PROJECT}* - BUILD_URL: (<${env.BUILD_URL}|Open>)" - } - } - } // finally block - } // timestamps block -} // node block block -} // timeout block - -def publishNpm() { -// Publish npm modules after successful merge - stage("Publish npm Modules") { - sh 'echo "-------> Publish npm Modules"' - def ROOTDIR = pwd() - withCredentials([[$class : 'StringBinding', - credentialsId: 'NPM_LOCAL', - variable : 'NPM_TOKEN']]) { - try { - dir("${ROOTDIR}/$PROJECT_DIR/fabric-sdk-node/scripts/Jenkins_Scripts") { - sh './CI_Script.sh --publish_NpmModules' - } - } - catch (err) { - failure_stage = "publish_NpmModules" - currentBuild.result = 'FAILURE' - throw err - } - } - } -} - -def apiDocs() { -// Publish SDK_NODE API docs after successful merge - stage("Publish API Docs") { - sh 'echo "--------> Publish API Docs"' - def ROOTDIR = pwd() - withCredentials([[$class : 'UsernamePasswordMultiBinding', - credentialsId: 'sdk-node-credentials', - usernameVariable: 'NODE_SDK_USERNAME', - passwordVariable: 'NODE_SDK_PASSWORD']]) { - try { - dir("${ROOTDIR}/$PROJECT_DIR/fabric-sdk-node/scripts/Jenkins_Scripts") { - sh './CI_Script.sh --publish_ApiDocs' - } - } - catch (err) { - failure_stage = "publish_Api_Docs" - currentBuild.result = 'FAILURE' - throw err - } - } - } -} diff --git a/Jenkinsfile.z b/Jenkinsfile.z deleted file mode 100644 index 48e7f433ca..0000000000 --- a/Jenkinsfile.z +++ /dev/null @@ -1,116 +0,0 @@ -// Copyright IBM Corp All Rights Reserved -// -// SPDX-License-Identifier: Apache-2.0 -// -timeout(40) { -node('hyp-z') { - timestamps { - try { - - def ROOTDIR = pwd() // workspace dir (/w/workspace/ - env.NODE_VER = "8.14.0" - // Fetch the BASE_VERSION from fabric master branch to pull images from nexus - env.VERSION = sh(returnStdout: true, script: 'curl -O https://raw.githubusercontent.com/hyperledger/fabric/master/Makefile && cat Makefile | grep "BASE_VERSION =" | cut -d "=" -f2').trim() - env.VERSION = "$VERSION" // BASE_VERSION 
from fabric Makefile - env.ARCH = "s390x" // `uname -m` - env.IMAGE_TAG = "${ARCH}-${VERSION}-stable" // fabric latest stable version from nexus3 - env.PROJECT_VERSION = "${VERSION}-stable" - env.PROJECT_DIR = "gopath/src/github.com/hyperledger" - env.GOPATH = "$WORKSPACE/gopath" - env.PATH = "$GOPATH/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin:~/npm/bin:/home/jenkins/.nvm/versions/node/v${NODE_VER}/bin:$PATH" - def jobname = sh(returnStdout: true, script: 'echo ${JOB_NAME} | grep -q "verify" && echo patchset || echo merge').trim() - def failure_stage = "none" -// delete working directory - deleteDir() - stage("Fetch Patchset") { // fetch gerrit refspec on latest commit - cleanWs() - try { - if (jobname == "patchset") { - println "$GERRIT_REFSPEC" - println "$GERRIT_BRANCH" - checkout([ - $class: 'GitSCM', - branches: [[name: '$GERRIT_REFSPEC']], - extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'gopath/src/github.com/hyperledger/$PROJECT'], [$class: 'CheckoutOption', timeout: 10]], - userRemoteConfigs: [[credentialsId: 'hyperledger-jobbuilder', name: 'origin', refspec: '$GERRIT_REFSPEC:$GERRIT_REFSPEC', url: '$GIT_BASE']]]) - } else { - // Clone fabric-sdk-node on merge - println "Clone $PROJECT repository" - checkout([ - $class: 'GitSCM', - branches: [[name: 'refs/heads/$GERRIT_BRANCH']], - extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'gopath/src/github.com/hyperledger/$PROJECT']], - userRemoteConfigs: [[credentialsId: 'hyperledger-jobbuilder', name: 'origin', refspec: '+refs/heads/$GERRIT_BRANCH:refs/remotes/origin/$GERRIT_BRANCH', url: '$GIT_BASE']]]) - } - dir("${ROOTDIR}/$PROJECT_DIR/$PROJECT") { - sh ''' - # Print last two commit details - echo - git log -n2 --pretty=oneline --abbrev-commit - echo - ''' - } - } - catch (err) { - failure_stage = "Fetch patchset" - throw err - } - } -// clean environment and get env data - stage("Clean Environment - Get Env Info") { - wrap([$class: 
'AnsiColorBuildWrapper', 'colorMapName': 'xterm']) { - try { - dir("${ROOTDIR}/$PROJECT_DIR/fabric-sdk-node/scripts/Jenkins_Scripts") { - sh './CI_Script.sh --clean_Environment --env_Info' - } - } - catch (err) { - failure_stage = "Clean Environment - Get Env Info" - currentBuild.result = 'FAILURE' - throw err - } - } - } - -// Pull fabric, fabric-ca Docker Images - stage("Pull Docker Images") { - wrap([$class: 'AnsiColorBuildWrapper', 'colorMapName': 'xterm']) { - try { - dir("${ROOTDIR}/$PROJECT_DIR/fabric-sdk-node/scripts/Jenkins_Scripts") { - sh './CI_Script.sh --pull_Docker_Images' - } - } - catch (err) { - failure_stage = "Pull docker images" - currentBuild.result = 'FAILURE' - throw err - } - } - } - -// Run gulp tests (headless and integration tests) - stage("Headless & IntegrationTests") { - wrap([$class: 'AnsiColorBuildWrapper', 'colorMapName': 'xterm']) { - try { - dir("${ROOTDIR}/$PROJECT_DIR/fabric-sdk-node/scripts/Jenkins_Scripts") { - sh './CI_Script.sh --sdk_E2e_Tests' - } - } - catch (err) { - failure_stage = "sdk_E2e_Tests" - currentBuild.result = 'FAILURE' - throw err - } - } - } - } finally { // log artifacts - archiveArtifacts allowEmptyArchive: true, artifacts: '**/*.log' - if (env.JOB_NAME == "fabric-sdk-node-merge-s390x") { - if (currentBuild.result == 'FAILURE') { // Other values: SUCCESS, UNSTABLE - rocketSend message: "Build Notification - STATUS: *${currentBuild.result}* - BRANCH: *${env.GERRIT_BRANCH}* - PROJECT: *${env.PROJECT}* - BUILD_URL: (<${env.BUILD_URL}|Open>)" - } - } - } // finally - } // timestamps -} // node -} // timeout diff --git a/README.md b/README.md index 5476eec911..9c1a788065 100644 --- a/README.md +++ b/README.md @@ -105,6 +105,10 @@ HFC defines the following abstract classes for application developers to supply 3. 
If the user application uses an alternative membership service than the one provided by the component `fabric-ca`, the client code will likely need to use an alternative client to `fabric-ca-client` to interact with that membership service. +### Continuous Integration + +Please have a look at [Continuous Integration Process](docs/sdk-node-ci.md) + ### Contributing Check [the documentation](./CONTRIBUTING.md) on how to contribute to this project for the full details. diff --git a/build/tasks/test.js b/build/tasks/test.js index 3a0f4dc82a..5decc0b802 100644 --- a/build/tasks/test.js +++ b/build/tasks/test.js @@ -100,7 +100,8 @@ gulp.task('docker-clean', shell.task([ // clean up all the containers created by docker-compose 'docker-compose -f test/fixtures/docker-compose/docker-compose-tls-level-db.yaml -p node down', - 'docker-compose -f test/fixtures/docker-compose/docker-compose-tls.yaml -p node down' + 'docker-compose -f test/fixtures/docker-compose/docker-compose-tls.yaml -p node down', + 'docker ps -a' ], { verbose: true, // so we can see the docker command output ignoreErrors: true // kill, rm, and rmi may fail because the containers may have been cleaned up or not exist @@ -108,7 +109,8 @@ gulp.task('docker-clean', shell.task([ gulp.task('docker-ready', ['docker-clean'], shell.task([ // make sure that necessary containers are up by docker-compose - 'docker-compose -f test/fixtures/docker-compose/docker-compose-tls-level-db.yaml -p node up -d' + 'docker-compose -f test/fixtures/docker-compose/docker-compose-tls-level-db.yaml -p node up -d && sleep 15', + 'docker ps -a' ])); gulp.task('lint', ['eslint', 'tslint']); diff --git a/ci.properties b/ci.properties new file mode 100644 index 0000000000..618d9379db --- /dev/null +++ b/ci.properties @@ -0,0 +1,15 @@ +# Set nexus if you would like to pull images from nexus3, Choose this for master branch +IMAGE_SOURCE=nexus +# Pull below list of images from nexus3 if IMAGE_SOURCE set to nexus +# Set "nodeenv" and "javaenv" 
if you set IMAGE_SOURCE to build
+FAB_IMAGES_LIST=ca peer orderer ccenv baseos nodeenv javaenv
+# Set base version from related fabric branch
+FAB_BASE_VERSION=2.0.0
+# Set base image version from related fabric branch
+FAB_BASEIMAGE_VERSION=0.4.15
+# Pull below list of images from Hyperledger DockerHub
+FAB_THIRDPARTY_IMAGES_LIST=couchdb
+# Set related rocketChat channel name. Default: jenkins-robot
+CHANNEL_NAME=jenkins-robot
+# Set compatible go version
+GO_VER=1.11.5
diff --git a/docs/images/sdk-node-pipeline-flow.png b/docs/images/sdk-node-pipeline-flow.png
new file mode 100644
index 0000000000..e9cd9a2ce8
Binary files /dev/null and b/docs/images/sdk-node-pipeline-flow.png differ
diff --git a/docs/sdk-node-ci.md b/docs/sdk-node-ci.md
new file mode 100644
index 0000000000..205c88318f
--- /dev/null
+++ b/docs/sdk-node-ci.md
@@ -0,0 +1,126 @@
+# Continuous Integration Process
+
+This document explains the fabric-sdk-node Jenkins pipeline flow and FAQs on the build process to
+help developers become more familiar with the process flow.
+
+To manage CI jobs, we use [JJB](https://docs.openstack.org/infra/jenkins-job-builder). Please see
+the pipeline job configuration template here https://ci-docs.readthedocs.io/en/latest/source/pipeline_jobs.html#job-templates.
+
+## CI Pipeline flow
+
+![](images/sdk-node-pipeline-flow.png)
+
+- Every Gerrit patchset triggers a verify job and runs the below tests from the `Jenkinsfile`
+
+    - gulp
+    - gulp ca
+    - gulp test
+    - gulp test-logging
+    - gulp run-test-scenario targets to run sdk-node tests.
+
+All the above tests run on the Hyperledger infrastructure x86_64 build nodes. All these nodes use
+Packer with pre-configured software packages. This helps us run the tests much faster than
+installing the required packages every time.
+
+As we trigger `fabric-sdk-node-verify-x86_64` pipeline jobs for every gerrit patchset, we execute
+the pipeline stages in the below order.
+ +**VERIFY FLOW** + + CleanEnvironment -- OutputEnvironment -- Checkout SCM -- Build Artifacts -- Headless & Integration Tests + +and below is the series of stages for the merge job flow. (`fabric-sdk-node-merge-x86_64`) + + **MERGE FLOW** + + CleanEnvironment -- OutputEnvironment -- Checkout SCM -- Build Artifacts -- Headless & Integration Tests -- Publish NPM modules -- Publish API Docs + +- After cleanEnvironment and Display the environment details on the Jenkins console, CI scripts + fetches the Gerrit refspec and try to execute **Headless and Integration Tests**. `docker-ready` + is a sub target which will try to pull master latest stable images from Hyperledger DockerHub. + Once the tests are executed successfully, it checks the condition whether it is a verify or merge. + If it is a merge job, Jenkins triggers the **Publish npm modules** and **API Docs** stages and publishes + the npm modules and api docs to gh-pages. + + Note: CI script provides an option to build the images on the latest fabric commit and run the + sdk-node tests. For this you have to modify **IMAGE_SOURCE** to **build** in the ci.properties file. + If you would like to pull the images from nexus, update **IMAGE_SOURCE** to **nexus**. + Though we pull the images from nexus with this change, in release branches the gulp file pulls the + images from dockerhub. So till we change the build process in the gulp file, let's pull these images + from docker hub. + +- Snapshot npm modules can be seen here. https://www.npmjs.com/package/fabric-client, https://www.npmjs.com/package/fabric-ca-client etc.. + +- API docs can be accessible from https://fabric-sdk-node.github.io/master/index.html + +- Jenkins sends build notifications only on the merge failure job. Jenkins sends build notifications + to RocketChat `jenkins-robot` channel and an email to the owner of the patchset. If you would like to + send build notifications to someother channel, simply change the channel name in the ci.properties file. 
+
+See the below **FAQs** to contribute to CI changes.
+
+## FAQs
+
+#### Supported platforms
+
+- x86_64
+- s390x (Not for every patchset but run tests in daily builds)
+
+#### Trigger failed jobs through gerrit comments
+
+Developers can re-trigger failed verify jobs by posting **reverify** as a comment on the gerrit
+change set, which retriggers all the verify jobs. To do so, follow the below process:
+
+Step 1: Open the gerrit patch set for which you want to reverify the build
+
+Step 2: Click on Reply, then type **reverify** and click on post
+
+This kicks off all the fabric-sdk-node verify jobs. Once the build is triggered, you can observe the
+Jenkins console output, if you are interested in viewing the log messages to determine how well the
+build jobs are progressing.
+
+Developers can post the below comments to trigger a particular failed build:
+
+    reverify-x or reverify - to restart the build on the sdk-node-verify x86_64 platform.
+    remerge-x or remerge - to restart the build on the sdk-node-merge x86_64 platform.
+
+#### Where to see the output of the stages?
+
+The pipeline supports two views (stages and blueocean). The stage view shows on the Jenkins job
+main page and it shows each stage in order and the status. For a better view, we suggest you
+access the BlueOcean plugin. Click on the JOB Number and click on the **Open Blue Ocean** link
+that shows the build stages in pipeline view. Also, we capture the `.logs files` and keep them
+on the Job console.
+
+#### How to add more stages to this pipeline flow?
+
+We use scripted pipeline syntax with groovy and shell scripts. Also, we use global shared library
+scripts which are placed in https://github.com/hyperledger/ci-management/tree/master/vars. Try to
+leverage the common functions in your code. All you have to do is understand the pipeline flow of
+the tests, and add one more stage as mentioned in the existing Jenkinsfile.
+
+#### What steps do I have to modify when I create a new branch from master?
+ +As the Jenkinsfile is parametrized, you no need to modify anything in the Jenkinsfile but you may endup modifying ci.properties file with the Base Versions, Baseimage versions, GO_VER etc... as per the new branch configuration. + +#### Build Scripts + +Multiple build scripts are used in fabric-sdk-node CI flow. We use global shared library scripts +and Jenkinsfile. + +Global Shared Library - https://github.com/hyperledger/ci-management/tree/master/vars + +Jenkinsfile - https://github.com/hyperledger/fabric-sdk-node/tree/master/Jenkinsfile + +ci.properties - https://github.com/hyperledger/fabric-sdk-node/tree/master/ci.properties +(ci.properties is the only file you have to modify with the values requried for the specific branch.) + +Packer Scripts - https://github.com/hyperledger/ci-management/blob/master/packer/provision/docker.sh +(Packer is a tool for automatically creating VM and container images, configuring them and +post-processing them into standard output formats. We build Hyperledger's CI images via Packer +and attach them to x86_64 build nodes. On s390x, we install manually. See the packages we install +as a pre-requisite in the CI x86 build nodes.) + +#### How to reach out to CI team? + +Post your questions or feedback in https://chat.hyperledger.org/channel/ci-pipeline or https://chat.hyperledger.org/channel/fabric-ci Rocket Chat channels. Also, we suggest you to create a task/bug in JIRA under FABCI project. 
https://jira.hyperledger.org/projects/FABCI \ No newline at end of file diff --git a/scripts/Jenkins_Scripts/CI_Script.sh b/scripts/Jenkins_Scripts/CI_Script.sh deleted file mode 100755 index 06d804a9ba..0000000000 --- a/scripts/Jenkins_Scripts/CI_Script.sh +++ /dev/null @@ -1,226 +0,0 @@ -#!/bin/bash -e -# -# Copyright IBM Corp All Rights Reserved -# -# SPDX-License-Identifier: Apache-2.0 -# - -export CONTAINER_LIST=(orderer peer0.org1 peer0.org2) -export NEXUS_URL=nexus3.hyperledger.org:10001 -export ORG_NAME="hyperledger/fabric" - -# error check -err_Check() { - - echo -e "\033[31m $1" "\033[0m" - docker images | grep hyperledger && docker ps -a - - # Write orderer, peer logs - for CONTAINER in ${CONTAINER_LIST[*]}; do - docker logs $CONTAINER.example.com >& $CONTAINER.log - done - - # Write ca logs into ca_peerOrg1.log - docker logs ca_peerOrg1 >& ca_peerOrg1.log - # Write ca logs into ca_peerOrg2.log - docker logs ca_peerOrg2 >& ca_peerOrg2.log - # Write couchdb container logs into couchdb.log file - docker logs couchdb >& couchdb.log - - # Copy debug log - cp ${WORKSPACE}/gopath/src/github.com/hyperledger/fabric-sdk-node/test/temp/*.log $WORKSPACE || true - clean_Environment - exit 1 -} - -Parse_Arguments() { - while [ $# -gt 0 ]; do - case $1 in - --env_Info) - env_Info - ;; - --pull_Docker_Images) - pull_Docker_Images - ;; - --clean_Environment) - clean_Environment - ;; - --sdk_E2e_Tests) - sdk_E2e_Tests - ;; - --publish_NpmModules) - publish_NpmModules - ;; - --publish_ApiDocs) - publish_ApiDocs - ;; - esac - shift - done -} - -clean_Environment() { - -echo "-----------> Clean Docker Containers & Images, unused/lefover build artifacts" -function clearContainers () { - CONTAINER_IDS=$(docker ps -aq) - if [ -z "$CONTAINER_IDS" ] || [ "$CONTAINER_IDS" = " " ]; then - echo "---- No containers available for deletion ----" - else - docker rm -f $CONTAINER_IDS || true - fi -} - -function removeUnwantedImages() { - DOCKER_IMAGES_SNAPSHOTS=$(docker images | 
grep snapshot | grep -v grep | awk '{print $1":" $2}') - - if [ -z "$DOCKER_IMAGES_SNAPSHOTS" ] || [ "$DOCKER_IMAGES_SNAPSHOTS" = " " ]; then - echo "---- No snapshot images available for deletion ----" - else - docker rmi -f $DOCKER_IMAGES_SNAPSHOTS || true - fi - DOCKER_IMAGE_IDS=$(docker images | grep -v 'couchdb\|kafka\|zookeeper\|cello' | awk '{print $3}') - - if [ -z "$DOCKER_IMAGE_IDS" ] || [ "$DOCKER_IMAGE_IDS" = " " ]; then - echo "---- No images available for deletion ----" - else - docker rmi -f $DOCKER_IMAGE_IDS || true - fi -} - -# Delete nvm prefix & then delete nvm -rm -rf $HOME/.node-gyp/ $HOME/.npm/ $HOME/.npmrc || true - -# remove tmp/hfc and hfc-key-store data -rm -rf /home/jenkins/npm /tmp/fabric-shim /tmp/hfc* /tmp/npm* /home/jenkins/kvsTemp /home/jenkins/.hfc-key-store /tmp/fabric-binaries - -rm -rf /var/hyperledger/* - -clearContainers -removeUnwantedImages -} - -env_Info() { - # This function prints system info - - #### Build Env INFO - echo -e "\033[32m -----------> Build Env INFO" "\033[0m" - # Output all information about the Jenkins environment - uname -a - cat /etc/*-release - env - gcc --version - docker version - docker info - docker-compose version - pgrep -a docker - docker ps - docker images -} - -# pull fabric, fabric-ca images from nexus -pull_Docker_Images() { - for IMAGES in peer orderer ca baseos ccenv javaenv nodeenv; do - if [ $IMAGES == "javaenv" ]; then - if [ $ARCH == "s390x" ]; then - # Do not pull javaenv if OS_VER == s390x - echo "\033[32m -----------> skipping pull of javaenv image on s390x" "\033[0m" - else - # Pull javaenv at same level as node SDK - echo "\033[32m -----------> pull $ORG_NAME-$IMAGES:${IMAGE_TAG} image" "\033[0m" - echo - docker pull $NEXUS_URL/$ORG_NAME-$IMAGES:${IMAGE_TAG} > /dev/null 2>&1 - if [ $? 
-ne 0 ]; then - echo -e "\033[31m FAILED to pull docker images" "\033[0m" - exit 1 - fi - docker tag $NEXUS_URL/$ORG_NAME-$IMAGES:${IMAGE_TAG} $ORG_NAME-$IMAGES - docker tag $NEXUS_URL/$ORG_NAME-$IMAGES:${IMAGE_TAG} $ORG_NAME-$IMAGES:${ARCH}-${VERSION} - docker tag $NEXUS_URL/$ORG_NAME-$IMAGES:${IMAGE_TAG} $ORG_NAME-$IMAGES:${VERSION} - docker rmi -f $NEXUS_URL/$ORG_NAME-$IMAGES:${IMAGE_TAG} - fi - else - echo "-----------> pull $IMAGES image" - echo - docker pull $NEXUS_URL/$ORG_NAME-$IMAGES:${IMAGE_TAG} > /dev/null 2>&1 - if [ $? -ne 0 ]; then - echo -e "\033[31m FAILED to pull docker images" "\033[0m" - exit 1 - fi - docker tag $NEXUS_URL/$ORG_NAME-$IMAGES:${IMAGE_TAG} $ORG_NAME-$IMAGES - docker tag $NEXUS_URL/$ORG_NAME-$IMAGES:${IMAGE_TAG} $ORG_NAME-$IMAGES:${ARCH}-${VERSION} - docker tag $NEXUS_URL/$ORG_NAME-$IMAGES:${IMAGE_TAG} $ORG_NAME-$IMAGES:${VERSION} - docker rmi -f $NEXUS_URL/$ORG_NAME-$IMAGES:${IMAGE_TAG} - fi - done - echo - docker images | grep hyperledger/fabric -} -# Install NPM -install_Npm() { - -echo "-------> ARCH:" $ARCH -if [[ $ARCH == "s390x" || $ARCH == "ppc64le" ]]; then - # Source nvmrc.sh - source /etc/profile.d/nvmrc.sh - echo "------> Install NodeJS" - # Install NODE_VER - echo "------> Use $NODE_VER" - nvm install $NODE_VER - nvm use --delete-prefix v$NODE_VER --silent - npm install || err_Check "ERROR!!! npm install failed" - npm config set prefix ~/npm && npm install -g gulp && npm install -g istanbul - - echo -e "\033[32m npm version ------> $(npm -v)" "\033[0m" - echo -e "\033[32m node version ------> $(node -v)" "\033[0m" - -else - echo -e "\033[32m npm version ------> $(npm -v)" "\033[0m" - echo -e "\033[32m node version ------> $(node -v)" "\033[0m" - - npm install || err_Check "ERROR!!! 
npm install failed" - npm install -g gulp && npm install -g istanbul -fi -} - -# run sdk e2e tests -sdk_E2e_Tests() { - - cd ${WORKSPACE}/gopath/src/github.com/hyperledger/fabric-sdk-node - - # Install NPM before start the tests - install_Npm - - # Generate crypto material before running the tests - if [ $ARCH == "s390x" ]; then - # Run the s390x gulp task - gulp install-and-generate-certs-s390 || err_Check "ERROR!!! gulp install and generation of test certificates failed" - else - # Run the amd64 gulp task - gulp install-and-generate-certs || err_Check "ERROR!!! gulp install and generation of test certificates failed" - fi - - echo -e "\033[32m Execute Headless and Integration Tests" "\033[0m" - gulp test || err_Check "ERROR!!! gulp test failed" - - echo -e "\033[32m Execute logging test only" "\033[0m" - gulp test-logging || err_Check "ERROR!!! gulp test failed" - - echo -e "\033[32m Execute cucumber tests" "\033[0m" - gulp run-test-scenario || err_Check "ERROR!!! gulp test failed" -} - -# Publish npm modules after successful merge on amd64 -publish_NpmModules() { - echo - echo -e "\033[32m -----------> Publish npm modules from amd64" "\033[0m" - ./Publish_NPM_Modules.sh -} - -# Publish NODE_SDK API docs after successful merge on amd64 -publish_ApiDocs() { - echo - echo -e "\033[32m -----------> Publish NODE_SDK API docs after successful merge on amd64" "\033[0m" - ./Publish_API_Docs.sh -} -Parse_Arguments $@ diff --git a/scripts/check_license.sh b/scripts/check_license.sh index 9a7f95f6ed..6fd3bc94f9 100755 --- a/scripts/check_license.sh +++ b/scripts/check_license.sh @@ -32,6 +32,7 @@ function filterExcludedFiles { | grep -v "\.jar$" \ | grep -v "\.csr$" \ | grep -v "\.proto$" \ + | grep -v "\ci.properties$" \ | sort -u` } diff --git a/scripts/ci_scripts/ciScript.sh b/scripts/ci_scripts/ciScript.sh new file mode 100755 index 0000000000..151b5b3dd8 --- /dev/null +++ b/scripts/ci_scripts/ciScript.sh @@ -0,0 +1,140 @@ +#!/bin/bash -e +# +# Copyright IBM 
Corp All Rights Reserved +# +# SPDX-License-Identifier: Apache-2.0 +# + +export CONTAINER_LIST=(orderer peer0.org1 peer0.org2 ca0 ca1) + +# error check +err_Check() { + + echo -e "\033[31m $1" "\033[0m" + docker images | grep hyperledger && docker ps -a + + # Write orderer, peer & ca logs + for CONTAINER in ${CONTAINER_LIST[*]}; do + docker logs $CONTAINER.example.com >& $CONTAINER.log + done + + # Write couchdb container logs into couchdb.log file + docker logs couchdb >& couchdb.log + + # Copy debug log + cp ${WORKSPACE}/gopath/src/github.com/hyperledger/fabric-sdk-node/test/temp/*.log $WORKSPACE + exit 1 +} + +Parse_Arguments() { + while [ $# -gt 0 ]; do + case $1 in + --sdk_e2e_Tests) + sdk_e2e_Tests + ;; + --publish_NpmModules) + publish_NpmModules + ;; + --publish_ApiDocs) + publish_ApiDocs + ;; + *) + echo "Wrong function called" + exit 1 + ;; + esac + shift + done +} + +# Install npm +install_Npm() { + echo "-------> MARCH:" $MARCH + if [[ $MARCH == "s390x" || $MARCH == "ppc64le" ]]; then + set -x + # Source nvmrc.sh + source /etc/profile.d/nvmrc.sh + # Delete any existing prefix + npm config delete prefix + # Install NODE_VER + echo "------> Use $NODE_VER" + nvm install $NODE_VER || true + nvm use --delete-prefix v$NODE_VER --silent + npm install || err_Check "ERROR!!! npm install failed" + npm config set prefix ~/npm && npm install -g gulp && npm install -g istanbul + + echo -e "\033[32m npm version ------> $(npm -v)" "\033[0m" + echo -e "\033[32m node version ------> $(node -v)" "\033[0m" + set +x + else + echo -e "\033[32m npm version ------> $(npm -v)" "\033[0m" + echo -e "\033[32m node version ------> $(node -v)" "\033[0m" + set -x + npm install || err_Check "ERROR!!! 
npm install failed" + npm install -g gulp && npm install -g istanbul + set +x + fi +} + +# run sdk e2e tests +sdk_e2e_Tests() { + + cd ${WORKSPACE}/gopath/src/github.com/hyperledger/fabric-sdk-node + + # Install npm before start the tests + install_Npm + + # Generate crypto material before running the tests + if [ $ARCH == "s390x" ]; then + set -x + # Run the s390x gulp task + gulp install-and-generate-certs-s390 || err_Check "ERROR!!! gulp install and generation of test certificates failed" + set +x + else + set -x + # Run the amd64 gulp task + gulp install-and-generate-certs || err_Check "ERROR!!! gulp install and generation of test certificates failed" + set +x + fi + + echo " ########################" + echo -e "\033[1m R U N g u l p T E S T S \033[0m" + echo " ####################### " + + echo -e "\033[32m Execute Headless and Integration Tests" "\033[0m" + set -x + gulp test || err_Check "ERROR!!! gulp test failed" + set +x + + echo " ############################# " + echo -e "\033[1m R U N test-logging T E S T S \033[0m" + echo " ############################# " + + set -x + gulp test-logging || err_Check "ERROR!!! gulp test-logging failed" + set +x + + echo " ################################## " + echo -e "\033[1m R U N run-test-scenario T E S T S \033[0m" + echo " ################################## " + + echo -e "\033[32m Execute cucumber tests" "\033[0m" + set -x + gulp run-test-scenario || err_Check "ERROR!!! 
gulp run-test-scenario failed" + set +x +} + +# Publish npm modules after successful merge on amd64 +publish_NpmModules() { + echo + echo -e "\033[32m -----------> Publish npm modules from amd64" "\033[0m" + ./publishNpmModules.sh +} + +# Publish NODE_SDK API docs after successful merge on amd64 +publish_ApiDocs() { + echo + echo -e "\033[32m -----------> Publish NODE_SDK API docs after successful merge on amd64" "\033[0m" + ./publishApiDocs.sh +} +Parse_Arguments $@ diff --git a/scripts/Jenkins_Scripts/Publish_API_Docs.sh b/scripts/ci_scripts/publishApiDocs.sh similarity index 71% rename from scripts/Jenkins_Scripts/Publish_API_Docs.sh rename to scripts/ci_scripts/publishApiDocs.sh index 5207addcf2..769ba794dd 100755 --- a/scripts/Jenkins_Scripts/Publish_API_Docs.sh +++ b/scripts/ci_scripts/publishApiDocs.sh @@ -24,4 +24,11 @@ git commit -m "SDK commit - $SDK_COMMIT" # Credentials are stored as Global Variables in Jenkins git config remote.gh-pages.url https://$NODE_SDK_USERNAME:$NODE_SDK_PASSWORD@github.com/$NODE_SDK_USERNAME/$TARGET_REPO # Push API docs to target repository + +echo " ____ _ _ ____ _ _ _ ____ ___ ____ ___ ____ ____ " +echo "| _ \| | | / ___|| | | | / \ | _ \_ _| | _ \ / _ \ / ___/ ___| " +echo "| |_) | | | \___ \| |_| | / _ \ | |_) | | | | | | | | | | \___ \ " +echo "| __/| |_| |___) | _ | / ___ \| __/| | | |_| | |_| | |___ ___) | " +echo "|_| \___/|____/|_| |_| /_/ \_\_| |___| |____/ \___/ \____|____/ " + git push gh-pages master diff --git a/scripts/Jenkins_Scripts/Publish_NPM_Modules.sh b/scripts/ci_scripts/publishNpmModules.sh similarity index 55% rename from scripts/Jenkins_Scripts/Publish_NPM_Modules.sh rename to scripts/ci_scripts/publishNpmModules.sh index 5ea13450d4..3737f438af 100755 --- a/scripts/Jenkins_Scripts/Publish_NPM_Modules.sh +++ b/scripts/ci_scripts/publishNpmModules.sh @@ -5,14 +5,14 @@ # SPDX-License-Identifier: Apache-2.0 # -MODULES="fabric-protos fabric-common fabric-ca-client fabric-client fabric-network" 
+nodeModules="fabric-protos fabric-common fabric-ca-client fabric-client fabric-network" npmPublish() { - - if [[ "$CURRENT_TAG" = *"skip"* ]]; then - echo -e "\033[34m----> Don't publish $1 npm modules on skip tag \033[0m" - elif [[ "$CURRENT_TAG" = *"unstable"* ]]; then + if [[ "$CURRENT_TAG" = *"skip"* ]]; then + echo -e "\033[34m----> Don't publish $1 npm modules on skip tag \033[0m" + elif [[ "$CURRENT_TAG" = *"unstable"* ]]; then echo + # Get the current unstable version of a module from npm registry UNSTABLE_VER=$(npm dist-tags ls "$1" | awk "/$CURRENT_TAG"":"/'{ ver=$NF rel=$NF @@ -26,14 +26,11 @@ npmPublish() { # Get last digit of the unstable version built above UNSTABLE_INCREMENT=$(echo $UNSTABLE_VER| rev | cut -d '.' -f 1 | rev) fi - echo -e "\033[32m======> UNSTABLE_INCREMENT:" $UNSTABLE_INCREMENT "\033[0m" - # Append last digit with the package.json version export UNSTABLE_INCREMENT_VERSION=$RELEASE_VERSION.$UNSTABLE_INCREMENT echo -e "\033[32m======> UNSTABLE_INCREMENT_VERSION:" $UNSTABLE_INCREMENT_VERSION "\033[0" - - for module in ${MODULES}; do + for module in ${nodeModules}; do sed -i "s/\"${module}\": \".*\"/\"${module}\": \"${CURRENT_TAG}\"/" package.json done @@ -44,55 +41,54 @@ npmPublish() { # Publish unstable versions to npm registry npm publish --tag $CURRENT_TAG if [ $? 
!= 0 ]; then - echo -e "\033[31m FAILED to Publish $CURRENT_TAG of $1 npm module" "\033[0m" - exit 1 + echo -e "\033[31m FAILED to publish $CURRENT_TAG of $1 npm module" "\033[0m" + exit 1 fi echo -e "\033[32m ========> PUBLISHED $CURRENT_TAG tag of $1 npm module SUCCESSFULLY" "\033[0m" - else - # Publish node modules on latest tag - echo -e "\033[32m ========> PUBLISH $RELEASE_VERSION" "\033[0m" - - for module in ${MODULES}; do - sed -i "s/\"${module}\": \".*\"/\"${module}\": \"${CURRENT_TAG}\"/" package.json - done - - npm publish --tag $CURRENT_TAG + else + # Publish node modules on latest tag + echo -e "\033[32m ========> PUBLISH $RELEASE_VERSION" "\033[0m" + for module in ${nodeModules}; do + sed -i "s/\"${module}\": \".*\"/\"${module}\": \"${CURRENT_TAG}\"/" package.json + done - if [ $? != 0 ]; then - echo -e "\033[31m FAILED TO PUBLISH $CURRENT_TAG of $1 npm module" "\033[0m" - exit 1 - fi - echo -e "\033[32m ========> PUBLISHED $CURRENT_TAG tag of $1 npm module SUCCESSFULLY" "\033[0m" + npm publish --tag $CURRENT_TAG + if [ $? 
!= 0 ]; then + echo -e "\033[31m FAILED TO PUBLISH $CURRENT_TAG of $1 npm module" "\033[0m" + exit 1 + fi + echo -e "\033[32m ========> PUBLISHED $CURRENT_TAG tag of $1 npm module SUCCESSFULLY" "\033[0m" fi } versions() { - # Get the unstable tag from package.json + # Get the unstable tag from package.json CURRENT_TAG=$(grep '"tag":' package.json | cut -d\" -f4) echo -e "\033[32m ======> Current TAG: $CURRENT_TAG" "\033[0m" - # Get the version from package.json RELEASE_VERSION=$(grep '"version":' package.json | cut -d\" -f4) echo -e "\033[32m ======> Current Version: $RELEASE_VERSION" "\033[0m" } -############ -# START HERE -############ +echo " ____ _ _ ____ _ ___ ____ _ _ _ _ ____ __ __" +echo "| _ \| | | | __ )| | |_ _/ ___|| | | | | \ | | _ \| \/ |" +echo "| |_) | | | | _ \| | | |\___ \| |_| | | \| | |_) | |\/| |" +echo "| __/| |_| | |_) | |___ | | ___) | _ | | |\ | __/| | | |" +echo "|_| \___/|____/|_____|___|____/|_| |_| |_| \_|_| |_| |_|" -echo -e "\033[34m----------> START PUBLISHING FROM HERE" "\033[0m" cd $WORKSPACE/gopath/src/github.com/hyperledger/fabric-sdk-node # Set NPM_TOKEN from CI configuration +# Please post in #ci-pipeline channel if you observe npm_token issue npm config set //registry.npmjs.org/:_authToken=$NPM_TOKEN -# Add or delete modules from here.. -for module in ${MODULES}; do - if [ -d "$module" ]; then - echo -e "\033[32m Publishing $module" "\033[0m" - cd $module - versions - npmPublish $module - cd - - fi +# Publish node modules +for module in ${nodeModules}; do + if [ -d "$module" ]; then + echo -e "\033[32m Publishing $module" "\033[0m" + cd $module + versions + npmPublish $module + cd - + fi done