diff --git a/.github/workflows/backend-tests.yml b/.github/workflows/backend-tests.yml index 8312a062..96394519 100644 --- a/.github/workflows/backend-tests.yml +++ b/.github/workflows/backend-tests.yml @@ -18,7 +18,7 @@ jobs: with: node-version: ${{ matrix.node-version }} # kmap.import-action.start-db # - - run: node build/start-database + - run: node build/start-database -d - run: npm install -g yarn # kmap.import-action.cache # diff --git a/.github/workflows/frontend-tests.yml b/.github/workflows/frontend-tests.yml index def5062c..19683f12 100644 --- a/.github/workflows/frontend-tests.yml +++ b/.github/workflows/frontend-tests.yml @@ -76,7 +76,7 @@ jobs: run: echo fs.inotify.max_user_watches=524288 | sudo tee -a /etc/sysctl.conf && sudo sysctl -p # kmap.import-action.start-db # - - run: node build/start-database + - run: node build/start-database -d - run: yarn install --frozen-lockfile --prefer-offline --network-timeout 1000000 diff --git a/backend/Dockerfile b/backend/Dockerfile new file mode 100644 index 00000000..0515e617 --- /dev/null +++ b/backend/Dockerfile @@ -0,0 +1,54 @@ +# syntax=docker/dockerfile:1 +# This implements a multi-stage container build process + +# First build the project +FROM node:14-alpine AS dev +RUN apk update && apk add --no-cache python g++ make bash + +# Set environment variables +ENV DOCKER=DOCKER + +# Set our working directory +WORKDIR /usr/src/kmap/backend + +# Install our dependencies +COPY package*.json ./ +COPY yarn.lock* ./ +COPY scripts scripts/ +RUN chmod +x scripts/backend-prebuild.sh +RUN yarn install + +# Build the project +COPY . . +RUN yarn run build + +# Now build the container that is used in production +FROM node:14.17-alpine as prod + +# Install bash +RUN apk update && apk add --no-cache bash + +# Set environment variables +ARG NODE_ENV=production +ENV NODE_ENV=${NODE_ENV} +ENV DOCKER=DOCKER + +# Set our working directory +WORKDIR /usr/src/kmap/backend + +# Install our dependencies (production only) +COPY package*.json ./ +COPY yarn.lock* ./ +COPY scripts scripts/ +RUN chmod +x scripts/backend-prebuild.sh +RUN yarn install --production + +# Copy the dist folder from the dev container +COPY --from=dev /usr/src/kmap/backend/dist dist + +# Copy the docker specific scripts and files +COPY docker docker + +# Start +RUN chmod +x docker/run.sh +CMD ["docker/run.sh"] diff --git a/backend/docker/run.sh b/backend/docker/run.sh new file mode 100644 index 00000000..95b1a48c --- /dev/null +++ b/backend/docker/run.sh @@ -0,0 +1,19 @@ +#!/usr/bin/env bash + +# If the host points to localhost (or is not set), we need have to replace this +# with the address of the docker host +if [[ -z ${NEO4J_HOST} || ${NEO4J_HOST} == localhost || ${NEO4J_HOST} == 127.0.0.1 || ${NEO4J_HOST} == ::1 ]]; then + echo "Rewrite neo4j host to docker host, as localhost was specified" + export NEO4J_HOST=$(ip route show | awk '/default/ {print $3}'); +fi + +if [[ -z ${NEO4J_PORT} ]]; then + echo "Using default neo4j port 7687" + export NEO4J_PORT=7687; +fi + +# TODO +export NEO4J_SCHEME=neo4j; + +echo "Starting backend..." 
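+# Run the compiled entry point that the build stage placed in dist/ (see backend/Dockerfile)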
+node dist/src/main.js diff --git a/backend/package.json b/backend/package.json index 93100a9b..a725cff2 100644 --- a/backend/package.json +++ b/backend/package.json @@ -7,11 +7,11 @@ "license": "MIT", "scripts": { "postinstall": "run-script-os", - "postinstall:windows": "@powershell -NoProfile -ExecutionPolicy Unrestricted -Command ..\\build\\backend-prebuild.ps1", - "postinstall:default": "../build/backend-prebuild.sh", + "postinstall:windows": "@powershell -NoProfile -ExecutionPolicy Unrestricted -Command .\\scripts\\backend-prebuild.ps1", + "postinstall:default": "scripts/backend-prebuild.sh", "prebuild": "run-script-os", - "prebuild:windows": "@powershell -NoProfile -ExecutionPolicy Unrestricted -Command ..\\build\\backend-prebuild.ps1", - "prebuild:default": "../build/backend-prebuild.sh", + "prebuild:windows": "@powershell -NoProfile -ExecutionPolicy Unrestricted -Command .\\scripts\\backend-prebuild.ps1", + "prebuild:default": "scripts/backend-prebuild.sh", "build": "nest build", "format": "prettier --write \"src/**/*.ts\" \"test/**/*.ts\"", "start": "nest start", diff --git a/build/backend-prebuild.ps1 b/backend/scripts/backend-prebuild.ps1 similarity index 90% rename from build/backend-prebuild.ps1 rename to backend/scripts/backend-prebuild.ps1 index 3f74ece2..83f237ff 100644 --- a/build/backend-prebuild.ps1 +++ b/backend/scripts/backend-prebuild.ps1 @@ -1,5 +1,5 @@ # Goto the backend dir -cd ../backend +cd $PSScriptRoot\.. # Copy shared files New-Item -ItemType Directory -Force -Path ".\src\shared" diff --git a/backend/scripts/backend-prebuild.sh b/backend/scripts/backend-prebuild.sh new file mode 100755 index 00000000..0cbfea55 --- /dev/null +++ b/backend/scripts/backend-prebuild.sh @@ -0,0 +1,18 @@ +#!/usr/bin/env bash + +# Please make sure that this file is executable if you are working in a windows environment +# This can be done by running "git update-index --chmod=+x .\backend-prebuild.sh" in the build directory + +# Goto the backend dir +SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )" +cd "$SCRIPT_DIR"/.. + +# Only execute if not running inside a docker container +if [ "$DOCKER" != "DOCKER" ]; then + # Copy shared files + cp -a ../shared/src/. 
./src/shared/ + + # Execute linter + rimraf dist + yarn lint +fi diff --git a/backend/src/config/neo4j/createNeo4jDriver.ts b/backend/src/config/neo4j/createNeo4jDriver.ts index 06cafa25..420295c0 100644 --- a/backend/src/config/neo4j/createNeo4jDriver.ts +++ b/backend/src/config/neo4j/createNeo4jDriver.ts @@ -6,22 +6,57 @@ import { Driver, } from 'neo4j-driver'; -/** - * Adapted from @nest-neo4j's createDriver - * @param config - * @param neo4jConfig - */ -export const createNeo4jDriver = async ( +function delay(timeout: number): Promise { + if (timeout <= 0) { + return Promise.resolve(); + } + + return new Promise((resolve) => { + setTimeout(() => { + resolve(); + }, timeout); + }); +} + +async function tryConnect(driver: Driver): Promise { + let session; + try { + await driver.verifyConnectivity(); + session = driver.session(); + await session.run('MATCH (n) RETURN n LIMIT 1'); + return true; + } catch (e) { + return false; + } finally { + await session?.close(); + } +} + +async function createDriverWithRetry( config: Neo4jConfig, neo4jConfig: OriginalNeo4jConfig -): Promise => { +): Promise { const driver = neo4jDriver( `${config.scheme}://${config.host}:${config.port}`, auth.basic(config.username, config.password), neo4jConfig ); - await driver.verifyConnectivity(); + // eslint-disable-next-line no-await-in-loop + while (!(await tryConnect(driver))) { + const timeout = 5000; + // eslint-disable-next-line no-console + console.log(`Connection Error: Retrying in ${timeout}ms`); + // eslint-disable-next-line no-await-in-loop + await delay(timeout); + } return driver; -}; +} + +/** + * Adapted from @nest-neo4j's createDriver + * @param config + * @param neo4jConfig + */ +export const createNeo4jDriver = createDriverWithRetry; diff --git a/build/backend-prebuild.sh b/build/backend-prebuild.sh deleted file mode 100755 index 3bc3e8a9..00000000 --- a/build/backend-prebuild.sh +++ /dev/null @@ -1,14 +0,0 @@ -#!/usr/bin/env bash - -# Please make sure that this file is executable if you are working in a windows environment -# This can be done by running "git update-index --chmod=+x .\backend-prebuild.sh" in the build directory - -# Goto the backend dir -cd ../backend - -# Copy shared files -cp -a ../shared/src/. ./src/shared/ - -# Execute linter -rimraf dist -yarn lint diff --git a/build/frontend-prebuild.sh b/build/frontend-prebuild.sh deleted file mode 100755 index 124779c6..00000000 --- a/build/frontend-prebuild.sh +++ /dev/null @@ -1,13 +0,0 @@ -#!/usr/bin/env bash - -# Please make sure that this file is executable if you are working in a windows environment -# This can be done by running "git update-index --chmod=+x .\frontend-prebuild.sh" in the build directory - -# Goto the frontend dir -cd ../frontend - -# Copy shared files -cp -a ../shared/src/. ./src/shared/ - -# Execute linter -yarn lint diff --git a/build/parse-args.js b/build/parse-args.js new file mode 100644 index 00000000..2bee6b29 --- /dev/null +++ b/build/parse-args.js @@ -0,0 +1,66 @@ +const debug = false; + +function camelize(str) { + return str + .split('-') + .map( + (item, index) => index > 0 ? item.charAt(0).toUpperCase() + item.slice(1).toLowerCase() : item.toLowerCase() + ).join(""); +} + +function logFound(arg, key, value) { + if (debug) { + console.log("Found argument '" + arg + "'. 
Writing property '" + key + "' with value '" + value + "' to args object."); + } +} + +const parseArgs = function(input, flags, args) { + const result = {}; + + for (let i = 0; i < input.length; i++) { + const key = input[i]; + let argProcessed = false; + + if (flags) { + for (const flag of flags) { + if ('--' + flag.name === key || (typeof flag.abbr === 'string' && '-' + flag.abbr === key)) { + + const camelizedName = camelize(flag.name); + + logFound(flag.name, camelizedName, 'true'); + + result[camelizedName] = true; + argProcessed = true; + } + } + } + + if (!argProcessed && args) { + for (const arg of args) { + if ('--' + arg.name === key) { + i++; + + if (i >= input.length) { + throw new Error('No value present for command-line argument:' + key); + } + + const value = input[i]; + const camelizedName = camelize(arg.name); + + logFound(arg.name, camelizedName, value); + + result[camelizedName] = value; + argProcessed = true; + } + } + } + + if (!argProcessed) { + throw new Error('Unknown command-line argument: ' + key); + } + } + + return result; +} + +exports.parseArgs = parseArgs; diff --git a/build/start-database.js b/build/start-database.js index b0125d36..dff3156e 100644 --- a/build/start-database.js +++ b/build/start-database.js @@ -1,40 +1,99 @@ -/* -Starts the database docker container with the testing dump. - */ - const path = require("path"); const { execSync } = require("child_process"); +const { parseArgs } = require("./parse-args"); const containerName="neo4j-db" const repoPath = path.join(__dirname, ".."); -const mountPath = path.join(repoPath, "backend", "docker", "mount"); -const pluginsPath = path.join(repoPath, "backend", "docker", "plugins"); +const mountPath = path.join(repoPath, "database", "mount"); +const pluginsPath = path.join(repoPath, "database", "plugins"); -async function main() { - try { - execSync(`docker container stop ${containerName}`) - } catch(e) {} +const flags = [ + { + name: 'detached', + abbr: 'd' + } +]; + +const args = [ + { name: 'mount-path' }, + { name: 'plugin-path' }, + { name: 'db-password' }, + { name: 'container-name' }, + { name: 'dump' }, +]; + +function buildCommand(args) { + let command = `docker run ${args.detached ? 
'-d' : ''} -p 7474:7474 -p 7687:7687`; + command += ` -v ${args.mountPath}:/mnt/amos`; + command += ` -v ${args.pluginsPath}:/plugins`; + command += ` --env NEO4J_ACCEPT_LICENSE_AGREEMENT=yes`; + command += ` --env DB_PASSWORD=${args.dbPassword}`; + command += ` --env DB_PATH=/mnt/amos/dumps/${args.dump}.dump`; + command += ` --env 'NEO4JLABS_PLUGINS=["apoc", "graph-data-science"]'`; + command += ` --env 'NEO4J_dbms_security_procedures_unrestricted=apoc.*,gds.*'`; + command += ` --env 'NEO4J_dbms_security_procedures_allowlist=apoc.*,gds.*'`; + command += ` --env NEO4J_apoc_import_file_enabled=true`; + command += ` --name ${args.containerName} neo4j:4.2-enterprise`; + command += ` /mnt/amos/load-dump.sh`; + + return command; +} + +function postProcessArgs(args) { + const result = { ...args }; + + if (result.mountPath === undefined) { + result.mountPath = mountPath; + } + + if (result.pluginsPath === undefined) { + result.pluginsPath = pluginsPath; + } + + if (result.dbPassword === undefined) { + result.dbPassword = 'amos'; + } + + if (result.containerName === undefined) { + result.containerName = containerName; + } + + if (result.dump === undefined) { + result.dump = 'testing-dump'; + } + + return result; +} + +function execCMDAndCatchErrors(cmd) { + const isWin = process.platform === "win32"; + let execOptions = { stdio: 'inherit' }; + + if (isWin) { + execOptions.shell = 'powershell.exe'; + } try { - execSync(`docker container rm ${containerName}`) - } catch(e) {} - - const command = ` - docker run -d -p 7474:7474 -p 7687:7687 \\ - -v ${mountPath}:/mnt/amos \\ - -v ${pluginsPath}:/plugins \\ - --env NEO4J_ACCEPT_LICENSE_AGREEMENT=yes \\ - --env DB_PASSWORD=amos \\ - --env DB_PATH=/mnt/amos/dumps/testing-dump.dump \\ - --env 'NEO4JLABS_PLUGINS=["apoc", "graph-data-science"]' \\ - --env 'NEO4J_dbms_security_procedures_unrestricted=apoc.*,gds.*' \\ - --env 'NEO4J_dbms_security_procedures_allowlist=apoc.*,gds.*' \\ - --env NEO4J_apoc_import_file_enabled=true \\ - --name ${containerName} neo4j:4.2-enterprise \\ - /mnt/amos/load-dump.sh -` - - execSync(command); + execSync(cmd, execOptions) + } catch(e) { + console.log("Error executing command."); + console.log(e); + } +} + +/* + * Starts the database docker container with the testing dump. 
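+ * Supports the flags and arguments declared above, e.g. "-d"/"--detached" to run the container
+ * in the background and "--dump" to load a different dump from database/mount/dumps (name without
+ * the ".dump" suffix); unspecified values fall back to the defaults set in postProcessArgs.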
+ */ +async function main() { + const input = process.argv.slice(2); + const parsedArgs = parseArgs(input, flags, args); + const postProcessedArgs = postProcessArgs(parsedArgs); + + execCMDAndCatchErrors(`docker container stop ${postProcessedArgs.containerName}`); + execCMDAndCatchErrors(`docker container rm ${postProcessedArgs.containerName}`); + + const command = buildCommand(postProcessedArgs); + execCMDAndCatchErrors(command); } main(); diff --git a/build/workflows/actions/start-db.yml b/build/workflows/actions/start-db.yml index b0459a8e..db5ecfd3 100644 --- a/build/workflows/actions/start-db.yml +++ b/build/workflows/actions/start-db.yml @@ -1 +1 @@ -- run: node build/start-database +- run: node build/start-database -d diff --git a/backend/docker/.gitignore b/database/.gitignore similarity index 100% rename from backend/docker/.gitignore rename to database/.gitignore diff --git a/backend/docker/README.md b/database/README.md similarity index 100% rename from backend/docker/README.md rename to database/README.md diff --git a/backend/docker/mount/dumps/northwind.dump b/database/mount/dumps/northwind.dump similarity index 100% rename from backend/docker/mount/dumps/northwind.dump rename to database/mount/dumps/northwind.dump diff --git a/backend/docker/mount/dumps/testing-dump-big.dump b/database/mount/dumps/testing-dump-big.dump similarity index 100% rename from backend/docker/mount/dumps/testing-dump-big.dump rename to database/mount/dumps/testing-dump-big.dump diff --git a/backend/docker/mount/dumps/testing-dump.dump b/database/mount/dumps/testing-dump.dump similarity index 100% rename from backend/docker/mount/dumps/testing-dump.dump rename to database/mount/dumps/testing-dump.dump diff --git a/backend/docker/mount/load-dump.sh b/database/mount/load-dump.sh similarity index 100% rename from backend/docker/mount/load-dump.sh rename to database/mount/load-dump.sh diff --git a/backend/docker/northwind-ER-diagram.png b/database/northwind-ER-diagram.png similarity index 100% rename from backend/docker/northwind-ER-diagram.png rename to database/northwind-ER-diagram.png diff --git a/backend/docker/northwind-graph.png b/database/northwind-graph.png similarity index 100% rename from backend/docker/northwind-graph.png rename to database/northwind-graph.png diff --git a/deploy.ps1 b/deploy.ps1 new file mode 100644 index 00000000..9edf8905 --- /dev/null +++ b/deploy.ps1 @@ -0,0 +1,35 @@ +function CopyFilesToFolder ($fromFolder, $toFolder) { + xcopy /S /I /Q /Y /F $fromFolder $toFolder +} + +# Deploy the backend to a docker container +# First copy the shared files, as this is not done during the build process when running in docker +New-Item -ItemType Directory -Force -Path "./backend/src/shared" | Out-Null +xcopy /S /I /Q /Y /F ".\shared\src" ".\backend\src\shared" | Out-Null + +# Now build the container as spec'ed by the backend dockerfile +docker build -t kmap.backend ./backend + +# Deploy the frontend to a docker container that runs nginx +# First copy the shared files, as this is not done during the build process when running in docker +New-Item -ItemType Directory -Force -Path "./frontend/src/shared" | Out-Null +xcopy /S /I /Q /Y /F ".\shared\src" ".\frontend\src\shared" | Out-Null + +# Remove the .env file, if it is present. 
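+# Otherwise react-scripts would bake any local development settings from that file into the production bundle.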
+if (Test-Path .\frontend\.env) { + Remove-Item .\frontend\.env +} + +# Now build the container as spec'ed by the backend dockerfile +docker build -t kmap.frontend ./frontend + +# Compose the output folder +if (Test-Path .\artifacts ) { + Remove-Item -Recurse -Force .\artifacts +} +New-Item -ItemType Directory -Force -Path .\artifacts | Out-Null +CopyFilesToFolder ".\deploy" ".\artifacts" +CopyFilesToFolder ".\database" ".\artifacts\database" +New-Item -ItemType Directory -Force -Path .\artifacts\images | Out-Null +docker save -o .\artifacts\images\kmap.backend kmap.backend +docker save -o .\artifacts\images\kmap.frontend kmap.frontend \ No newline at end of file diff --git a/deploy/backend.env b/deploy/backend.env new file mode 100644 index 00000000..e3eb906e --- /dev/null +++ b/deploy/backend.env @@ -0,0 +1,5 @@ +NEO4J_HOST=neo4j-db +NEO4J_PORT=7687 +NEO4J_USERNAME=neo4j +NEO4J_PASSWORD=amos +NEO4J_DATABASE=neo4j diff --git a/deploy/database.env b/deploy/database.env new file mode 100644 index 00000000..9675afa2 --- /dev/null +++ b/deploy/database.env @@ -0,0 +1,7 @@ +NEO4J_ACCEPT_LICENSE_AGREEMENT=yes +DB_PASSWORD=amos +DB_PATH=/mnt/amos/dumps/northwind.dump +NEO4JLABS_PLUGINS=["apoc", "graph-data-science"] +NEO4J_dbms_security_procedures_unrestricted=apoc.*,gds.* +NEO4J_dbms_security_procedures_allowlist=apoc.*,gds.* +NEO4J_apoc_import_file_enabled=true diff --git a/deploy/docker-compose.yml b/deploy/docker-compose.yml new file mode 100644 index 00000000..c1d1c368 --- /dev/null +++ b/deploy/docker-compose.yml @@ -0,0 +1,35 @@ +version: "3.9" +services: + neo4j-db: + image: neo4j:4.2-enterprise + restart: always + command: /mnt/amos/load-dump.sh + expose: + - "7687" + working_dir: /mnt/amos + volumes: + - ./database/mount:/mnt/amos + env_file: + - ./database.env + networks: + - kmap-network + kmap.backend: + image: kmap.backend + restart: always + expose: + - "8080" + env_file: + - ./backend.env + networks: + - kmap-network + kmap.frontend: + image: kmap.frontend + restart: always + ports: + - 80:80 + env_file: + - ./frontend.env + networks: + - kmap-network +networks: + kmap-network: diff --git a/deploy/frontend.env b/deploy/frontend.env new file mode 100644 index 00000000..e69de29b diff --git a/deploy/kmap.sh b/deploy/kmap.sh new file mode 100644 index 00000000..967f7acc --- /dev/null +++ b/deploy/kmap.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env bash + +docker-compose down + +docker load -i ./images/kmap.frontend +docker load -i ./images/kmap.backend + +docker-compose up -d diff --git a/frontend/Dockerfile b/frontend/Dockerfile new file mode 100644 index 00000000..57b21cb9 --- /dev/null +++ b/frontend/Dockerfile @@ -0,0 +1,45 @@ +# syntax=docker/dockerfile:1 +# This implements a multi-stage container build process + +# First build the project +FROM node:14-alpine AS dev +RUN apk update && apk add --no-cache python g++ make bash + +# Set environment variables +ENV DOCKER=DOCKER + +# Set our working directory +WORKDIR /usr/src/kmap/frontend + +# Install our dependencies +COPY package*.json ./ +COPY yarn.lock* ./ +COPY scripts scripts/ +RUN chmod +x scripts/frontend-prebuild.sh +RUN yarn install + +# Build the project +COPY . . 
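+# react-scripts outputs the static bundle to build/, which the prod stage below copies into dist/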
+RUN yarn run build + +# Now build the container that is used in production +FROM nginx:1.20.1-alpine as prod + +# Install bash +RUN apk update && apk add --no-cache bash + +# Set our working directory +WORKDIR /usr/src/kmap/frontend + +# Copy the build folder from the dev container +COPY --from=dev /usr/src/kmap/frontend/build dist + +# Copy the docker specific scripts and files +COPY docker docker + +# Copy the nginx configuration +RUN mv docker/nginx.conf /etc/nginx/nginx.conf + +# Start +RUN chmod +x docker/run.sh +CMD ["docker/run.sh"] diff --git a/frontend/cypress.json b/frontend/cypress.json index 73fff938..f0fab91a 100644 --- a/frontend/cypress.json +++ b/frontend/cypress.json @@ -1,5 +1,6 @@ { "video": false, "experimentalFetchPolyfill": true, - "componentFolder": "cypress/unit" + "componentFolder": "cypress/unit", + "baseUrl": "http://localhost:3000" } diff --git a/frontend/cypress/integration/exploration/exploration.e2e.js b/frontend/cypress/integration/exploration/exploration.e2e.js index 038295dd..0412dd41 100644 --- a/frontend/cypress/integration/exploration/exploration.e2e.js +++ b/frontend/cypress/integration/exploration/exploration.e2e.js @@ -3,6 +3,7 @@ import layoutsData from '../../../src/exploration/previews/layoutsData'; context('Exploration', () => { // Global setup beforeEach(() => { + cy.wait(5000); cy.visit('http://localhost:3000/exploration'); }); @@ -33,6 +34,12 @@ context('Exploration', () => { cy.url().should('eq', `http://localhost:3000${layoutsData.H.path}`); }); + it('routes to chord page', () => { + cy.get('.Previews'); + cy.get('.LayoutPreview').contains('Chord Diagram').click(); + cy.url().should('eq', `http://localhost:3000${layoutsData.P.path}`); + }); + // TODO: add more tests when combined with questions }); }); diff --git a/frontend/cypress/integration/visualization/tabs.e2e.js b/frontend/cypress/integration/visualization/tabs.e2e.js index c3e8b500..c055a1af 100644 --- a/frontend/cypress/integration/visualization/tabs.e2e.js +++ b/frontend/cypress/integration/visualization/tabs.e2e.js @@ -29,4 +29,9 @@ context('Visualization Tabs', () => { cy.get('.MuiTabs-root').contains('Schema').click(); cy.get('main').contains('Schema'); }); + + it('has chord tab', () => { + cy.get('.MuiTabs-root').contains('Chord Diagram').click(); + cy.get('main').get('.svg-container'); + }); }); diff --git a/frontend/docker/nginx.conf b/frontend/docker/nginx.conf new file mode 100644 index 00000000..47a02601 --- /dev/null +++ b/frontend/docker/nginx.conf @@ -0,0 +1,25 @@ +error_log logs/error.log; +pid logs/nginx.pid; + +events { + worker_connections 1024; +} + +http { + server { + # IPv4 + listen *:80; + # IPv6 + listen [::]:80; + + location /api { + proxy_set_header Host $host; + proxy_pass http://kmap.backend:8080; + } + + location / { + root /usr/src/kmap/frontend/dist; + try_files $uri /index.html; + } + } +} diff --git a/frontend/docker/run.sh b/frontend/docker/run.sh new file mode 100644 index 00000000..b12ddf6c --- /dev/null +++ b/frontend/docker/run.sh @@ -0,0 +1,7 @@ +#!/usr/bin/env bash + +mkdir /etc/nginx/logs +touch /etc/nginx/logs/error.log + +echo "Starting nginx..." 
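+# "daemon off" keeps nginx in the foreground so it stays the container's main process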
+nginx -g 'daemon off;'; diff --git a/frontend/package.json b/frontend/package.json index 3b1f7b4f..530e1c67 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -7,11 +7,11 @@ "start": "react-scripts start", "start:forTests": "react-scripts -r @cypress/instrument-cra start", "postinstall": "run-script-os", - "postinstall:windows": "@powershell -NoProfile -ExecutionPolicy Unrestricted -Command ..\\build\\frontend-prebuild.ps1", - "postinstall:default": "../build/frontend-prebuild.sh", + "postinstall:windows": "@powershell -NoProfile -ExecutionPolicy Unrestricted -Command .\\scripts\\frontend-prebuild.ps1", + "postinstall:default": "./scripts/frontend-prebuild.sh", "prebuild": "run-script-os", - "prebuild:windows": "@powershell -NoProfile -ExecutionPolicy Unrestricted -Command ..\\build\\frontend-prebuild.ps1", - "prebuild:default": "../build/frontend-prebuild.sh", + "prebuild:windows": "@powershell -NoProfile -ExecutionPolicy Unrestricted -Command .\\scripts\\frontend-prebuild.ps1", + "prebuild:default": "./scripts/frontend-prebuild.sh", "build": "react-scripts build", "test": "npm run cy:run:e2e && npm run cy:run:unit", "eject": "react-scripts eject", @@ -46,6 +46,7 @@ "inversify": "^5.1.1", "lru_map": "^0.4.1", "react": "^17.0.2", + "react-chord-diagram": "^1.7.0", "react-dom": "^17.0.2", "react-graph-vis": "^1.0.7", "react-router-dom": "^5.2.0", diff --git a/frontend/public/exploration-preview/chord-diagram.png b/frontend/public/exploration-preview/chord-diagram.png new file mode 100644 index 00000000..f1f3a85e Binary files /dev/null and b/frontend/public/exploration-preview/chord-diagram.png differ diff --git a/build/frontend-prebuild.ps1 b/frontend/scripts/frontend-prebuild.ps1 similarity index 89% rename from build/frontend-prebuild.ps1 rename to frontend/scripts/frontend-prebuild.ps1 index 680f19c8..cba9949e 100644 --- a/build/frontend-prebuild.ps1 +++ b/frontend/scripts/frontend-prebuild.ps1 @@ -1,5 +1,5 @@ # Goto the frontend dir -cd ../frontend +cd $PSScriptRoot\.. # Copy shared files New-Item -ItemType Directory -Force -Path ".\src\shared" diff --git a/frontend/scripts/frontend-prebuild.sh b/frontend/scripts/frontend-prebuild.sh new file mode 100755 index 00000000..9a91364e --- /dev/null +++ b/frontend/scripts/frontend-prebuild.sh @@ -0,0 +1,19 @@ +#!/usr/bin/env bash + +# Please make sure that this file is executable if you are working in a windows environment +# This can be done by running "git update-index --chmod=+x .\frontend-prebuild.sh" in the build directory + +# Goto the frontend dir +SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )" +cd "$SCRIPT_DIR"/.. + +# Only execute if not running inside a docker container +if [ "$DOCKER" != "DOCKER" ]; then + + # Copy shared files + cp -a ../shared/src/. 
./src/shared/ + + # Execute linter + yarn lint + +fi diff --git a/frontend/src/@types/react-chord-diagram.d.ts b/frontend/src/@types/react-chord-diagram.d.ts new file mode 100644 index 00000000..af5fb9a9 --- /dev/null +++ b/frontend/src/@types/react-chord-diagram.d.ts @@ -0,0 +1 @@ +declare module 'react-chord-diagram'; diff --git a/frontend/src/exploration/previews/layoutsData.ts b/frontend/src/exploration/previews/layoutsData.ts index b07c52a7..f5dc57d2 100644 --- a/frontend/src/exploration/previews/layoutsData.ts +++ b/frontend/src/exploration/previews/layoutsData.ts @@ -36,9 +36,9 @@ const layoutsData: Record = { path: '/visualization/schema', // TODO #299: Add missing visualisation preview links }, P: { - filename: 'empty-layout.png', - description: 'Pie Chart', - path: '/visualization/schema', // TODO #299: Add missing visualisation preview links + filename: 'chord-diagram.png', + description: 'Chord Diagram', + path: '/visualization/chord', }, }; diff --git a/frontend/src/routing/routes.tsx b/frontend/src/routing/routes.tsx index 8c547f55..39595788 100644 --- a/frontend/src/routing/routes.tsx +++ b/frontend/src/routing/routes.tsx @@ -6,6 +6,7 @@ import Schema from '../visualization/Schema'; import Visualization from '../visualization/Visualization'; import RouteDefinition from './RouteDefinition'; import GraphPage from '../visualization/GraphPage'; +import ChordPage from '../visualization/ChordPage'; const routes: Record = { Home: { @@ -34,6 +35,11 @@ const routes: Record = { label: 'Hierarchies', content: () => , }, + { + path: '/visualization/chord', + label: 'Chord Diagram', + content: () => , + }, ], }, Exploration: { diff --git a/frontend/src/services/schema/SchemaService.ts b/frontend/src/services/schema/SchemaService.ts index c426e83a..ca150792 100644 --- a/frontend/src/services/schema/SchemaService.ts +++ b/frontend/src/services/schema/SchemaService.ts @@ -2,7 +2,11 @@ import { injectable } from 'inversify'; import 'reflect-metadata'; -import { EdgeType, NodeType } from '../../shared/schema'; +import { + EdgeType, + NodeType, + NodeTypeConnectionInfo, +} from '../../shared/schema'; import { CancellationToken } from '../../utils/CancellationToken'; /** @@ -27,4 +31,13 @@ export default abstract class SchemaService { public abstract getNodeTypes( cancellation?: CancellationToken ): Promise; + + /** + * Retrieves the connections between node types together with their count. + * @param cancellation A CancellationToken used to cancel the asynchronous operation. + * @returns A promise that represents the asynchronous operations. When evaluated, the promise result contains an array of node connections. 
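+   * Each entry describes a (from, to) pair of node types and the number of edges connecting them.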
+ */ + public abstract getNodeTypeConnectionInfo( + cancellation?: CancellationToken + ): Promise; } diff --git a/frontend/src/services/schema/SchemaServiceImpl.ts b/frontend/src/services/schema/SchemaServiceImpl.ts index 30d08427..03d7d800 100644 --- a/frontend/src/services/schema/SchemaServiceImpl.ts +++ b/frontend/src/services/schema/SchemaServiceImpl.ts @@ -1,6 +1,10 @@ import { inject, injectable } from 'inversify'; import 'reflect-metadata'; -import { EdgeType, NodeType } from '../../shared/schema'; +import { + EdgeType, + NodeType, + NodeTypeConnectionInfo, +} from '../../shared/schema'; import { AsyncLazy } from '../../shared/utils'; import { CancellationToken } from '../../utils/CancellationToken'; import withCancellation from '../../utils/withCancellation'; @@ -18,6 +22,9 @@ export default class SchemaServiceImpl extends SchemaService { private readonly http: HttpService = null!; private readonly edgeTypesLazy: AsyncLazy; private readonly nodeTypesLazy: AsyncLazy; + private readonly nodeTypeConnectionInfoLazy: AsyncLazy< + NodeTypeConnectionInfo[] + >; public constructor() { super(); @@ -31,6 +38,11 @@ export default class SchemaServiceImpl extends SchemaService { this.requestNodeTypes.bind(this), { retryOnError: true } ); + + this.nodeTypeConnectionInfoLazy = new AsyncLazy( + this.requestNodeTypeConnectionInfo.bind(this), + { retryOnError: true } + ); } private requestEdgeTypes(): Promise { @@ -41,6 +53,12 @@ export default class SchemaServiceImpl extends SchemaService { return this.http.get('/api/schema/node-types'); } + private requestNodeTypeConnectionInfo(): Promise { + return this.http.get( + '/api/schema/node-type-connection-info' + ); + } + public getEdgeTypes(cancellation?: CancellationToken): Promise { return withCancellation(this.edgeTypesLazy.value, cancellation); } @@ -48,4 +66,13 @@ export default class SchemaServiceImpl extends SchemaService { public getNodeTypes(cancellation?: CancellationToken): Promise { return withCancellation(this.nodeTypesLazy.value, cancellation); } + + public getNodeTypeConnectionInfo( + cancellation?: CancellationToken + ): Promise { + return withCancellation( + this.nodeTypeConnectionInfoLazy.value, + cancellation + ); + } } diff --git a/frontend/src/stores/exploration/interfaces/ExplorationWeight.ts b/frontend/src/stores/exploration/interfaces/ExplorationWeight.ts index 51f88b32..81f99e7a 100644 --- a/frontend/src/stores/exploration/interfaces/ExplorationWeight.ts +++ b/frontend/src/stores/exploration/interfaces/ExplorationWeight.ts @@ -27,7 +27,7 @@ export interface ExplorationWeight { */ L: number; /** - * Pie Chart + * Chord Diagram */ P: number; } diff --git a/frontend/src/visualization/ChordPage.tsx b/frontend/src/visualization/ChordPage.tsx new file mode 100644 index 00000000..eaa10ca9 --- /dev/null +++ b/frontend/src/visualization/ChordPage.tsx @@ -0,0 +1,109 @@ +import React from 'react'; +import { combineLatest } from 'rxjs'; +import ChordDiagram from 'react-chord-diagram'; +import { map } from 'rxjs/operators'; +import { Grid, makeStyles } from '@material-ui/core'; +import useService from '../dependency-injection/useService'; +import { SchemaService } from '../services/schema'; +import { NodeTypeConnectionInfo } from '../shared/schema'; +import useObservable from '../utils/useObservable'; +import { EntityStyleProvider, EntityStyleStore } from '../stores/colors'; + +type ChordData = { + matrix: number[][]; + names: string[]; + colors: string[]; +}; + +const useStyles = makeStyles({ + root: { + paddingTop: '80px', // otherwise 
page is hidden under header + }, +}); + +/** + * Generate a matrix with node connections, and a Record mapping node types to their index in the matrix and their color. + * @param nodeTypeConnectionInfo data source to generate the matrix from. + * @param styleProvider {@link EntityStyleProvider} instance. + * @returns A tuple of [matrix, nodes], where + * matrix is of size n*n, with each element matrix[i] being an array of n numbers, + * and each matrix[i][j] represents and edge from the ith node in the graph + * to the jth node, + * nodes maps the node typenames to their indices in the matrix and their color. + */ +function convertToChordData( + nodeTypeConnectionInfo: NodeTypeConnectionInfo[], + styleProvider: EntityStyleProvider +): ChordData { + const ret: ChordData = { matrix: [], names: [], colors: [] }; + // early exit on empty input + if (nodeTypeConnectionInfo.length === 0) { + return ret; + } + + // map node names to their index in the matrix. + const nodes: Record = {}; + // fill names and colors array. + let counter = 0; + nodeTypeConnectionInfo.forEach((node) => { + if (nodes[node.from] === undefined) { + // save node index. + nodes[node.from] = counter; + counter += 1; + // fill names and colors arrays. + const fakeNode = { id: -1, types: [node.from] }; + ret.names.push(node.from); + ret.colors.push(styleProvider.getStyle(fakeNode).color); + } + }); + + // i*j matrix containing number of connection from node i to j. + const matrix: number[][] = []; + // initialize empty n*n matrix where n is number of nodes. + for (let i = 0; i < counter; i += 1) { + matrix.push(new Array(counter).fill(0)); + } + + // generate matrix + for (const node of nodeTypeConnectionInfo) { + const i = nodes[node.from]; + const j = nodes[node.to]; + matrix[i][j] += node.numConnections; + } + + ret.matrix = matrix; + + return ret; +} + +export default function ChordPage(): JSX.Element { + const classes = useStyles(); + + const schemaService = useService(SchemaService); + const entityStyleStore = useService(EntityStyleStore); + + const chordData = useObservable( + combineLatest([ + schemaService.getNodeTypeConnectionInfo(), + entityStyleStore.getState(), + ]).pipe(map((next) => convertToChordData(next[0], next[1]))), + { matrix: [], names: [], colors: [] } + ); + + return ( + + + + ); +} diff --git a/frontend/src/visualization/Visualization.tsx b/frontend/src/visualization/Visualization.tsx index d1a4a478..782090d3 100644 --- a/frontend/src/visualization/Visualization.tsx +++ b/frontend/src/visualization/Visualization.tsx @@ -17,7 +17,7 @@ function Visualization(): JSX.Element { // TODO: remove placeholder grids and fill cards with existing tabs const schemaCard = cards[cards.length - 1]; - const placeholders = Array(3).fill(schemaCard); + const placeholders = Array(2).fill(schemaCard); return ( <> diff --git a/frontend/src/visualization/dashboard-card/DashboardCard.tsx b/frontend/src/visualization/dashboard-card/DashboardCard.tsx index 7b733833..44883693 100644 --- a/frontend/src/visualization/dashboard-card/DashboardCard.tsx +++ b/frontend/src/visualization/dashboard-card/DashboardCard.tsx @@ -5,6 +5,7 @@ import CardHeader from '@material-ui/core/CardHeader'; import CardContent from '@material-ui/core/CardContent'; import Typography from '@material-ui/core/Typography'; import Icon from '@material-ui/core/Icon'; +import { Link } from 'react-router-dom'; import CardDefinition from './CardDefinition'; const useStyles = makeStyles({ @@ -18,7 +19,7 @@ export default function DashboardCard(card: 
CardDefinition): JSX.Element { const classes = useStyles(); return ( - + {icon}} @@ -35,6 +36,6 @@ export default function DashboardCard(card: CardDefinition): JSX.Element { - + ); } diff --git a/frontend/src/visualization/dashboard-card/cardContents.tsx b/frontend/src/visualization/dashboard-card/cardContents.tsx index 571a348c..250dbd45 100644 --- a/frontend/src/visualization/dashboard-card/cardContents.tsx +++ b/frontend/src/visualization/dashboard-card/cardContents.tsx @@ -27,6 +27,12 @@ const cardContents: Array = [ description: 'Display queried data in a hierarchically structured graph.', icon: 'device_hub', }, + { + label: 'Chord Diagram', + subLabel: 'Edge visualization', + description: 'Get an overview of how nodes are connected to each other.', + icon: 'donut_large', + }, ]; /** diff --git a/frontend/src/visualization/filtering/Filter.tsx b/frontend/src/visualization/filtering/Filter.tsx index aa73ee43..410f462b 100644 --- a/frontend/src/visualization/filtering/Filter.tsx +++ b/frontend/src/visualization/filtering/Filter.tsx @@ -93,7 +93,7 @@ const Filter = (): JSX.Element => { entityStyleStore.getValue() ); - const schemaService = useService(SchemaService, null); + const schemaService = useService(SchemaService); const loadingStore = useService(LoadingStore); const errorStore = useService(ErrorStore); diff --git a/frontend/tsconfig.json b/frontend/tsconfig.json index 2f0bf069..9f42b3a0 100644 --- a/frontend/tsconfig.json +++ b/frontend/tsconfig.json @@ -28,6 +28,6 @@ }, "include": [ "src", - "cypress" + "cypress", ] } diff --git a/frontend/yarn.lock b/frontend/yarn.lock index e28bd5ea..9ec87802 100644 --- a/frontend/yarn.lock +++ b/frontend/yarn.lock @@ -5819,6 +5819,95 @@ cypress@^7.3.0: url "^0.11.0" yauzl "^2.10.0" +d3-array@1: + version "1.2.4" + resolved "https://registry.yarnpkg.com/d3-array/-/d3-array-1.2.4.tgz#635ce4d5eea759f6f605863dbcfc30edc737f71f" + integrity sha512-KHW6M86R+FUPYGb3R5XiYjXPq7VzwxZ22buHhAEVG5ztoEcZZMLov530mmccaqA1GghZArjQV46fuc8kUqhhHw== + +d3-array@2, d3-array@^2.3.0, d3-array@^2.4.0: + version "2.12.1" + resolved "https://registry.yarnpkg.com/d3-array/-/d3-array-2.12.1.tgz#e20b41aafcdffdf5d50928004ececf815a465e81" + integrity sha512-B0ErZK/66mHtEsR1TkPEEkwdy+WDesimkM5gpZr5Dsg54BiTA5RXtYW5qTLIAcekaS9xfZrzBLF/OAkB3Qn1YQ== + dependencies: + internmap "^1.0.0" + +d3-chord@^1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/d3-chord/-/d3-chord-1.0.6.tgz#309157e3f2db2c752f0280fedd35f2067ccbb15f" + integrity sha512-JXA2Dro1Fxw9rJe33Uv+Ckr5IrAa74TlfDEhE/jfLOaXegMQFQTAgAw9WnZL8+HxVBRXaRGCkrNU7pJeylRIuA== + dependencies: + d3-array "1" + d3-path "1" + +"d3-color@1 - 2": + version "2.0.0" + resolved "https://registry.yarnpkg.com/d3-color/-/d3-color-2.0.0.tgz#8d625cab42ed9b8f601a1760a389f7ea9189d62e" + integrity sha512-SPXi0TSKPD4g9tw0NMZFnR95XVgUZiBH+uUTqQuDu1OsE2zomHU7ho0FISciaPvosimixwHFl3WHLGabv6dDgQ== + +d3-color@^1.4.0: + version "1.4.1" + resolved "https://registry.yarnpkg.com/d3-color/-/d3-color-1.4.1.tgz#c52002bf8846ada4424d55d97982fef26eb3bc8a" + integrity sha512-p2sTHSLCJI2QKunbGb7ocOh7DgTAn8IrLx21QRc/BSnodXM4sv6aLQlnfpvehFMLZEfBc6g9pH9SWQccFYfJ9Q== + +"d3-format@1 - 2": + version "2.0.0" + resolved "https://registry.yarnpkg.com/d3-format/-/d3-format-2.0.0.tgz#a10bcc0f986c372b729ba447382413aabf5b0767" + integrity sha512-Ab3S6XuE/Q+flY96HXT0jOXcM4EAClYFnRGY5zsjRGNy6qCYrQsMffs7cV5Q9xejb35zxW5hf/guKw34kvIKsA== + +d3-format@^1.4.3: + version "1.4.5" + resolved 
"https://registry.yarnpkg.com/d3-format/-/d3-format-1.4.5.tgz#374f2ba1320e3717eb74a9356c67daee17a7edb4" + integrity sha512-J0piedu6Z8iB6TbIGfZgDzfXxUFN3qQRMofy2oPdXzQibYGqPB/9iMcxr/TGalU+2RsyDO+U4f33id8tbnSRMQ== + +"d3-interpolate@1.2.0 - 2": + version "2.0.1" + resolved "https://registry.yarnpkg.com/d3-interpolate/-/d3-interpolate-2.0.1.tgz#98be499cfb8a3b94d4ff616900501a64abc91163" + integrity sha512-c5UhwwTs/yybcmTpAVqwSFl6vrQ8JZJoT5F7xNFK9pymv5C0Ymcc9/LIJHtYIggg/yS9YHw8i8O8tgb9pupjeQ== + dependencies: + d3-color "1 - 2" + +d3-path@1: + version "1.0.9" + resolved "https://registry.yarnpkg.com/d3-path/-/d3-path-1.0.9.tgz#48c050bb1fe8c262493a8caf5524e3e9591701cf" + integrity sha512-VLaYcn81dtHVTjEHd8B+pbe9yHWpXKZUC87PzoFmsFrJqgFwDe/qxfp5MlfsfM1V5E/iVt0MmEbWQ7FVIXh/bg== + +d3-scale@^3.2.1: + version "3.3.0" + resolved "https://registry.yarnpkg.com/d3-scale/-/d3-scale-3.3.0.tgz#28c600b29f47e5b9cd2df9749c206727966203f3" + integrity sha512-1JGp44NQCt5d1g+Yy+GeOnZP7xHo0ii8zsQp6PGzd+C1/dl0KGsp9A7Mxwp+1D1o4unbTTxVdU/ZOIEBoeZPbQ== + dependencies: + d3-array "^2.3.0" + d3-format "1 - 2" + d3-interpolate "1.2.0 - 2" + d3-time "^2.1.1" + d3-time-format "2 - 3" + +d3-selection@^1.4.1: + version "1.4.2" + resolved "https://registry.yarnpkg.com/d3-selection/-/d3-selection-1.4.2.tgz#dcaa49522c0dbf32d6c1858afc26b6094555bc5c" + integrity sha512-SJ0BqYihzOjDnnlfyeHT0e30k0K1+5sR3d5fNueCNeuhZTnGw4M4o8mqJchSwgKMXCNFo+e2VTChiSJ0vYtXkg== + +d3-shape@^1.3.7: + version "1.3.7" + resolved "https://registry.yarnpkg.com/d3-shape/-/d3-shape-1.3.7.tgz#df63801be07bc986bc54f63789b4fe502992b5d7" + integrity sha512-EUkvKjqPFUAZyOlhY5gzCxCeI0Aep04LwIRpsZ/mLFelJiUfnK56jo5JMDSE7yyP2kLSb6LtF+S5chMk7uqPqw== + dependencies: + d3-path "1" + +"d3-time-format@2 - 3": + version "3.0.0" + resolved "https://registry.yarnpkg.com/d3-time-format/-/d3-time-format-3.0.0.tgz#df8056c83659e01f20ac5da5fdeae7c08d5f1bb6" + integrity sha512-UXJh6EKsHBTjopVqZBhFysQcoXSv/5yLONZvkQ5Kk3qbwiUYkdX17Xa1PT6U1ZWXGGfB1ey5L8dKMlFq2DO0Ag== + dependencies: + d3-time "1 - 2" + +"d3-time@1 - 2", d3-time@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/d3-time/-/d3-time-2.1.1.tgz#e9d8a8a88691f4548e68ca085e5ff956724a6682" + integrity sha512-/eIQe/eR4kCQwq7yxi7z4c6qEXf2IYGcjoWB5OOQy4Tq9Uv39/947qlDcN2TLkiTzQWzvnsuYPB9TrWaNfipKQ== + dependencies: + d3-array "2" + d@1, d@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/d/-/d-1.0.1.tgz#8698095372d58dbee346ffd0c7093f99f8f9eb5a" @@ -8470,6 +8559,11 @@ internal-slot@^1.0.3: has "^1.0.3" side-channel "^1.0.4" +internmap@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/internmap/-/internmap-1.0.1.tgz#0017cc8a3b99605f0302f2b198d272e015e5df95" + integrity sha512-lDB5YccMydFBtasVtxnZ3MRBHuaoE8GKsppq+EchKL2U4nK/DmEpPHNH8MZe5HkMtpSiTSOZwfN0tzYjO/lJEw== + invariant@^2.2.2: version "2.2.4" resolved "https://registry.yarnpkg.com/invariant/-/invariant-2.2.4.tgz#610f3c92c9359ce1db616e538008d23ff35158e6" @@ -12902,6 +12996,20 @@ react-app-polyfill@^2.0.0: regenerator-runtime "^0.13.7" whatwg-fetch "^3.4.1" +react-chord-diagram@^1.7.0: + version "1.7.0" + resolved "https://registry.yarnpkg.com/react-chord-diagram/-/react-chord-diagram-1.7.0.tgz#a2945b57c0953380b958d1fa4e9282fbb006aae2" + integrity sha512-OkqBt+kXRpAzKwMKES9WSp7MfbGlbzyl8BX07pHVfsftCegh2iDIAL+jg3bHg5Vqu+h5HQmRZ4CXy1U2AX5QWg== + dependencies: + d3-array "^2.4.0" + d3-chord "^1.0.6" + d3-color "^1.4.0" + d3-format "^1.4.3" + d3-scale "^3.2.1" + d3-selection "^1.4.1" + d3-shape "^1.3.7" + prop-types "^15.7.2" + 
react-dev-utils@^11.0.3: version "11.0.4" resolved "https://registry.yarnpkg.com/react-dev-utils/-/react-dev-utils-11.0.4.tgz#a7ccb60257a1ca2e0efe7a83e38e6700d17aa37a" diff --git a/ssh.sh b/ssh.sh new file mode 100644 index 00000000..159c24e7 --- /dev/null +++ b/ssh.sh @@ -0,0 +1,19 @@ +#!/usr/bin/env bash + +KEY_FILE=./amos-fau-proj2@group.riehle.org +HOST_PUB_KEY_FILE=./Server_Keyscan.txt + +HOST=5.183.20.2 +USER=root + +SSH_DIR=~/.ssh/ +SSH_KEY_DIR="$SSH_DIR"keys/ +SSH_KEY_PATH="$SSH_KEY_DIR"$HOST.key +SSH_KNOWN_HOSTS_PATH="$SSH_DIR"known_hosts + +mkdir -p $SSH_KEY_DIR +cat $KEY_FILE > $SSH_KEY_PATH +sudo chmod 600 $SSH_KEY_PATH +touch $SSH_KNOWN_HOSTS_PATH +cat $HOST_PUB_KEY_FILE >> $SSH_KNOWN_HOSTS_PATH +ssh -i $SSH_KEY_PATH $USER@$HOST diff --git a/upload-artifacts.sh b/upload-artifacts.sh new file mode 100644 index 00000000..8a944d78 --- /dev/null +++ b/upload-artifacts.sh @@ -0,0 +1,24 @@ +#!/usr/bin/env bash + +ARTIFACTS_DIR=./artifacts + +KEY_FILE=./amos-fau-proj2@group.riehle.org +HOST_PUB_KEY_FILE=./Server_Keyscan.txt + +HOST=5.183.20.2 +USER=root + +SSH_DIR=~/.ssh/ +SSH_KEY_DIR="$SSH_DIR"keys/ +SSH_KEY_PATH="$SSH_KEY_DIR"$HOST.key +SSH_KNOWN_HOSTS_PATH="$SSH_DIR"known_hosts + +mkdir -p $SSH_KEY_DIR +cat $KEY_FILE > $SSH_KEY_PATH +sudo chmod 600 $SSH_KEY_PATH +touch $SSH_KNOWN_HOSTS_PATH +cat $HOST_PUB_KEY_FILE >> $SSH_KNOWN_HOSTS_PATH +ssh -i $SSH_KEY_PATH $USER@$HOST 'cd ~/amos/ && docker-compose down && cd ~ && rm -rf amos' +scp -i $SSH_KEY_PATH -r $ARTIFACTS_DIR $USER@$HOST:~/amos/ +ssh -i $SSH_KEY_PATH $USER@$HOST 'cd ~/amos/ && ./kmap.sh' +# read x
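Usage sketch for the reworked build/start-database CLI that the workflows above now invoke with -d; the calls below assume the defaults defined in build/start-database.js, and the dump name is just one of the shipped test dumps.

    # Start the Neo4j test container in the background, as the CI workflows do:
    node build/start-database -d

    # Override selected defaults; dump names refer to files in database/mount/dumps without the ".dump" suffix:
    node build/start-database --detached --dump testing-dump-big --db-password amos

    # parse-args resolves the second call to { detached: true, dump: 'testing-dump-big', dbPassword: 'amos' }
    # before postProcessArgs fills in the remaining defaults (mount path, plugins path, container name).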