Skip to content

Commit

Permalink
Merge branch 'develop' into lock_profile
Browse files Browse the repository at this point in the history
  • Loading branch information
ramirezfranciscof committed Jan 10, 2022
2 parents 27bd603 + c743a33 commit 3c73bec
Show file tree
Hide file tree
Showing 34 changed files with 448 additions and 669 deletions.
38 changes: 0 additions & 38 deletions .circleci/config.yml

This file was deleted.

24 changes: 24 additions & 0 deletions .github/system_tests/test_polish_workchains.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
#!/usr/bin/env bash
# Stress-test the AiiDA engine through the daemon by launching workchains that
# evaluate expressions in reverse polish notation (see .molecule/default/files/polish).
set -ev

# Make sure the folder containing the workchains is in the python path before the daemon is started
MODULE_POLISH="${GITHUB_WORKSPACE}/.molecule/default/files/polish"
CLI_SCRIPT="${MODULE_POLISH}/cli.py"

declare -a EXPRESSIONS=("1 -2 -1 4 -5 -5 * * * * +" "2 1 3 3 -1 + ^ ^ +" "3 -5 -1 -4 + * ^" "2 4 2 -4 * * +" "3 1 1 5 ^ ^ ^" "3 1 3 4 -4 2 * + + ^ ^")
NUMBER_WORKCHAINS=5
TIMEOUT=600
CODE='add!' # Note the exclamation point is necessary to force the value to be interpreted as LABEL type identifier

# Get the absolute path for verdi; `command -v` is the POSIX-portable replacement for `which`
VERDI=$(command -v verdi)

# Test the element count rather than "$EXPRESSIONS": the bare array name expands
# only to the first element (ShellCheck SC2128), so it would misreport a list
# whose first entry happens to be empty.
if (( ${#EXPRESSIONS[@]} > 0 )); then
    for expression in "${EXPRESSIONS[@]}"; do
        "$VERDI" -p "test_${AIIDA_TEST_BACKEND}" run "${CLI_SCRIPT}" -X "$CODE" -C -F -d -t "$TIMEOUT" "$expression"
    done
else
    # Fallback: launch a fixed number of workchains with randomly generated expressions
    for i in $(seq 1 "$NUMBER_WORKCHAINS"); do
        "$VERDI" -p "test_${AIIDA_TEST_BACKEND}" run "${CLI_SCRIPT}" -X "$CODE" -C -F -d -t "$TIMEOUT"
    done
fi
12 changes: 6 additions & 6 deletions .github/workflows/setup.sh
Original file line number Diff line number Diff line change
Expand Up @@ -19,18 +19,18 @@ sed -i "s|PLACEHOLDER_WORK_DIR|${GITHUB_WORKSPACE}|" "${CONFIG}/localhost.yaml"
sed -i "s|PLACEHOLDER_REMOTE_ABS_PATH_DOUBLER|${CONFIG}/doubler.sh|" "${CONFIG}/doubler.yaml"
sed -i "s|PLACEHOLDER_SSH_KEY|${HOME}/.ssh/slurm_rsa|" "${CONFIG}/slurm-ssh-config.yaml"

verdi setup --config "${CONFIG}/profile.yaml"
verdi setup --non-interactive --config "${CONFIG}/profile.yaml"

# set up localhost computer
verdi computer setup --config "${CONFIG}/localhost.yaml"
verdi computer setup --non-interactive --config "${CONFIG}/localhost.yaml"
verdi computer configure core.local localhost --config "${CONFIG}/localhost-config.yaml"
verdi computer test localhost
verdi code setup --config "${CONFIG}/doubler.yaml"
verdi code setup --config "${CONFIG}/add.yaml"
verdi code setup --non-interactive --config "${CONFIG}/doubler.yaml"
verdi code setup --non-interactive --config "${CONFIG}/add.yaml"

# set up slurm-ssh computer
verdi computer setup --config "${CONFIG}/slurm-ssh.yaml"
verdi computer configure core.ssh slurm-ssh --config "${CONFIG}/slurm-ssh-config.yaml" -n # needs slurm container
verdi computer setup --non-interactive --config "${CONFIG}/slurm-ssh.yaml"
verdi computer configure core.ssh slurm-ssh --non-interactive --config "${CONFIG}/slurm-ssh-config.yaml" -n # needs slurm container
verdi computer test slurm-ssh --print-traceback

verdi profile setdefault test_${AIIDA_TEST_BACKEND}
Expand Down
4 changes: 3 additions & 1 deletion .github/workflows/tests.sh
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,9 @@ set -ev

# Make sure the folder containing the workchains is in the python path before the daemon is started
SYSTEM_TESTS="${GITHUB_WORKSPACE}/.github/system_tests"
MODULE_POLISH="${GITHUB_WORKSPACE}/.molecule/default/files/polish"

export PYTHONPATH="${PYTHONPATH}:${SYSTEM_TESTS}"
export PYTHONPATH="${PYTHONPATH}:${SYSTEM_TESTS}:${MODULE_POLISH}"

# pytest options:
# - report timings of tests
Expand All @@ -23,6 +24,7 @@ export PYTEST_ADDOPTS="${PYTEST_ADDOPTS} --verbose"
# daemon tests
verdi daemon start 4
verdi -p test_${AIIDA_TEST_BACKEND} run ${SYSTEM_TESTS}/test_daemon.py
bash ${SYSTEM_TESTS}/test_polish_workchains.sh
verdi daemon stop

# tests for the testing infrastructure
Expand Down
25 changes: 0 additions & 25 deletions .jenkins/Dockerfile

This file was deleted.

135 changes: 0 additions & 135 deletions .jenkins/Jenkinsfile

This file was deleted.

9 changes: 0 additions & 9 deletions .jenkins/check-jenkinsfile.sh

This file was deleted.

1 change: 0 additions & 1 deletion .molecule/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@ This folder contains configuration for running automated system integration test
This utilises [molecule](https://molecule.readthedocs.io) to automate the creation/destruction of a docker container environment and the setup and testing within it.

The tests are currently set up to stress-test the AiiDA engine by launching a number of workchains of varying complexity, defined by [reverse polish notation](https://en.wikipedia.org/wiki/Reverse_Polish_notation).
They are part of the continuous integration pipeline of AiiDA and are run using [Jenkins](https://www.jenkins.io/) on our own test runner.

## Running the tests locally

Expand Down
53 changes: 0 additions & 53 deletions .molecule/default/config_jenkins.yml

This file was deleted.

20 changes: 2 additions & 18 deletions .molecule/default/files/polish/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,16 +74,8 @@
default=False,
help='Only evaluate the expression and generate the workchain but do not launch it'
)
@click.option(
'-r',
'--retries',
type=click.INT,
default=1,
show_default=True,
help='Number of retries for running via the daemon'
)
@decorators.with_dbenv()
def launch(expression, code, use_calculations, use_calcfunctions, sleep, timeout, modulo, dry_run, daemon, retries):
def launch(expression, code, use_calculations, use_calcfunctions, sleep, timeout, modulo, dry_run, daemon):
"""
Evaluate the expression in Reverse Polish Notation in both a normal way and by procedurally generating
a workchain that encodes the sequence of operators and gets the stack of operands as an input. Multiplications
Expand Down Expand Up @@ -146,15 +138,7 @@ def launch(expression, code, use_calculations, use_calcfunctions, sleep, timeout
inputs['code'] = code

if daemon:
# the daemon tests have been known to fail on Jenkins, when the result node cannot be found
# to mitigate this, we can retry multiple times
for _ in range(retries):
output = run_via_daemon(workchains, inputs, sleep, timeout)
if output is not None:
break
if output is None:
sys.exit(1)
result, workchain, total_time = output
result, workchain, total_time = run_via_daemon(workchains, inputs, sleep, timeout)

else:
start_time = time.time()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
forward_sql = """
UPDATE db_dbnode SET node_type = 'data.core.array.ArrayData.' WHERE node_type = 'data.array.ArrayData.';
UPDATE db_dbnode SET node_type = 'data.core.array.bands.BandsData.' WHERE node_type = 'data.array.bands.BandsData.';
UPDATE db_dbnode SET node_type = 'data.core.array.kpoints.KpointData.' WHERE node_type = 'data.array.kpoints.KpointData.';
UPDATE db_dbnode SET node_type = 'data.core.array.kpoints.KpointsData.' WHERE node_type = 'data.array.kpoints.KpointsData.';
UPDATE db_dbnode SET node_type = 'data.core.array.projection.ProjectionData.' WHERE node_type = 'data.array.projection.ProjectionData.';
UPDATE db_dbnode SET node_type = 'data.core.array.trajectory.TrajectoryData.' WHERE node_type = 'data.array.trajectory.TrajectoryData.';
UPDATE db_dbnode SET node_type = 'data.core.array.xy.XyData.' WHERE node_type = 'data.array.xy.XyData.';
Expand All @@ -36,7 +36,7 @@
UPDATE db_dbnode SET node_type = 'data.core.orbital.OrbitalData.' WHERE node_type = 'data.orbital.OrbitalData.';
UPDATE db_dbnode SET node_type = 'data.core.remote.RemoteData.' WHERE node_type = 'data.remote.RemoteData.';
UPDATE db_dbnode SET node_type = 'data.core.remote.stash.RemoteStashData.' WHERE node_type = 'data.remote.stash.RemoteStashData.';
UPDATE db_dbnode SET node_type = 'data.core.remote.stash.folder.RemoteStashFolderData.' WHERE node_type = 'data.remote.stash.folder.';
UPDATE db_dbnode SET node_type = 'data.core.remote.stash.folder.RemoteStashFolderData.' WHERE node_type = 'data.remote.stash.folder.RemoteStashFolderData.';
UPDATE db_dbnode SET node_type = 'data.core.singlefile.SinglefileData.' WHERE node_type = 'data.singlefile.SinglefileData.';
UPDATE db_dbnode SET node_type = 'data.core.str.Str.' WHERE node_type = 'data.str.Str.';
UPDATE db_dbnode SET node_type = 'data.core.structure.StructureData.' WHERE node_type = 'data.structure.StructureData.';
Expand Down
Loading

0 comments on commit 3c73bec

Please sign in to comment.