Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

chore: switch aws region weekly release #959

Merged
merged 5 commits into from
Dec 13, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions .github/workflows/continuous-integration.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -201,9 +201,9 @@ jobs:
# Manage instance type
INSTANCE_TYPE="c5.4xlarge"
if [[ "${BUILD_TYPE}" == "weekly" ]]; then
INSTANCE_TYPE="c6i.16xlarge"
INSTANCE_TYPE="c6i.32xlarge"
elif [[ "${BUILD_TYPE}" == "release" ]]; then
INSTANCE_TYPE="c6i.16xlarge"
INSTANCE_TYPE="c6i.32xlarge"
fi

# Manage python versions
Expand Down
28 changes: 26 additions & 2 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,17 @@ POETRY_VERSION:=1.8.4
APIDOCS_OUTPUT?="./docs/references/api"
OPEN_PR="true"

# Check the total number of CPU cores and use min(4, TOTAL_CPUS/4) workers for
# pytest; the remaining cores are left for FHE/numpy/sklearn thread pools.
UNAME_S := $(shell uname -s)
ifeq ($(UNAME_S),Darwin) # macOS
TOTAL_CPUS := $(shell sysctl -n hw.ncpu)
else # Assume Linux
TOTAL_CPUS := $(shell nproc)
endif
# Clamp PYTEST_CORES to the range [1, 4]: on machines with fewer than 4 CPUs,
# `expr $(TOTAL_CPUS) / 4` truncates to 0, which would make the
# FHE_NUMPY_CORES division below fail with "expr: division by zero".
PYTEST_CORES := $(shell p=`expr $(TOTAL_CPUS) / 4`; [ "$$p" -lt 1 ] && p=1; [ "$$p" -gt 4 ] && p=4; echo $$p)
# Cores handed to each pytest worker for FHE/numpy: total_cores / pytest_cores
FHE_NUMPY_CORES := $(shell expr $(TOTAL_CPUS) / $(PYTEST_CORES))

# At the end of the command, we currently need to force an 'import skorch' in Python in order to
# avoid an obscure bug that led to all pytest commands to fail when installing dependencies with
# Poetry >= 1.3. It is however not very clear how this import fixes the issue, as the bug was
Expand Down Expand Up @@ -214,7 +225,12 @@ spcc_internal: $(SPCC_DEPS)
.PHONY: pytest_internal # Run pytest
pytest_internal:
poetry run pytest --version
MKL_NUM_THREADS=4 OMP_NUM_THREADS=4 poetry run pytest $(TEST) \
MKL_NUM_THREADS=$(FHE_NUMPY_CORES) \
OMP_NUM_THREADS=$(FHE_NUMPY_CORES) \
OPENBLAS_NUM_THREADS=$(FHE_NUMPY_CORES) \
VECLIB_MAXIMUM_THREADS=$(FHE_NUMPY_CORES) \
NUMEXPR_NUM_THREADS=$(FHE_NUMPY_CORES) \
poetry run pytest $(TEST) \
-svv \
--count=$(COUNT) \
--randomly-dont-reorganize \
Expand All @@ -229,7 +245,15 @@ pytest_internal:
# --durations=10 is to show the 10 slowest tests
.PHONY: pytest_internal_parallel # Run pytest with multiple CPUs
pytest_internal_parallel:
"$(MAKE)" pytest_internal PYTEST_OPTIONS="-n $(N_CPU) --durations=10 ${PYTEST_OPTIONS}"
@echo "Total CPUs: $(TOTAL_CPUS)"
@echo "Assigning $(PYTEST_CORES) cores to pytest"
@echo "Leaving $(FHE_NUMPY_CORES) cores per pytest worker for FHE/Numpy/sklearn"
MKL_NUM_THREADS=$(FHE_NUMPY_CORES) \
OMP_NUM_THREADS=$(FHE_NUMPY_CORES) \
OPENBLAS_NUM_THREADS=$(FHE_NUMPY_CORES) \
VECLIB_MAXIMUM_THREADS=$(FHE_NUMPY_CORES) \
NUMEXPR_NUM_THREADS=$(FHE_NUMPY_CORES) \
"$(MAKE)" pytest_internal PYTEST_OPTIONS="-n $(PYTEST_CORES) --durations=10 ${PYTEST_OPTIONS}"

# --global-coverage-infos-json=global-coverage-infos.json is to dump the coverage report in the file
# --cov PATH is the directory PATH to consider for coverage. Default to SRC_DIR=src
Expand Down
Loading