From 8cb25d6797bd06204de72f040ffae86a8511bb04 Mon Sep 17 00:00:00 2001
From: Josh XT
Date: Mon, 5 Feb 2024 14:03:37 -0500
Subject: [PATCH] Remove BuildKit cache mounts from cuda.Dockerfile

---
 cuda.Dockerfile | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/cuda.Dockerfile b/cuda.Dockerfile
index ce019e7..1b3ae8e 100644
--- a/cuda.Dockerfile
+++ b/cuda.Dockerfile
@@ -1,6 +1,5 @@
 FROM nvidia/cuda:12.3.1-devel-ubuntu22.04
-RUN --mount=type=cache,target=/var/cache/cuda/apt,sharing=locked \
-    apt-get update --fix-missing && \
+RUN apt-get update --fix-missing && \
     apt-get upgrade -y && \
     apt-get install -y --fix-missing --no-install-recommends git build-essential cmake gcc g++ portaudio19-dev ffmpeg libportaudio2 libasound-dev python3 python3-pip wget ocl-icd-opencl-dev opencl-headers clinfo libclblast-dev libopenblas-dev ninja-build && \
     mkdir -p /etc/OpenCL/vendors && echo "libnvidia-opencl.so.1" > /etc/OpenCL/vendors/nvidia.icd && \
@@ -12,8 +11,7 @@ ENV HOST=0.0.0.0 \
     CUDA_DOCKER_ARCH=all \
     LLAMA_CUBLAS=1
 COPY cuda-requirements.txt .
-RUN --mount=type=cache,target=/var/cache/cuda/pip,sharing=locked \
-    python3 -m pip install --upgrade pip cmake scikit-build setuptools wheel --no-cache-dir && \
+RUN python3 -m pip install --upgrade pip cmake scikit-build setuptools wheel --no-cache-dir && \
     CMAKE_ARGS="-DLLAMA_CUBLAS=on" FORCE_CMAKE=1 pip install llama-cpp-python --no-cache-dir && \
     pip install --no-cache-dir -r cuda-requirements.txt
 COPY . .