ray-worker-nvidia (latest)
Published 2026-02-03 03:11:29 +00:00 by billy
Installation
docker pull git.daviestechlabs.io/daviestechlabs/ray-worker-nvidia:latest
Digest: sha256:fe5abc9850bf59853a4d70145f8ad3f55c499e4f259a580f33b627e2a72c6abf
(to pull by digest: docker pull git.daviestechlabs.io/daviestechlabs/ray-worker-nvidia@sha256:fe5abc9850bf59853a4d70145f8ad3f55c499e4f259a580f33b627e2a72c6abf)
About this package
Ray Serve worker for NVIDIA GPUs (Whisper STT, XTTS TTS)
Image Layers
| ARG RELEASE |
| ARG LAUNCHPAD_BUILD_ARCH |
| LABEL org.opencontainers.image.ref.name=ubuntu |
| LABEL org.opencontainers.image.version=22.04 |
| ADD file:63d5ab3ef0aab308c0e71cb67292c5467f60deafa9b0418cbb220affcd078444 in / |
| CMD ["/bin/bash"] |
| ENV NVARCH=x86_64 |
| ENV NVIDIA_REQUIRE_CUDA=cuda>=12.1 brand=tesla,driver>=470,driver<471 brand=unknown,driver>=470,driver<471 brand=nvidia,driver>=470,driver<471 brand=nvidiartx,driver>=470,driver<471 brand=geforce,driver>=470,driver<471 brand=geforcertx,driver>=470,driver<471 brand=quadro,driver>=470,driver<471 brand=quadrortx,driver>=470,driver<471 brand=titan,driver>=470,driver<471 brand=titanrtx,driver>=470,driver<471 brand=tesla,driver>=525,driver<526 brand=unknown,driver>=525,driver<526 brand=nvidia,driver>=525,driver<526 brand=nvidiartx,driver>=525,driver<526 brand=geforce,driver>=525,driver<526 brand=geforcertx,driver>=525,driver<526 brand=quadro,driver>=525,driver<526 brand=quadrortx,driver>=525,driver<526 brand=titan,driver>=525,driver<526 brand=titanrtx,driver>=525,driver<526 |
| ENV NV_CUDA_CUDART_VERSION=12.1.105-1 |
| ENV NV_CUDA_COMPAT_PACKAGE=cuda-compat-12-1 |
| ARG TARGETARCH |
| LABEL maintainer=NVIDIA CORPORATION <cudatools@nvidia.com> |
| RUN |1 TARGETARCH=amd64 /bin/sh -c apt-get update && apt-get install -y --no-install-recommends gnupg2 curl ca-certificates && curl -fsSLO https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2204/${NVARCH}/cuda-keyring_1.0-1_all.deb && dpkg -i cuda-keyring_1.0-1_all.deb && apt-get purge --autoremove -y curl && rm -rf /var/lib/apt/lists/* # buildkit |
| ENV CUDA_VERSION=12.1.1 |
| RUN |1 TARGETARCH=amd64 /bin/sh -c apt-get update && apt-get install -y --no-install-recommends cuda-cudart-12-1=${NV_CUDA_CUDART_VERSION} ${NV_CUDA_COMPAT_PACKAGE} && rm -rf /var/lib/apt/lists/* # buildkit |
| RUN |1 TARGETARCH=amd64 /bin/sh -c echo "/usr/local/nvidia/lib" >> /etc/ld.so.conf.d/nvidia.conf && echo "/usr/local/nvidia/lib64" >> /etc/ld.so.conf.d/nvidia.conf # buildkit |
| ENV PATH=/usr/local/nvidia/bin:/usr/local/cuda/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin |
| ENV LD_LIBRARY_PATH=/usr/local/nvidia/lib:/usr/local/nvidia/lib64 |
| COPY NGC-DL-CONTAINER-LICENSE / # buildkit |
| ENV NVIDIA_VISIBLE_DEVICES=all |
| ENV NVIDIA_DRIVER_CAPABILITIES=compute,utility |
| ENV NV_CUDA_LIB_VERSION=12.1.1-1 |
| ENV NV_NVTX_VERSION=12.1.105-1 |
| ENV NV_LIBNPP_VERSION=12.1.0.40-1 |
| ENV NV_LIBNPP_PACKAGE=libnpp-12-1=12.1.0.40-1 |
| ENV NV_LIBCUSPARSE_VERSION=12.1.0.106-1 |
| ENV NV_LIBCUBLAS_PACKAGE_NAME=libcublas-12-1 |
| ENV NV_LIBCUBLAS_VERSION=12.1.3.1-1 |
| ENV NV_LIBCUBLAS_PACKAGE=libcublas-12-1=12.1.3.1-1 |
| ENV NV_LIBNCCL_PACKAGE_NAME=libnccl2 |
| ENV NV_LIBNCCL_PACKAGE_VERSION=2.17.1-1 |
| ENV NCCL_VERSION=2.17.1-1 |
| ENV NV_LIBNCCL_PACKAGE=libnccl2=2.17.1-1+cuda12.1 |
| ARG TARGETARCH |
| LABEL maintainer=NVIDIA CORPORATION <cudatools@nvidia.com> |
| RUN |1 TARGETARCH=amd64 /bin/sh -c apt-get update && apt-get install -y --no-install-recommends cuda-libraries-12-1=${NV_CUDA_LIB_VERSION} ${NV_LIBNPP_PACKAGE} cuda-nvtx-12-1=${NV_NVTX_VERSION} libcusparse-12-1=${NV_LIBCUSPARSE_VERSION} ${NV_LIBCUBLAS_PACKAGE} ${NV_LIBNCCL_PACKAGE} && rm -rf /var/lib/apt/lists/* # buildkit |
| RUN |1 TARGETARCH=amd64 /bin/sh -c apt-mark hold ${NV_LIBCUBLAS_PACKAGE_NAME} ${NV_LIBNCCL_PACKAGE_NAME} # buildkit |
| COPY entrypoint.d/ /opt/nvidia/entrypoint.d/ # buildkit |
| COPY nvidia_entrypoint.sh /opt/nvidia/ # buildkit |
| ENV NVIDIA_PRODUCT_NAME=CUDA |
| ENTRYPOINT ["/opt/nvidia/nvidia_entrypoint.sh"] |
| ENV NV_CUDA_LIB_VERSION=12.1.1-1 |
| ENV NV_CUDA_CUDART_DEV_VERSION=12.1.105-1 |
| ENV NV_NVML_DEV_VERSION=12.1.105-1 |
| ENV NV_LIBCUSPARSE_DEV_VERSION=12.1.0.106-1 |
| ENV NV_LIBNPP_DEV_VERSION=12.1.0.40-1 |
| ENV NV_LIBNPP_DEV_PACKAGE=libnpp-dev-12-1=12.1.0.40-1 |
| ENV NV_LIBCUBLAS_DEV_VERSION=12.1.3.1-1 |
| ENV NV_LIBCUBLAS_DEV_PACKAGE_NAME=libcublas-dev-12-1 |
| ENV NV_LIBCUBLAS_DEV_PACKAGE=libcublas-dev-12-1=12.1.3.1-1 |
| ENV NV_CUDA_NSIGHT_COMPUTE_VERSION=12.1.1-1 |
| ENV NV_CUDA_NSIGHT_COMPUTE_DEV_PACKAGE=cuda-nsight-compute-12-1=12.1.1-1 |
| ENV NV_NVPROF_VERSION=12.1.105-1 |
| ENV NV_NVPROF_DEV_PACKAGE=cuda-nvprof-12-1=12.1.105-1 |
| ENV NV_LIBNCCL_DEV_PACKAGE_NAME=libnccl-dev |
| ENV NV_LIBNCCL_DEV_PACKAGE_VERSION=2.17.1-1 |
| ENV NCCL_VERSION=2.17.1-1 |
| ENV NV_LIBNCCL_DEV_PACKAGE=libnccl-dev=2.17.1-1+cuda12.1 |
| ARG TARGETARCH |
| LABEL maintainer=NVIDIA CORPORATION <cudatools@nvidia.com> |
| RUN |1 TARGETARCH=amd64 /bin/sh -c apt-get update && apt-get install -y --no-install-recommends cuda-cudart-dev-12-1=${NV_CUDA_CUDART_DEV_VERSION} cuda-command-line-tools-12-1=${NV_CUDA_LIB_VERSION} cuda-minimal-build-12-1=${NV_CUDA_LIB_VERSION} cuda-libraries-dev-12-1=${NV_CUDA_LIB_VERSION} cuda-nvml-dev-12-1=${NV_NVML_DEV_VERSION} ${NV_NVPROF_DEV_PACKAGE} ${NV_LIBNPP_DEV_PACKAGE} libcusparse-dev-12-1=${NV_LIBCUSPARSE_DEV_VERSION} ${NV_LIBCUBLAS_DEV_PACKAGE} ${NV_LIBNCCL_DEV_PACKAGE} ${NV_CUDA_NSIGHT_COMPUTE_DEV_PACKAGE} && rm -rf /var/lib/apt/lists/* # buildkit |
| RUN |1 TARGETARCH=amd64 /bin/sh -c apt-mark hold ${NV_LIBCUBLAS_DEV_PACKAGE_NAME} ${NV_LIBNCCL_DEV_PACKAGE_NAME} # buildkit |
| ENV LIBRARY_PATH=/usr/local/cuda/lib64/stubs |
| ENV NV_CUDNN_VERSION=8.9.0.131 |
| ENV NV_CUDNN_PACKAGE_NAME=libcudnn8 |
| ENV NV_CUDNN_PACKAGE=libcudnn8=8.9.0.131-1+cuda12.1 |
| ENV NV_CUDNN_PACKAGE_DEV=libcudnn8-dev=8.9.0.131-1+cuda12.1 |
| ARG TARGETARCH |
| LABEL maintainer=NVIDIA CORPORATION <cudatools@nvidia.com> |
| LABEL com.nvidia.cudnn.version=8.9.0.131 |
| RUN |1 TARGETARCH=amd64 /bin/sh -c apt-get update && apt-get install -y --no-install-recommends ${NV_CUDNN_PACKAGE} ${NV_CUDNN_PACKAGE_DEV} && apt-mark hold ${NV_CUDNN_PACKAGE_NAME} && rm -rf /var/lib/apt/lists/* # buildkit |
| ENV TZ=America/Los_Angeles |
| ENV LC_ALL=C.UTF-8 |
| ENV LANG=C.UTF-8 |
| ENV PATH=/home/ray/anaconda3/bin:/usr/local/nvidia/bin:/usr/local/cuda/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/nvidia/bin |
| ENV LD_LIBRARY_PATH=/usr/local/nvidia/lib:/usr/local/nvidia/lib64:/usr/local/nvidia/lib64 |
| ARG DEBIAN_FRONTEND=noninteractive |
| ARG PYTHON_VERSION=3.9 |
| ARG PYTHON_DEPSET |
| ARG RAY_UID=1000 |
| ARG RAY_GID=100 |
| RUN |5 DEBIAN_FRONTEND=noninteractive PYTHON_VERSION=3.11 PYTHON_DEPSET=python/deplocks/base_deps/ray_base_deps_py3.11.lock RAY_UID=1000 RAY_GID=100 /bin/sh -c /dev/pipes/EOF # buildkit |
| USER 1000 |
| ENV HOME=/home/ray |
| WORKDIR /home/ray |
| COPY python/requirements_compiled.txt /home/ray/requirements_compiled.txt # buildkit |
| COPY python/deplocks/base_deps/ray_base_deps_py3.11.lock /home/ray/python_depset.lock # buildkit |
| SHELL [/bin/bash -c] |
| RUN |5 DEBIAN_FRONTEND=noninteractive PYTHON_VERSION=3.11 PYTHON_DEPSET=python/deplocks/base_deps/ray_base_deps_py3.11.lock RAY_UID=1000 RAY_GID=100 /bin/bash -c /dev/pipes/EOF # buildkit |
| WORKDIR /home/ray |
| ARG WHEEL_PATH |
| ARG FIND_LINKS_PATH=.whl |
| ARG CONSTRAINTS_FILE=requirements_compiled.txt |
| COPY .whl/ray-2.53.0-cp311-cp311-manylinux2014_x86_64.whl . # buildkit |
| COPY .whl .whl # buildkit |
| RUN |3 WHEEL_PATH=.whl/ray-2.53.0-cp311-cp311-manylinux2014_x86_64.whl FIND_LINKS_PATH=.whl CONSTRAINTS_FILE=requirements_compiled.txt /bin/bash -c $HOME/anaconda3/bin/pip --no-cache-dir install -c $CONSTRAINTS_FILE `basename $WHEEL_PATH`[all] --find-links $FIND_LINKS_PATH && sudo rm `basename $WHEEL_PATH` # buildkit |
| RUN |3 WHEEL_PATH=.whl/ray-2.53.0-cp311-cp311-manylinux2014_x86_64.whl FIND_LINKS_PATH=.whl CONSTRAINTS_FILE=requirements_compiled.txt /bin/bash -c $HOME/anaconda3/bin/pip freeze > /home/ray/pip-freeze.txt # buildkit |
| LABEL org.opencontainers.image.title=Ray Worker - NVIDIA GPU |
| LABEL org.opencontainers.image.description=Ray Serve worker for NVIDIA GPUs (Whisper STT, XTTS TTS) |
| LABEL org.opencontainers.image.vendor=DaviesTechLabs |
| LABEL org.opencontainers.image.source=https://git.daviestechlabs.io/daviestechlabs/kuberay-images |
| LABEL org.opencontainers.image.licenses=MIT |
| LABEL gpu.target=nvidia-cuda-12.1 |
| LABEL ray.version=2.53.0 |
| WORKDIR /app |
| USER root |
| RUN /bin/bash -c apt-get update && apt-get install -y --no-install-recommends ffmpeg libsndfile1 && rm -rf /var/lib/apt/lists/* # buildkit |
| COPY /uv /usr/local/bin/uv # buildkit |
| USER ray |
| RUN /bin/bash -c uv pip install --system 'faster-whisper>=1.0.0,<2.0' 'TTS>=0.22.0,<1.0' 'soundfile>=0.12.0,<1.0' 'pydub>=0.25.0,<1.0' 'librosa>=0.10.0,<1.0' 'torch>=2.0.0,<3.0' 'torchaudio>=2.0.0,<3.0' 'fastapi>=0.100.0,<1.0' 'uvicorn>=0.23.0,<1.0' 'httpx>=0.27.0,<1.0' 'pydantic>=2.0.0,<3.0' # buildkit |
| COPY --chown=ray:ray ray-serve/ /app/ray_serve/ # buildkit |
| COPY --chown=ray:ray --chmod=755 dockerfiles/ray-entrypoint.sh /app/ray-entrypoint.sh # buildkit |
| ENV PYTHONPATH=/app PYTHONUNBUFFERED=1 PYTHONDONTWRITEBYTECODE=1 CUDA_VISIBLE_DEVICES=0 RAY_HEAD_SVC=ai-inference-raycluster-head-svc GPU_RESOURCE=gpu_nvidia NUM_GPUS=1 |
| HEALTHCHECK &{["CMD-SHELL" "ray status --address=localhost:6379 || exit 1"] "30s" "10s" "1m0s" "0s" '\x03'} |
| ENTRYPOINT ["/app/ray-entrypoint.sh"] |
Labels
| Key | Value |
|---|---|
| com.nvidia.cudnn.version | 8.9.0.131 |
| gpu.target | nvidia-cuda-12.1 |
| io.ray.ray-commit | 0de211850589aea71f842873bc32574c702ab492 |
| io.ray.ray-version | 2.53.0 |
| maintainer | NVIDIA CORPORATION <cudatools@nvidia.com> |
| org.opencontainers.image.description | Ray Serve worker for NVIDIA GPUs (Whisper STT, XTTS TTS) |
| org.opencontainers.image.licenses | MIT |
| org.opencontainers.image.ref.name | ubuntu |
| org.opencontainers.image.source | https://git.daviestechlabs.io/daviestechlabs/kuberay-images |
| org.opencontainers.image.title | Ray Worker - NVIDIA GPU |
| org.opencontainers.image.vendor | DaviesTechLabs |
| org.opencontainers.image.version | 22.04 |
| ray.version | 2.53.0 |