diff --git a/docker/dockerfile.ctr b/docker/dockerfile.ctr
index ea459669a..b67e766b9 100644
--- a/docker/dockerfile.ctr
+++ b/docker/dockerfile.ctr
@@ -78,26 +78,6 @@ RUN if [[ "${HUGECTR_DEV_MODE}" == "false" ]]; then \
     ; fi
 ENV PYTHONPATH=${PYTHONPATH}:${HUGECTR_HOME}/lib
 
-ARG _HUGECTR_BACKEND_REPO="github.com/triton-inference-server/hugectr_backend.git"
-ARG TRITON_VERSION
-# Install Triton inference backend.
-RUN if [ "${HUGECTR_DEV_MODE}" == "false" ]; then \
-    git clone --branch ${HUGECTR_BACKEND_VER} --depth 1 https://${_CI_JOB_TOKEN}${_HUGECTR_BACKEND_REPO} /repos/hugectr_triton_backend && \
-    mkdir /repos/hugectr_triton_backend/build && \
-    cd /repos/hugectr_triton_backend/build && \
-    cmake \
-        -DCMAKE_INSTALL_PREFIX:PATH=${HUGECTR_HOME} \
-        -DTRITON_COMMON_REPO_TAG="r${TRITON_VERSION}" \
-        -DTRITON_CORE_REPO_TAG="r${TRITON_VERSION}" \
-        -DTRITON_BACKEND_REPO_TAG="r${TRITON_VERSION}" .. && \
-    make -j$(nproc) && \
-    make install && \
-    cd ../.. && \
-    rm -rf hugectr_triton_backend && \
-    chmod +x ${HUGECTR_HOME}/lib/*.so ${HUGECTR_HOME}/backends/hugectr/*.so && \
-    rm -rf /repos \
-    ; fi
-RUN ln -s ${HUGECTR_HOME}/backends/hugectr /opt/tritonserver/backends/hugectr
 
 # Clean up
 RUN rm -rf /usr/local/share/jupyter/lab/staging/node_modules/marked
diff --git a/docker/dockerfile.torch b/docker/dockerfile.torch
index bbc87040f..4eff5a1b1 100644
--- a/docker/dockerfile.torch
+++ b/docker/dockerfile.torch
@@ -38,6 +38,24 @@ COPY --chown=1000:1000 --from=dlfw /usr/local/lib/python${PYTHON_VERSION}/dist-p
 COPY --chown=1000:1000 --from=dlfw /usr/local/lib/python${PYTHON_VERSION}/dist-packages/numpy-*.dist-info /usr/local/lib/python${PYTHON_VERSION}/dist-packages/numpy.dist-info/
 COPY --chown=1000:1000 --from=dlfw /usr/local/lib/python${PYTHON_VERSION}/dist-packages/torch-*.egg-info /usr/local/lib/python${PYTHON_VERSION}/dist-packages/torch.egg-info/
 
+# Arguments "_XXXX" are only valid when $HUGECTR_DEV_MODE==false
+# Install hps_torch in merlin-pytorch
+ARG HUGECTR_DEV_MODE=false
+ARG _HUGECTR_REPO="github.com/NVIDIA-Merlin/HugeCTR.git"
+ARG _CI_JOB_TOKEN=""
+ARG HUGECTR_VER=main
+
+RUN if [ "$HUGECTR_DEV_MODE" == "false" ]; then \
+        export HUGECTR_HOME=/usr/local/hugectr && \
+        rm -rf ${HUGECTR_HOME}/lib/libgmock* ${HUGECTR_HOME}/lib/pkgconfig/gmock* ${HUGECTR_HOME}/include/gmock && \
+        rm -rf ${HUGECTR_HOME}/lib/libgtest* ${HUGECTR_HOME}/lib/pkgconfig/gtest* ${HUGECTR_HOME}/include/gtest && \
+        git clone --branch ${HUGECTR_VER} --depth 1 --recurse-submodules --shallow-submodules https://${_CI_JOB_TOKEN}${_HUGECTR_REPO} /hugectr && \
+        pushd /hugectr/hps_torch/ && \
+        pip --no-cache-dir install ninja && \
+        TORCH_CUDA_ARCH_LIST="7.0 7.5 8.0 9.0" python setup.py install && \
+        popd && \
+        rm -rf /hugectr \
+    ; fi
 
 # Add all torch libraries to /usr/local
 RUN ln -s /opt/tritonserver/backends/pytorch/* /usr/local/lib/
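
The new hps_torch install step in docker/dockerfile.torch is gated on the same HUGECTR_DEV_MODE / HUGECTR_VER / _HUGECTR_REPO / _CI_JOB_TOKEN build arguments introduced above. As a minimal sketch of exercising them (the image tag "merlin-pytorch:dev" and the assumption that no other build arguments are required are illustrative, not taken from this diff):

    # Sketch only: the dockerfile path and --build-arg names come from the diff above;
    # the tag and build context are assumptions.
    docker build \
        -f docker/dockerfile.torch \
        --build-arg HUGECTR_DEV_MODE=false \
        --build-arg HUGECTR_VER=main \
        --build-arg _HUGECTR_REPO="github.com/NVIDIA-Merlin/HugeCTR.git" \
        -t merlin-pytorch:dev .

Leaving HUGECTR_DEV_MODE at its default of false triggers the clone-and-install of hps_torch during the image build; setting it to true skips that RUN block entirely.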