diff --git a/backend/Dockerfile.model_server b/backend/Dockerfile.model_server
index 90ded483122..080d946636a 100644
--- a/backend/Dockerfile.model_server
+++ b/backend/Dockerfile.model_server
@@ -24,15 +24,15 @@ RUN apt-get remove -y --allow-remove-essential perl-base && \
 # Download tokenizers, distilbert for the Danswer model
 # Download model weights
 # Run Nomic to pull in the custom architecture and have it cached locally
-RUN python -c "from transformers import AutoTokenizer; \
-AutoTokenizer.from_pretrained('mixedbread-ai/mxbai-rerank-xsmall-v1'); \
-from huggingface_hub import snapshot_download; \
-snapshot_download(repo_id='danswer/hybrid-intent-token-classifier', revision='v1.0.3'); \
-snapshot_download('mixedbread-ai/mxbai-rerank-xsmall-v1');"
+# RUN python -c "from transformers import AutoTokenizer; \
+# AutoTokenizer.from_pretrained('mixedbread-ai/mxbai-rerank-xsmall-v1'); \
+# from huggingface_hub import snapshot_download; \
+# snapshot_download(repo_id='danswer/hybrid-intent-token-classifier', revision='v1.0.3'); \
+# snapshot_download('mixedbread-ai/mxbai-rerank-xsmall-v1');"
 
 # In case the user has volumes mounted to /root/.cache/huggingface that they've downloaded while
 # running Danswer, don't overwrite it with the built in cache folder
-RUN mv /root/.cache/huggingface /root/.cache/temp_huggingface
+# RUN mv /root/.cache/huggingface /root/.cache/temp_huggingface
 
 WORKDIR /app
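
Note: the commented-out RUN step was a build-time cache warm-up. For readability, a sketch of the equivalent standalone Python (assuming transformers and huggingface_hub are installed in the image, as the original one-liner implies) looks like this:

    from transformers import AutoTokenizer
    from huggingface_hub import snapshot_download

    # Warm the local HuggingFace cache (~/.cache/huggingface) at image build time
    # so the model server does not need to download these on first startup.
    AutoTokenizer.from_pretrained("mixedbread-ai/mxbai-rerank-xsmall-v1")
    snapshot_download(repo_id="danswer/hybrid-intent-token-classifier", revision="v1.0.3")
    snapshot_download("mixedbread-ai/mxbai-rerank-xsmall-v1")

With the RUN step (and the follow-up mv of the built-in cache folder) commented out, these models are presumably fetched lazily at container runtime instead of being baked into the image.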