Add timm dependency for InternVL2 model
Dockerfile (+3, -2)
@@ -33,7 +33,8 @@ gradio==3.38.0\n\
 numpy\n\
 Pillow\n\
 torch>=2.0.0\n\
-sentencepiece" > requirements.txt
+sentencepiece\n\
+timm==0.9.2" > requirements.txt
 
 # Install Python dependencies
 RUN pip3 install --no-cache-dir --upgrade pip && \
@@ -57,7 +58,7 @@ if [ $(python3 -c "import torch; print(torch.cuda.is_available())") = "True" ];
 python3 -c "import torch; print(f\"Total GPU memory: {torch.cuda.get_device_properties(0).total_memory / 1024 / 1024 / 1024:.2f} GB\")" \n\
 fi \n\
 echo "\n===== Package Information =====" \n\
-pip3 list | grep -E "transformers|einops|torch|sentencepiece|gradio" \n\
+pip3 list | grep -E "transformers|einops|torch|sentencepiece|timm|gradio" \n\
 echo "\n===== Starting Application =====" \n\
 exec "$@"' > /entrypoint.sh && \
 chmod +x /entrypoint.sh
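With timm pinned alongside the other Python packages, the generated requirements.txt now covers the dependency that InternVL2's model code needs, and the entrypoint's package listing will report the installed version at startup. As a quick post-build sanity check, the sketch below confirms timm is importable inside the container; the image tag internvl2-demo is only a placeholder, not something this repo defines.

# Build the image and check that timm resolves to the pinned version.
docker build -t internvl2-demo .
docker run --rm internvl2-demo python3 -c "import timm; print(timm.__version__)"
# Should print 0.9.2, matching the pin in requirements.txt.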