redfernstech committed
Commit ee67a2e · verified · 1 Parent(s): 62e443b

Update Dockerfile

Files changed (1)
  1. Dockerfile +6 -48
Dockerfile CHANGED
@@ -1,52 +1,13 @@
-# Builder stage
-# FROM ubuntu:latest
-
-# # Update packages and install curl and gnupg
-# RUN apt-get update && apt-get install -y \
-# curl \
-# gnupg
-
-# # Add NVIDIA package repositories
-# RUN curl -fsSL https://nvidia.github.io/libnvidia-container/gpgkey | gpg --dearmor -o /usr/share/keyrings/nvidia-container-toolkit-keyring.gpg \
-# && echo "deb [signed-by=/usr/share/keyrings/nvidia-container-toolkit-keyring.gpg] https://nvidia.github.io/libnvidia-container/stable/deb/ $(. /etc/os-release; echo $UBUNTU_CODENAME) main" > /etc/apt/sources.list.d/nvidia-container-toolkit.list
-
-# # Install NVIDIA container toolkit (Check for any updated methods or URLs for Ubuntu jammy)
-# RUN apt-get update && apt-get install -y nvidia-container-toolkit || true
-
-# # Install application
-# RUN curl https://ollama.ai/install.sh | sh
-# # Below is to fix embedding bug as per
-# # RUN curl -fsSL https://ollama.com/install.sh | sed 's#https://ollama.com/download#https://github.com/jmorganca/ollama/releases/download/v0.1.29#' | sh
-
-
-# # Create the directory and give appropriate permissions
-# RUN mkdir -p /.ollama && chmod 777 /.ollama
-
-# WORKDIR /.ollama
-
-# # Copy the entry point script
-# COPY entrypoint.sh /entrypoint.sh
-# RUN chmod +x /entrypoint.sh
-
-# # Set the entry point script as the default command
-# ENTRYPOINT ["/entrypoint.sh"]
-# CMD ["ollama", "serve"]
-
-# # Set the model as an environment variable (this can be overridden)
-# ENV model=${model}
-
-# Expose the server port
-# Use the official Ollama Docker image as the base image
 FROM ollama/ollama:latest
 
-RUN apt update && apt install -y python3 && apt install -y python3-pip
+# Install Python and pip
+RUN apt update && apt install -y python3 python3-pip
 
-RUN pip install litellm
+# Install litellm and its proxy dependencies
 RUN pip install 'litellm[proxy]'
 
 # Create a directory for Ollama data
-RUN mkdir -p /.ollama
-RUN chmod -R 777 /.ollama
+RUN mkdir -p /.ollama && chmod -R 777 /.ollama
 
 WORKDIR /.ollama
 
@@ -57,11 +18,8 @@ RUN chmod +x /entrypoint.sh
 # Set the entry point script as the default command
 ENTRYPOINT ["/entrypoint.sh"]
 
-# Set the model as an environment variable (this can be overridden)
-ENV model=${model}
-
 # Expose the port that Ollama runs on
 EXPOSE 7860
 
-# Command to start the Ollama server
-CMD ["serve"]
+# Set the model name as an environment variable (this can be overridden)
+ENV MODEL_NAME=your_model_name_here
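The entrypoint.sh that the Dockerfile copies and marks executable is not part of this commit. As a rough sketch only, assuming the script is meant to start the Ollama server, pull the model named by MODEL_NAME, and then run the LiteLLM proxy on the exposed port, it might look something like this (the file contents, the wait logic, and the litellm flags shown are assumptions, not the repository's actual script):

#!/bin/sh
# Hypothetical entrypoint.sh sketch; not the script shipped with this repo.

# Start the Ollama server in the background (it listens on its default port).
ollama serve &

# Give the server a moment to come up, then pull the configured model.
sleep 5
ollama pull "$MODEL_NAME"

# Run the LiteLLM proxy in the foreground on the exposed port,
# routing OpenAI-style requests to the local Ollama server.
exec litellm --model "ollama/$MODEL_NAME" --host 0.0.0.0 --port 7860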
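With this change the model is chosen at run time through the MODEL_NAME environment variable instead of being baked into the image, and the container's behavior is driven by entrypoint.sh rather than CMD ["serve"]. A typical build-and-run sequence might look like the following; the image tag and model name are placeholders:

# Build the image from this Dockerfile.
docker build -t ollama-litellm .

# Run it, publishing the proxy port and overriding the default model name.
docker run -d -p 7860:7860 -e MODEL_NAME=llama2 ollama-litellm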