# DockerOllama / Dockerfile — Ollama server image for a Hugging Face Space
# (hosting-UI page-header residue converted to comments so the file parses;
# commit 99974a6; the server binds OLLAMA_HOST=0.0.0.0:7860)
# syntax=docker/dockerfile:1
# Runs the Ollama inference server on a Hugging Face Space (HTTP on :7860).
FROM ollama/ollama:0.12.3

WORKDIR /app

# Copy the build context once. NOTE(review): the original copied the context
# twice (`COPY ./ ./` then `COPY --chown=user . /app`) into the same directory,
# and `--chown=user` was silently ineffective because no `user` account exists
# in the base image (the `useradd` step was commented out). requirements.txt
# rides along here; the pip install steps that consumed it were commented out,
# so no separate dependency layer is needed. The standalone `RUN apt update`
# layer (stale list cache, nothing installed) has been dropped for the same
# reason.
COPY . /app

# Ollama writes models/manifests under $HOME/.cache. Spaces runs the container
# as an arbitrary non-root UID with HOME=/, so /.cache must exist and be
# world-writable or `ollama serve` cannot download models at runtime. The wide
# permission is deliberate for that platform, not a general practice.
RUN rm -rf /.cache \
 && mkdir -p /.cache \
 && chmod 777 /.cache

# `ollama serve` takes no host/port argument — it reads OLLAMA_HOST from the
# environment. (This is why the earlier `serve OLLAMA_HOST=...` ENTRYPOINT
# attempts and the generated ollama.sh wrapper never worked: the assignment was
# passed as a positional argument, not an env var.) Spaces routes HTTP traffic
# to 7860; EXPOSE is documentation only but records the contract.
ENV OLLAMA_HOST=0.0.0.0:7860
EXPOSE 7860

# Exec form: ollama runs as PID 1 and receives SIGTERM from `docker stop`.
ENTRYPOINT ["/usr/bin/ollama", "serve"]