sclGPT/Dockerfile

# Use an official Python runtime as a parent image
FROM python
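# Note: the full (non-slim) python image is Debian-based and ships a C/C++ toolchain,
# which llama-cpp-python needs in order to build its native extension during pip install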
# Set the working directory in the container
WORKDIR /app
# Copy the current directory contents into the container at /app
COPY ./llama_cpu_server.py /app/llama_cpu_server.py
#COPY ./llama-2-7b-chat.Q2_K.gguf /app/llama-2-7b-chat.Q2_K.gguf
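# (the GGUF model is presumably provided at run time instead, e.g. via a bind mount)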
# Install the Python dependencies used by the server
RUN pip install llama-cpp-python Flask
# Document that the server listens on port 5000 (publish it with -p when running the container)
EXPOSE 5000
# Run llama_cpu_server.py when the container launches
CMD ["python", "llama_cpu_server.py"]
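The Dockerfile only packages llama_cpu_server.py, whose contents are not shown here. For reference, below is a minimal sketch of what such a CPU-only Flask wrapper around llama-cpp-python might look like. The /generate route, the JSON field names, and the model path are assumptions made for illustration; only the Llama and Flask calls themselves are standard usage of those libraries.

# llama_cpu_server.py -- minimal sketch, not the actual server from the repository.
# Assumes the GGUF model referenced in the Dockerfile is present in the working directory.
from flask import Flask, request, jsonify
from llama_cpp import Llama

app = Flask(__name__)

# Load the quantized Llama 2 chat model on the CPU
# (path mirrors the commented-out COPY line in the Dockerfile)
llm = Llama(model_path="./llama-2-7b-chat.Q2_K.gguf")

@app.route("/generate", methods=["POST"])  # endpoint name is an assumption
def generate():
    data = request.get_json(force=True)
    prompt = data.get("prompt", "")
    max_tokens = int(data.get("max_tokens", 128))
    # Run a plain completion and return only the generated text
    result = llm(prompt, max_tokens=max_tokens)
    return jsonify({"text": result["choices"][0]["text"]})

if __name__ == "__main__":
    # Bind to 0.0.0.0 so the server is reachable through the port exposed by the Dockerfile
    app.run(host="0.0.0.0", port=5000)

A typical workflow would then be to build the image with `docker build -t llama-cpu-server .` and start it with `docker run -p 5000:5000 llama-cpu-server`, mounting the model file into /app if it is not baked into the image.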