services:
  ollama: # Service that runs the Ollama server (official ollama/ollama image)
    image: ollama/ollama:latest
    pull_policy: always
    container_name: ollama
    ports: ["11435:11434"] # Publish Ollama on host port 11435, mapped to 11434 inside the container
    expose:
      - 11434 # Ollama listens on 11434 inside the container
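    # Note (assumption about intended usage): other services on this Compose network reach
    # Ollama at http://ollama:11434, while the host reaches it at http://localhost:11435.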
    volumes:
      - ./model_files:/model_files # Mount the directory containing the trained model
    tty: true
    entrypoint: ["/bin/sh", "/model_files/run_ollama.sh"] # Startup script that loads the fine-tuned Mistral from its GGUF file
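    # run_ollama.sh is not part of this file; below is a minimal sketch of what such a
    # script is assumed to do (the model name and Modelfile path are hypothetical):
    #   #!/bin/sh
    #   ollama serve &                                    # start the Ollama server in the background
    #   sleep 5                                           # give the server a moment to come up
    #   ollama create logicgpt -f /model_files/Modelfile  # register the fine-tuned Mistral from its GGUF file
    #   wait                                              # keep the container attached to the server process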
    # restart: unless-stopped
  app:
    # To build the image locally, keep the build/context lines below uncommented and the
    # image line commented; to use the prebuilt Docker Hub image instead, swap them.
    build:
      context: . # Path to the build context containing the Dockerfile
    # image: ${DOCKERHUB_USERNAME}/logicgpt:${DOCKER_IMAGE_TAG} # Use the prebuilt image from Docker Hub
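    # Compose substitutes ${DOCKERHUB_USERNAME} and ${DOCKER_IMAGE_TAG} from the shell
    # environment or a .env file next to this compose.yaml, e.g. (hypothetical values):
    #   DOCKERHUB_USERNAME=yourname
    #   DOCKER_IMAGE_TAG=latest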
    container_name: logic_app
    ports:
      - 8501:8501
    expose:
      - 8501
    volumes:
      - ./app:/app
      - ./model_files:/model_files
    depends_on: # important: ensures the ollama service is started before (and along with) the app
      - ollama
    # restart: unless-stopped
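
# Usage (assumed workflow): build and start both services with
#   docker compose up --build
# then open the app at http://localhost:8501 and reach Ollama at http://localhost:11435.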