Compare commits
2 Commits
fdeff1e9b7 ... 01e0c5bd8d
| Author | SHA1 | Date |
|---|---|---|
| | 01e0c5bd8d | |
| | 5e6769c783 | |
@@ -23,8 +23,9 @@ git clone https://git.brianfertig.com/brianfertig/LocalAIChat.git LocalAIChat
 2. Edit ```.env``` file to contain your ultimate secret passwords
 3. Copy ```config/mcpo/sample.config.json``` to ```config/mcpo/config.json```
 4. Note -- there is no need to edit the config.json file right away
-5. From the root directory (of this repo) run ```docker compose up -d```
-6. If you experience any errors, run it again until it stands completely up
+5. If you have an NVidia GPU, open ```docker-compose.yml``` and uncomment the lines from the Ollama container
+6. From the root directory (of this repo) run ```docker compose up -d```
+7. If you experience any errors, run it again until it stands completely up

 ## Next Steps

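Taken together, the updated steps in this hunk amount to the shell session sketched below. This is a non-authoritative summary of the README text above, assuming a Unix-like shell; the GPU step remains a manual edit of ```docker-compose.yml```, exactly as the README describes.

```sh
# Sketch of the updated setup flow (steps 2-7 above); the repo URL and paths
# come from the README diff, everything else is conventional shell usage.
git clone https://git.brianfertig.com/brianfertig/LocalAIChat.git LocalAIChat
cd LocalAIChat

# Step 2: put your own secrets into .env
${EDITOR:-nano} .env

# Steps 3-4: copy the sample mcpo config; no edits are needed yet
cp config/mcpo/sample.config.json config/mcpo/config.json

# Step 5 (NVIDIA GPUs only): uncomment the GPU lines in the Ollama service
# by editing docker-compose.yml manually (see the next hunk).

# Steps 6-7: bring the stack up; re-run if anything fails on the first attempt
docker compose up -d
```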
@@ -3,6 +3,14 @@ services:
     volumes:
       - ./files/ollama:/root/.ollama
     container_name: ollama
+    #UNCOMMENT THESE LINES IF YOU HAVE AN NVIDIA GPU
+    #deploy:
+    #  resources:
+    #    reservations:
+    #      devices:
+    #        - driver: nvidia
+    #          count: all
+    #          capabilities: [gpu]
     pull_policy: always
     tty: true
     restart: unless-stopped
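Once the comment markers are removed, the GPU reservation for the Ollama service would look roughly like the sketch below. The nesting follows standard Docker Compose GPU reservation syntax, and the service key name ```ollama``` is inferred from ```container_name: ollama``` above, so match your file's actual keys and indentation when uncommenting.

```yaml
services:
  ollama:                   # service key assumed from container_name above
    # ...existing settings (volumes, container_name, pull_policy, ...)...
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia        # standard Compose device reservation
              count: all            # expose all GPUs to the container
              capabilities: [gpu]
```

Note that this block only takes effect if the NVIDIA Container Toolkit is installed on the Docker host; without it, ```docker compose up -d``` typically fails with a device-driver error when it tries to reserve the GPU.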