Initial commit
.gitignore (vendored, normal file, 2 lines added)
@@ -0,0 +1,2 @@
+/ollama-data
+/webui
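Both ignored paths are the bind-mount directories that run_ollama.sh (below) creates next to the scripts: ./ollama-data holds the pulled models and ./webui holds Open WebUI's state, so neither ends up in version control.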
.vscode/extensions.json (vendored, normal file, 5 lines added)
@@ -0,0 +1,5 @@
+{
+    "recommendations": [
+        "nr-codetools.localaipilot"
+    ]
+}
.vscode/settings.json (vendored, normal file, 3 lines added)
@@ -0,0 +1,3 @@
+{
+    "localaipilot.standalone.ollamaCodeModel": "deepseek-coder:6.7b-base"
+}
README.md (normal file, 7 lines added)
@@ -0,0 +1,7 @@
+# Local ollama
+
+* https://habr.com/ru/companies/minerva_media/articles/909130/
+* https://github.com/nagaraj-real/localaipilot-api/blob/main/README.md#chat-models
+* https://huggingface.co/google/gemma-2b
+* https://huggingface.co/deepseek-ai
+
down.sh (executable file, 4 lines added)
@@ -0,0 +1,4 @@
+#!/bin/bash
+# https://habr.com/ru/companies/minerva_media/articles/909130/
+
+docker stop ai-ollama ai-webui
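Not part of the commit, but since run_ollama.sh starts both containers with --restart unless-stopped, a plain docker stop keeps them down until they are started again. To discard the containers entirely, a possible follow-up (sketch, container names taken from run_ollama.sh):

    docker rm ai-ollama ai-webui    # removes the stopped containers; ./ollama-data and ./webui stay on disk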
ollama.code-workspace (normal file, 10 lines added)
@@ -0,0 +1,10 @@
+{
+    "folders": [
+        {
+            "path": "."
+        }
+    ],
+    "settings": {
+        "localaipilot.standalone.ollamaCodeModel": "deepseek-coder:6.7b-base"
+    }
+}
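The workspace file repeats the localaipilot.standalone.ollamaCodeModel value from .vscode/settings.json, so the same model is used whether the folder or the .code-workspace file is opened.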
run_ds1.5b.sh (executable file, 5 lines added)
@@ -0,0 +1,5 @@
+#!/bin/bash
+
+# docker exec -it ai-ollama ollama run llama3
+docker exec -it ai-ollama ollama pull deepseek-r1:1.5b
+docker exec -it ai-ollama ollama run deepseek-r1:1.5b
run_ds7b.sh (executable file, 5 lines added)
@@ -0,0 +1,5 @@
+#!/bin/bash
+
+# docker exec -it ai-ollama ollama run llama3
+docker exec -it ai-ollama ollama pull deepseek-r1:7b
+docker exec -it ai-ollama ollama run deepseek-r1:7b
run_dscoder6.7b.sh (executable file, 5 lines added)
@@ -0,0 +1,5 @@
+#!/bin/bash
+
+# docker exec -it ai-ollama ollama run llama3
+docker exec -it ai-ollama ollama pull deepseek-coder:6.7b-base
+docker exec -it ai-ollama ollama run deepseek-coder:6.7b-base
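deepseek-coder:6.7b-base is the tag that .vscode/settings.json and ollama.code-workspace point the localaipilot extension at, so it presumably needs to be pulled before the extension is used. A quick way to confirm what is available inside the container (a sketch, not a file in the commit):

    docker exec -it ai-ollama ollama list    # should list deepseek-coder:6.7b-base among the pulled models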
run_gemma2b.sh (executable file, 6 lines added)
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+# docker exec -it ai-ollama ollama run llama3
+docker exec -it ai-ollama ollama pull codegemma:2b
+docker exec -it ai-ollama ollama pull gemma:2b
+docker exec -it ai-ollama ollama run gemma:2b
run_ollama.sh (executable file, 22 lines added)
@@ -0,0 +1,22 @@
+#!/bin/bash
+
+docker run \
+    --detach \
+    --volume ./ollama-data:/root/.ollama \
+    --publish 11434:11434 \
+    --restart unless-stopped \
+    --name ai-ollama \
+    ollama/ollama
+
+docker run \
+    --detach \
+    --volume ./webui:/app/backend/data \
+    --publish 9999:8080 \
+    --restart unless-stopped \
+    --add-host=host.docker.internal:host-gateway \
+    --name ai-webui \
+    ghcr.io/open-webui/open-webui:main
+
+echo
+echo "Ready, opening http://localhost:9999/"
+echo
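The script only prints the URL; nothing is opened automatically. A minimal sanity check after running it, assuming the ports published above (not part of the commit):

    docker ps --filter name=ai-                 # ai-ollama and ai-webui should both be "Up"
    curl -s http://localhost:11434/api/tags     # Ollama's API answers with the locally pulled models
    # Open WebUI itself is served at http://localhost:9999/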