From e0aeb9f8bcea0083f3d4010a28453f42329fef72 Mon Sep 17 00:00:00 2001
From: AnthonyAxenov
Date: Sat, 24 May 2025 10:51:09 +0800
Subject: [PATCH] Initial commit

---
 .gitignore              |  2 ++
 .vscode/extensions.json |  5 +++++
 .vscode/settings.json   |  3 +++
 README.md               |  7 +++++++
 down.sh                 |  4 ++++
 ollama.code-workspace   | 10 ++++++++++
 run_ds1.5b.sh           |  5 +++++
 run_ds7b.sh             |  5 +++++
 run_dscoder6.7b.sh      |  5 +++++
 run_gemma2b.sh          |  6 ++++++
 run_ollama.sh           | 22 ++++++++++++++++++++++
 11 files changed, 74 insertions(+)
 create mode 100644 .gitignore
 create mode 100644 .vscode/extensions.json
 create mode 100644 .vscode/settings.json
 create mode 100644 README.md
 create mode 100755 down.sh
 create mode 100644 ollama.code-workspace
 create mode 100755 run_ds1.5b.sh
 create mode 100755 run_ds7b.sh
 create mode 100755 run_dscoder6.7b.sh
 create mode 100755 run_gemma2b.sh
 create mode 100755 run_ollama.sh

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..bc61a74
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,2 @@
+/ollama-data
+/webui
diff --git a/.vscode/extensions.json b/.vscode/extensions.json
new file mode 100644
index 0000000..52adcdd
--- /dev/null
+++ b/.vscode/extensions.json
@@ -0,0 +1,5 @@
+{
+    "recommendations": [
+        "nr-codetools.localaipilot"
+    ]
+}
diff --git a/.vscode/settings.json b/.vscode/settings.json
new file mode 100644
index 0000000..1bb2b85
--- /dev/null
+++ b/.vscode/settings.json
@@ -0,0 +1,3 @@
+{
+    "localaipilot.standalone.ollamaCodeModel": "deepseek-coder:6.7b-base"
+}
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..d51a2f3
--- /dev/null
+++ b/README.md
@@ -0,0 +1,7 @@
+# Local ollama
+
+* https://habr.com/ru/companies/minerva_media/articles/909130/
+* https://github.com/nagaraj-real/localaipilot-api/blob/main/README.md#chat-models
+* https://huggingface.co/google/gemma-2b
+* https://huggingface.co/deepseek-ai
+
diff --git a/down.sh b/down.sh
new file mode 100755
index 0000000..76b3c90
--- /dev/null
+++ b/down.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+# https://habr.com/ru/companies/minerva_media/articles/909130/
+
+docker stop ai-ollama ai-webui
diff --git a/ollama.code-workspace b/ollama.code-workspace
new file mode 100644
index 0000000..b068ec1
--- /dev/null
+++ b/ollama.code-workspace
@@ -0,0 +1,10 @@
+{
+    "folders": [
+        {
+            "path": "."
+        }
+    ],
+    "settings": {
+        "localaipilot.standalone.ollamaCodeModel": "deepseek-coder:6.7b-base"
+    }
+}
diff --git a/run_ds1.5b.sh b/run_ds1.5b.sh
new file mode 100755
index 0000000..2e4b9bb
--- /dev/null
+++ b/run_ds1.5b.sh
@@ -0,0 +1,5 @@
+#!/bin/bash
+
+# docker exec -it ai-ollama ollama run llama3
+docker exec -it ai-ollama ollama pull deepseek-r1:1.5b
+docker exec -it ai-ollama ollama run deepseek-r1:1.5b
diff --git a/run_ds7b.sh b/run_ds7b.sh
new file mode 100755
index 0000000..fb11a77
--- /dev/null
+++ b/run_ds7b.sh
@@ -0,0 +1,5 @@
+#!/bin/bash
+
+# docker exec -it ai-ollama ollama run llama3
+docker exec -it ai-ollama ollama pull deepseek-r1:7b
+docker exec -it ai-ollama ollama run deepseek-r1:7b
diff --git a/run_dscoder6.7b.sh b/run_dscoder6.7b.sh
new file mode 100755
index 0000000..31a53cf
--- /dev/null
+++ b/run_dscoder6.7b.sh
@@ -0,0 +1,5 @@
+#!/bin/bash
+
+# docker exec -it ai-ollama ollama run llama3
+docker exec -it ai-ollama ollama pull deepseek-coder:6.7b-base
+docker exec -it ai-ollama ollama run deepseek-coder:6.7b-base
diff --git a/run_gemma2b.sh b/run_gemma2b.sh
new file mode 100755
index 0000000..b35dca2
--- /dev/null
+++ b/run_gemma2b.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+# docker exec -it ai-ollama ollama run llama3
+docker exec -it ai-ollama ollama pull codegemma:2b
+docker exec -it ai-ollama ollama pull gemma:2b
+docker exec -it ai-ollama ollama run gemma:2b
diff --git a/run_ollama.sh b/run_ollama.sh
new file mode 100755
index 0000000..cb14ce9
--- /dev/null
+++ b/run_ollama.sh
@@ -0,0 +1,22 @@
+#!/bin/bash
+
+docker run \
+    --detach \
+    --volume ./ollama-data:/root/.ollama \
+    --publish 11434:11434 \
+    --restart unless-stopped \
+    --name ai-ollama \
+    ollama/ollama
+
+docker run \
+    --detach \
+    --volume ./webui:/app/backend/data \
+    --publish 9999:8080 \
+    --restart unless-stopped \
+    --add-host=host.docker.internal:host-gateway \
+    --name ai-webui \
+    ghcr.io/open-webui/open-webui:main
+
+echo
+echo "Ready, opening http://localhost:9999/"
+echo