1
0

Переложил и добавил модели по папкам, обновил README

This commit is contained in:
2025-05-25 14:45:00 +08:00
parent c0957e0a7b
commit a1fdd2717c
29 changed files with 97 additions and 35 deletions

View File

@@ -1,5 +1,6 @@
{
"recommendations": [
"nr-codetools.localaipilot"
"nr-codetools.localaipilot",
"continue.continue"
]
}

View File

@@ -1,7 +1,17 @@
# Local ollama
1. Запустить `./run.sh`
2. Запустить скрипт из любой из поддиректорий для скачивания и запуска модели в терминале
Для настройки vscode поставить один из плагинов:
* [Continue](https://marketplace.visualstudio.com/items?itemName=Continue.continue)
* [Local AI Pilot](https://marketplace.visualstudio.com/items?itemName=nr-codetools.localaipilot)
Плагин должен соединиться с `localhost:11434` и подгрузить доступные модели из контейнера.
Есть веб-интерфейс по адресу [localhost:9999](http://localhost:9999).
## Дополнительные материалы
* https://habr.com/ru/companies/minerva_media/articles/909130/
* https://github.com/nagaraj-real/localaipilot-api/blob/main/README.md#chat-models
* https://huggingface.co/google/gemma-2b
* https://huggingface.co/deepseek-ai

4
codellama/34b-i Executable file
View File

@@ -0,0 +1,4 @@
#!/bin/bash
# Run the CodeLlama 34B instruct model interactively inside the ai-ollama container.
# Available tags: https://ollama.com/library/codellama/tags
readonly MODEL='codellama:34b-instruct'
docker exec -it ai-ollama ollama run "${MODEL}" --verbose

4
codellama/7b-i Executable file
View File

@@ -0,0 +1,4 @@
#!/bin/bash
# Run the CodeLlama 7B instruct model interactively inside the ai-ollama container.
# Available tags: https://ollama.com/library/codellama/tags
readonly MODEL='codellama:7b-instruct'
docker exec -it ai-ollama ollama run "${MODEL}" --verbose

4
deepseek-coder/1.3b Executable file
View File

@@ -0,0 +1,4 @@
#!/bin/bash
# Run the DeepSeek-Coder 1.3B model interactively inside the ai-ollama container.
# Available tags: https://ollama.com/library/deepseek-coder/tags
readonly MODEL='deepseek-coder:1.3b'
docker exec -it ai-ollama ollama run "${MODEL}" --verbose

4
deepseek-coder/33b Executable file
View File

@@ -0,0 +1,4 @@
#!/bin/bash
# Run the DeepSeek-Coder 33B model interactively inside the ai-ollama container.
# Available tags: https://ollama.com/library/deepseek-coder/tags
readonly MODEL='deepseek-coder:33b'
docker exec -it ai-ollama ollama run "${MODEL}" --verbose

4
deepseek-coder/6.7b Executable file
View File

@@ -0,0 +1,4 @@
#!/bin/bash
# Run the DeepSeek-Coder 6.7B model interactively inside the ai-ollama container.
# Available tags: https://ollama.com/library/deepseek-coder/tags
readonly MODEL='deepseek-coder:6.7b'
docker exec -it ai-ollama ollama run "${MODEL}" --verbose

4
deepseek-r1/1.5b Executable file
View File

@@ -0,0 +1,4 @@
#!/bin/bash
# Run the DeepSeek-R1 1.5B model interactively inside the ai-ollama container.
# Available tags: https://ollama.com/library/deepseek-r1/tags
readonly MODEL='deepseek-r1:1.5b'
docker exec -it ai-ollama ollama run "${MODEL}" --verbose

4
deepseek-r1/14b Executable file
View File

@@ -0,0 +1,4 @@
#!/bin/bash
# Run the DeepSeek-R1 14B model interactively inside the ai-ollama container.
# Available tags: https://ollama.com/library/deepseek-r1/tags
readonly MODEL='deepseek-r1:14b'
docker exec -it ai-ollama ollama run "${MODEL}" --verbose

4
deepseek-r1/32b Executable file
View File

@@ -0,0 +1,4 @@
#!/bin/bash
# Run the DeepSeek-R1 32B model interactively inside the ai-ollama container.
# Available tags: https://ollama.com/library/deepseek-r1/tags
readonly MODEL='deepseek-r1:32b'
docker exec -it ai-ollama ollama run "${MODEL}" --verbose

4
deepseek-r1/7b Executable file
View File

@@ -0,0 +1,4 @@
#!/bin/bash
# Run the DeepSeek-R1 7B model interactively inside the ai-ollama container.
# Available tags: https://ollama.com/library/deepseek-r1/tags
readonly MODEL='deepseek-r1:7b'
docker exec -it ai-ollama ollama run "${MODEL}" --verbose

4
deepseek-r1/8b Executable file
View File

@@ -0,0 +1,4 @@
#!/bin/bash
# Run the DeepSeek-R1 8B model interactively inside the ai-ollama container.
# Available tags: https://ollama.com/library/deepseek-r1/tags
readonly MODEL='deepseek-r1:8b'
docker exec -it ai-ollama ollama run "${MODEL}" --verbose

4
gemma/2b Executable file
View File

@@ -0,0 +1,4 @@
#!/bin/bash
# Run the Gemma 2B model interactively inside the ai-ollama container.
# Available tags: https://ollama.com/library/gemma/tags
readonly MODEL='gemma:2b'
docker exec -it ai-ollama ollama run "${MODEL}" --verbose

4
gemma/7b Executable file
View File

@@ -0,0 +1,4 @@
#!/bin/bash
# Run the Gemma 7B model interactively inside the ai-ollama container.
# Available tags: https://ollama.com/library/gemma/tags
readonly MODEL='gemma:7b'
docker exec -it ai-ollama ollama run "${MODEL}" --verbose

4
llama2/13b Executable file
View File

@@ -0,0 +1,4 @@
#!/bin/bash
# Run the Llama 2 13B model interactively inside the ai-ollama container.
# Available tags: https://ollama.com/library/llama2/tags
readonly MODEL='llama2:13b'
docker exec -it ai-ollama ollama run "${MODEL}" --verbose

4
llama2/7b Executable file
View File

@@ -0,0 +1,4 @@
#!/bin/bash
# Run the Llama 2 7B model interactively inside the ai-ollama container.
# Available tags: https://ollama.com/library/llama2/tags
readonly MODEL='llama2:7b'
docker exec -it ai-ollama ollama run "${MODEL}" --verbose

4
llama3.1/8b Executable file
View File

@@ -0,0 +1,4 @@
#!/bin/bash
# Run the Llama 3.1 8B model interactively inside the ai-ollama container.
# Available tags: https://ollama.com/library/llama3.1/tags
readonly MODEL='llama3.1:8b'
docker exec -it ai-ollama ollama run "${MODEL}" --verbose

4
llama3.2/1b Executable file
View File

@@ -0,0 +1,4 @@
#!/bin/bash
# Run the Llama 3.2 1B model interactively inside the ai-ollama container.
# Available tags: https://ollama.com/library/llama3.2/tags
readonly MODEL='llama3.2:1b'
docker exec -it ai-ollama ollama run "${MODEL}" --verbose

4
llama3.2/3b Executable file
View File

@@ -0,0 +1,4 @@
#!/bin/bash
# Run the Llama 3.2 3B model interactively inside the ai-ollama container.
# Available tags: https://ollama.com/library/llama3.2/tags
# Fix: this file (llama3.2/3b) previously launched the 1b tag — a copy-paste
# error from the sibling 1b script; it now runs the intended 3b model.
docker exec -it ai-ollama ollama run llama3.2:3b --verbose

4
llama3/8b Executable file
View File

@@ -0,0 +1,4 @@
#!/bin/bash
# Run the Llama 3 8B model interactively inside the ai-ollama container.
# Available tags: https://ollama.com/library/llama3/tags
readonly MODEL='llama3:8b'
docker exec -it ai-ollama ollama run "${MODEL}" --verbose

4
phi3/14b Executable file
View File

@@ -0,0 +1,4 @@
#!/bin/bash
# Run the Phi-3 14B model interactively inside the ai-ollama container.
# Available tags: https://ollama.com/library/phi3/tags
readonly MODEL='phi3:14b'
docker exec -it ai-ollama ollama run "${MODEL}" --verbose

4
phi3/3.8b Executable file
View File

@@ -0,0 +1,4 @@
#!/bin/bash
# Run the Phi-3 3.8B model interactively inside the ai-ollama container.
# Available tags: https://ollama.com/library/phi3/tags
readonly MODEL='phi3:3.8b'
docker exec -it ai-ollama ollama run "${MODEL}" --verbose

View File

@@ -20,3 +20,5 @@ docker run \
echo
echo "Ready, opening http://localhost:9999/"
echo
# NOTE(review): 'open' is macOS-only; on Linux this line fails silently into
# the shell's error output — consider 'xdg-open' or a platform check. TODO confirm target OS.
open http://localhost:9999/

View File

@@ -1,5 +0,0 @@
#!/bin/bash
# docker exec -it ai-ollama ollama run llama3
docker exec -it ai-ollama ollama pull codellama:34b-instruct
docker exec -it ai-ollama ollama run codellama:34b-instruct --verbose

View File

@@ -1,5 +0,0 @@
#!/bin/bash
# docker exec -it ai-ollama ollama run llama3
docker exec -it ai-ollama ollama pull codellama:7b-instruct
docker exec -it ai-ollama ollama run codellama:7b-instruct --verbose

View File

@@ -1,5 +0,0 @@
#!/bin/bash
# docker exec -it ai-ollama ollama run llama3
docker exec -it ai-ollama ollama pull deepseek-r1:1.5b
docker exec -it ai-ollama ollama run deepseek-r1:1.5b --verbose

View File

@@ -1,5 +0,0 @@
#!/bin/bash
# docker exec -it ai-ollama ollama run llama3
docker exec -it ai-ollama ollama pull deepseek-r1:7b
docker exec -it ai-ollama ollama run deepseek-r1:7b --verbose

View File

@@ -1,5 +0,0 @@
#!/bin/bash
# docker exec -it ai-ollama ollama run llama3
docker exec -it ai-ollama ollama pull deepseek-coder:6.7b-base
docker exec -it ai-ollama ollama run deepseek-coder:6.7b-base --verbose

View File

@@ -1,6 +0,0 @@
#!/bin/bash
# docker exec -it ai-ollama ollama run llama3
docker exec -it ai-ollama ollama pull codegemma:2b
docker exec -it ai-ollama ollama pull gemma:2b
docker exec -it ai-ollama ollama run gemma:2b --verbose