# install-ollama-models.ps1
# 737 B · PowerShell · Bruto
# Pull a fixed list of Ollama models, tracking failures instead of
# silently ignoring them. Requires the `ollama` CLI on PATH.
# Exit code: 0 if every pull succeeded, 1 otherwise.

# Define the list of models (one "name:tag" per entry).
$models = @(
    "gpt-oss:20b",
    "coney_/gpt-oss_claude-sonnet4.6:latest",
    "DedeProgames/Astral-2.7:20b",
    "qwen3.5:9b",
    "qwen3-vl:8b",
    "qwen2.5:14b",
    "qwen2.5:7b",
    "qwen2.5:3b",
    "ministral-3:8b",
    "ministral-3:14b",
    "mistral-nemo:12b",
    "llama-pro:text",
    "llama3.1:8b",
    "gemma3:4b",
    "gemma3:12b",
    "granite3.3:8b",
    "deepseek-r1:14b",
    "deepseek-r1:8b",
    "deepseek-v2:16b",
    "deepcoder:14b",
    "cogito:14b",
    "cogito:8b"
)

# Fail fast with a clear message if the ollama CLI is not installed.
if (-not (Get-Command ollama -ErrorAction SilentlyContinue)) {
    Write-Error "The 'ollama' command was not found on PATH. Install Ollama first: https://ollama.com/download"
    exit 1
}

Write-Host "Starting Ollama model downloads..." -ForegroundColor Cyan

# Collect failed models rather than aborting, so one bad tag
# doesn't prevent the remaining models from downloading.
$failed = @()
foreach ($model in $models) {
    Write-Host "`n--- Pulling $model ---" -ForegroundColor Yellow
    ollama pull $model
    # ollama signals failure (unknown model, network error) via its exit code.
    if ($LASTEXITCODE -ne 0) {
        Write-Host "Failed to pull $model (exit code $LASTEXITCODE)" -ForegroundColor Red
        $failed += $model
    }
}

if ($failed.Count -gt 0) {
    Write-Host "`nCompleted with $($failed.Count) failure(s): $($failed -join ', ')" -ForegroundColor Red
    exit 1
}

Write-Host "`nAll transfers complete!" -ForegroundColor Green