dotfiles.nix/nixos/fuchsia/services/ollama/default.nix
2024-12-21 21:56:12 +08:00

19 lines
359 B
Nix

{ ... }:
{
  # Run large language models locally via the ollama service.
  services.ollama = {
    enable = true;

    # Use ROCm for AMD GPU acceleration.
    acceleration = "rocm";

    # Override the detected GFX version so the RX 5700 XT (gfx1010)
    # is accepted by ROCm.
    rocmOverrideGfx = "10.1.0";

    # Models pulled automatically at service start.
    loadModels = [
      "deepseek-coder-v2"
      "llama3"
      "mannix/llama3.1-8b-abliterated"
    ];
  };
}