dotfiles.nix/nixos/fuchsia/services/ollama/default.nix

{...}: {
  # Get up and running with large language models locally.
  services.ollama = {
    enable = true;
    # AMD GPU support via ROCm.
    acceleration = "rocm";
    # The RX 5700 XT (gfx1010) is not officially supported by ROCm,
    # so override the reported GFX version so the GPU is accepted.
    rocmOverrideGfx = "10.1.0";
    # Models to pull automatically once the service has started.
    loadModels = [
      "deepseek-coder-v2"
      "llama3"
      "mannix/llama3.1-8b-abliterated"
    ];
  };
}
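
By default the Ollama API listens only on the loopback address. Below is a minimal
sketch of how the same module could expose the API on the LAN; it assumes the
nixpkgs version in use provides the host, port, and openFirewall options for
services.ollama (check your channel's option docs before relying on them), and the
values shown are illustrative assumptions, not part of the config above.

{...}: {
  services.ollama = {
    enable = true;
    host = "0.0.0.0";     # listen on all interfaces instead of the 127.0.0.1 default (assumption: option exists as in recent nixpkgs)
    port = 11434;         # Ollama's default API port
    openFirewall = true;  # open the port in the NixOS firewall (assumption: option exists as in recent nixpkgs)
  };
}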