update configuration of our ollama service
parent b0be0f9042
commit 2f545a818f
1 changed file with 25 additions and 1 deletion
@@ -3,6 +3,10 @@
  services.ollama = {
    enable = true;

    # User and group under which to run ollama
    user = "ollama";
    group = "ollama";

    # AMD GPU Support
    acceleration = "rocm";
    # 5700xt Support
@@ -11,9 +15,29 @@
    # Language models to install
    loadModels = [
      "deepseek-r1:8b"
      "gemma3:12b"
      "gemma3:4b"
      "qwen3:8b"
      "llama3:8b"

      # Coding models
      "qwen2.5-coder:7b"
    ];

    # Location to store models
    models = "/srv/ollama/models";
  };

  # Enable the Open-WebUI server
  services.open-webui = {
    enable = true;
  };

  # Mount our subvolume for storage of models
  fileSystems = {
    "/srv/ollama" = {
      device = "/dev/disk/by-label/data";
      fsType = "btrfs";
      options = ["subvol=srv-ollama" "compress=zstd"];
    };
  };
}
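Note (not part of this commit): the "# 5700xt Support" comment refers to getting ROCm working on an RX 5700 XT (gfx1010), which ROCm does not treat as an officially supported target. A minimal sketch of the usual workaround, assuming the NixOS module's rocmOverrideGfx option and the value commonly used for this card:

  # Assumed value for gfx1010 (RX 5700 XT); this sets HSA_OVERRIDE_GFX_VERSION for the ollama unit
  services.ollama.rocmOverrideGfx = "10.1.0";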
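Note (not part of this commit): with both services on the same host, Open-WebUI typically reaches Ollama's API on its default localhost:11434 endpoint without extra configuration. A sketch of exposing the web UI beyond localhost, assuming the module's host, port and openFirewall options; the port number is an arbitrary choice:

  services.open-webui = {
    enable = true;
    host = "0.0.0.0";    # listen on all interfaces (assumption: LAN access is wanted)
    port = 3000;         # hypothetical port choice
    openFirewall = true;
  };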
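Note (not part of this commit): with models relocated to /srv/ollama/models, that directory must exist and be writable by the ollama user before the service starts. One way to state that declaratively is a tmpfiles rule, sketched below; whether it is actually needed depends on whether the ollama unit creates the directory itself.

  systemd.tmpfiles.rules = [
    "d /srv/ollama/models 0770 ollama ollama -"   # create the directory, owned by the ollama user/group
  ];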