remove ollama service from fuchsia host
- Remove ollama service configuration and dependencies
- Clean up traefik routing for ollama web interface
- Comment out traefik service examples for clarity
parent 5b4944696e
commit e5d1ba38d4

4 changed files with 6 additions and 64 deletions
@@ -3,7 +3,6 @@
     ./amdgpu-clocks
     ./flatpak
     ./libinput
-    ./ollama
     ./pipewire
     ./printing
     ./udev
@@ -1,50 +0,0 @@
-{...}: {
-  # Get up and running with large language models locally.
-  services.ollama = {
-    enable = true;
-
-    # User and group under which to run ollama
-    user = "ollama";
-    group = "ollama";
-
-    # AMD GPU Support
-    acceleration = "rocm";
-    # 5700xt Support
-    rocmOverrideGfx = "10.1.0";
-
-    # Language models to install
-    loadModels = [
-      "deepseek-r1:8b"
-      "gemma3:4b"
-      "qwen3:8b"
-      "llama3:8b"
-
-      # Coding models
-      "qwen2.5-coder:7b"
-    ];
-
-    # Location to store models
-    models = "/srv/ollama/models";
-
-    # Increase context window
-    environmentVariables = {
-      OLLAMA_CONTEXT_LENGTH = "8192";
-    };
-  };
-
-  # Enable the Open-WebUI server
-  services.open-webui = {
-    enable = true;
-    host = "fuchsia.home.arpa";
-    openFirewall = true;
-  };
-
-  # Mount our subvolume for storage of models
-  fileSystems = {
-    "/srv/ollama" = {
-      device = "/dev/disk/by-label/data";
-      fsType = "btrfs";
-      options = ["subvol=srv-ollama" "compress=zstd"];
-    };
-  };
-}
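For context on how the two removed services were wired together: with both on the same host, Open-WebUI reaches ollama over the local API endpoint that ollama serves on 127.0.0.1:11434 by default, so the removed module never had to name it. A minimal sketch of making that link explicit is below; the environment option and variable name are assumptions about the current nixpkgs open-webui module, not something taken from this repository.

{...}: {
  services.open-webui = {
    enable = true;
    # Assumed option and variable name: point the UI at the local ollama API,
    # which listens on 127.0.0.1:11434 by default.
    environment.OLLAMA_BASE_URL = "http://127.0.0.1:11434";
  };
}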
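The remaining two changed files cover the traefik side of this cleanup ("Clean up traefik routing for ollama web interface") and are not included above. For context only, a reverse-proxy entry fronting an Open-WebUI instance via the NixOS traefik module typically looks roughly like the sketch below; the router name, host rule, entrypoint, and port are illustrative assumptions, not the contents of the removed configuration.

{...}: {
  services.traefik.dynamicConfigOptions.http = {
    # Route requests for the assumed hostname to the Open-WebUI backend.
    routers.open-webui = {
      rule = "Host(`fuchsia.home.arpa`)";
      entryPoints = ["web"]; # depends on the entrypoints defined in staticConfigOptions
      service = "open-webui";
    };
    # Open-WebUI's default listen port in nixpkgs is assumed to be 8080 here.
    services.open-webui.loadBalancer.servers = [
      {url = "http://127.0.0.1:8080";}
    ];
  };
}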