Compare commits

...

2 commits

Author SHA1 Message Date
e5d1ba38d4
remove ollama service from fuchsia host
- Remove ollama service configuration and dependencies
- Clean up traefik routing for ollama web interface
- Comment out traefik service examples for clarity
2025-09-29 18:30:59 +08:00
5b4944696e
enhance wezterm: add intelligent development layout with Claude integration
- Create 3-pane layout automatically: editor (main), terminal (bottom 30%), Claude sidebar (left 25%)
- Implement smart focus management between editor and terminal panes
- Add zoom toggling for distraction-free editing sessions
- Launch Claude Code automatically in dedicated sidebar pane
2025-09-29 13:12:12 +08:00
5 changed files with 21 additions and 71 deletions

View file

@@ -135,23 +135,31 @@ config.keys = {
action = wezterm.action.SplitHorizontal({ domain = "CurrentPaneDomain" }),
},
{ -- If there is only one pane, split it vertically, otherwise toggle zoom on the first pane.
{ -- Dynamic pane management
key = "`",
mods = "LEADER",
action = wezterm.action_callback(function(_, pane)
local tab = pane:tab()
local panes = tab:panes_with_info()
if #panes == 1 then
pane:split({
direction = "Bottom",
size = 0.3,
size = 0.30,
})
elseif not panes[1].is_zoomed then
panes[1].pane:activate()
tab:set_zoomed(true)
elseif panes[1].is_zoomed then
tab:set_zoomed(false)
pane:split({
direction = "Left",
size = 0.26,
args = { "claude" }
})
elseif not panes[2].is_zoomed then
panes[2].pane:activate()
tab:set_zoomed(true)
elseif panes[2].is_zoomed then
tab:set_zoomed(false)
panes[3].pane:activate()
end
end),
},

View file

@@ -3,7 +3,6 @@
./amdgpu-clocks
./flatpak
./libinput
./ollama
./pipewire
./printing
./udev

View file

@@ -1,50 +0,0 @@
{...}: {
# Get up and running with large language models locally.
services.ollama = {
enable = true;
# User and group under which to run ollama
user = "ollama";
group = "ollama";
# AMD GPU Support
acceleration = "rocm";
# 5700xt Support
rocmOverrideGfx = "10.1.0";
# Language models to install
loadModels = [
"deepseek-r1:8b"
"gemma3:4b"
"qwen3:8b"
"llama3:8b"
# Coding models
"qwen2.5-coder:7b"
];
# Location to store models
models = "/srv/ollama/models";
# Increase context window
environmentVariables = {
OLLAMA_CONTEXT_LENGTH = "8192";
};
};
# Enable the Open-WebUI server
services.open-webui = {
enable = true;
host = "fuchsia.home.arpa";
openFirewall = true;
};
# Mount our subvolume for storage of models
fileSystems = {
"/srv/ollama" = {
device = "/dev/disk/by-label/data";
fsType = "btrfs";
options = ["subvol=srv-ollama" "compress=zstd"];
};
};
}

View file

@@ -7,13 +7,5 @@
];
service = "api@internal";
};
open-webui = {
rule = "Host(`ollama.home.arpa`)";
entryPoints = [
"websecure"
];
service = "open-webui";
};
};
}

View file

@@ -1,7 +1,8 @@
{...}: {
services.traefik.dynamicConfigOptions.http.services = {
open-webui.loadBalancer.servers = [
{url = "http://fuchsia.home.arpa:8080";}
];
};
# # Example
# services.traefik.dynamicConfigOptions.http.services = {
# open-webui.loadBalancer.servers = [
# {url = "http://fuchsia.home.arpa:8080";}
# ];
# };
}