From ec15ebb26275ae3a40410368497e54a0a77b9a79 Mon Sep 17 00:00:00 2001
From: Evan Reichard
Date: Fri, 6 Feb 2026 08:33:01 -0500
Subject: [PATCH] refactor(terminal): filter models by coding type

Change opencode and pi model filtering to use 'coding' type instead of
more generic 'text-generation' type. Update llama-swap model configs to
include 'coding' in metadata type list for relevant models
(deepseek-coder, qwen-coder, mistral, codellama, llama3-8b-instruct-q5).
---
 .../home/programs/terminal/opencode/lib.nix  |  8 ++---
 modules/home/programs/terminal/pi/lib.nix    |  8 ++---
 modules/nixos/services/llama-swap/config.nix | 30 +++++++++++++++----
 3 files changed, 30 insertions(+), 16 deletions(-)

diff --git a/modules/home/programs/terminal/opencode/lib.nix b/modules/home/programs/terminal/opencode/lib.nix
index 95572f7..1cb9b72 100644
--- a/modules/home/programs/terminal/opencode/lib.nix
+++ b/modules/home/programs/terminal/opencode/lib.nix
@@ -14,11 +14,9 @@ in

   toOpencodeModels =
     llamaSwapConfig:
     let
-      textGenModels = filterAttrs
-        (
-          name: model: any (t: t == "text-generation") (model.metadata.type or [ ])
-        )
-        (llamaSwapConfig.models or { });
+      textGenModels = filterAttrs (name: model: any (t: t == "coding") (model.metadata.type or [ ])) (
+        llamaSwapConfig.models or { }
+      );

       localModels = mapAttrs (
diff --git a/modules/home/programs/terminal/pi/lib.nix b/modules/home/programs/terminal/pi/lib.nix
index f9c258a..d478e76 100644
--- a/modules/home/programs/terminal/pi/lib.nix
+++ b/modules/home/programs/terminal/pi/lib.nix
@@ -13,11 +13,9 @@ in

   toPiModels =
     llamaSwapConfig:
     let
-      textGenModels = filterAttrs
-        (
-          name: model: any (t: t == "text-generation") (model.metadata.type or [ ])
-        )
-        (llamaSwapConfig.models or { });
+      textGenModels = filterAttrs (name: model: any (t: t == "coding") (model.metadata.type or [ ])) (
+        llamaSwapConfig.models or { }
+      );

       localModels = mapAttrs (
diff --git a/modules/nixos/services/llama-swap/config.nix b/modules/nixos/services/llama-swap/config.nix
index 4de022e..edffa7c 100644
--- a/modules/nixos/services/llama-swap/config.nix
+++ b/modules/nixos/services/llama-swap/config.nix
@@ -29,7 +29,10 @@ in
         -dev CUDA0
       '';
       metadata = {
-        type = [ "text-generation" ];
+        type = [
+          "text-generation"
+          "coding"
+        ];
       };
       env = [ "GGML_CUDA_ENABLE_UNIFIED_MEMORY=1" ];
     };
@@ -56,7 +59,10 @@ in
         -dev CUDA0
       '';
       metadata = {
-        type = [ "text-generation" ];
+        type = [
+          "text-generation"
+          "coding"
+        ];
       };
       env = [ "GGML_CUDA_ENABLE_UNIFIED_MEMORY=1" ];
     };
@@ -142,7 +148,10 @@ in
         -fit off
       '';
       metadata = {
-        type = [ "text-generation" ];
+        type = [
+          "text-generation"
+          "coding"
+        ];
       };
       env = [ "GGML_CUDA_ENABLE_UNIFIED_MEMORY=1" ];
     };
@@ -165,7 +174,10 @@ in
         -fit off
       '';
      metadata = {
-        type = [ "text-generation" ];
+        type = [
+          "text-generation"
+          "coding"
+        ];
       };
       env = [ "GGML_CUDA_ENABLE_UNIFIED_MEMORY=1" ];
     };
@@ -216,7 +228,10 @@ in
         -fit off
       '';
       metadata = {
-        type = [ "text-generation" ];
+        type = [
+          "text-generation"
+          "coding"
+        ];
       };
       env = [ "GGML_CUDA_ENABLE_UNIFIED_MEMORY=1" ];
     };
@@ -261,7 +276,10 @@ in
         -fit off
       '';
       metadata = {
-        type = [ "text-generation" ];
+        type = [
+          "text-generation"
+          "coding"
+        ];
       };
       env = [ "GGML_CUDA_ENABLE_UNIFIED_MEMORY=1" ];
     };
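
Note (illustrative, not part of the patch): a minimal Nix sketch of how the
new predicate behaves, assuming a made-up sampleConfig attrset shaped like the
llama-swap models option. The names sampleConfig and whisper-large are
hypothetical; filterAttrs and any come from nixpkgs lib, matching their use in
toOpencodeModels/toPiModels above. Only models whose metadata.type list
contains "coding" survive the filter.

  let
    lib = (import <nixpkgs> { }).lib;
    inherit (lib) filterAttrs any;

    # Hypothetical stand-in for llamaSwapConfig; only the shape matters here.
    sampleConfig = {
      models = {
        qwen-coder = {
          metadata.type = [
            "text-generation"
            "coding"
          ];
        };
        whisper-large = {
          metadata.type = [ "audio-transcription" ];
        };
      };
    };

    # Same predicate the patch switches toOpencodeModels/toPiModels to:
    # keep a model only if its type list includes "coding".
    codingModels = filterAttrs (name: model: any (t: t == "coding") (model.metadata.type or [ ])) (
      sampleConfig.models or { }
    );
  in
  builtins.attrNames codingModels # => [ "qwen-coder" ]

Because the filter now requires "coding", the config.nix hunks append "coding"
to the existing type lists rather than replacing "text-generation", so other
consumers that still key off "text-generation" keep seeing those models.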