feat(nvim): integrate CodeCompanion with LlamaSwap and Copilot

- Remove old code-companion-config.lua file
- Move LLM configuration to llm.lua with LlamaSwap and Copilot integration
- Add copilot-vim plugin to default.nix
- Update which-key bindings to include new CodeCompanion commands
- Configure CodeCompanion with chat, inline, and command strategies using LlamaSwap adapter
- Add memory configuration with default project files
- Update LSP configuration for CodeCompanion markdown rendering
This commit is contained in:
2025-12-04 17:21:38 -05:00
parent 50b03508e1
commit d64cc0e47b
6 changed files with 85 additions and 48 deletions

View File

@@ -1,27 +0,0 @@
-- Start the fidget spinner integration so CodeCompanion requests show
-- progress notifications.
require("plugins.codecompanion.fidget-spinner"):init()

-- Build the LlamaSwap HTTP adapter: an OpenAI-compatible endpoint hosted
-- on the local network, defaulting to the qwen3-coder model.
local function llama_swap_adapter()
  local adapters = require("codecompanion.adapters")
  return adapters.extend("openai_compatible", {
    name = "llama-swap",
    formatted_name = "LlamaSwap",
    schema = {
      model = { default = "qwen3-coder-30b-instruct" },
    },
    env = {
      url = "http://10.0.20.100:8080",
      api_key = "none",
    },
  })
end

-- CodeCompanion: verbose logging while the setup is being tuned; both the
-- chat and inline strategies are routed through the LlamaSwap adapter.
require("codecompanion").setup({
  opts = { log_level = "DEBUG" },
  adapters = {
    http = {
      ["llama-swap"] = llama_swap_adapter,
    },
  },
  strategies = {
    chat = { adapter = "llama-swap" },
    inline = { adapter = "llama-swap" },
  },
})

View File

@@ -2,7 +2,6 @@ require("base")
require("aerial-config") require("aerial-config")
require("autopairs-config") require("autopairs-config")
require("cmp-config") require("cmp-config")
require("code-companion-config")
require("comment-config") require("comment-config")
require("dap-config") require("dap-config")
require("diffview-config") require("diffview-config")

View File

@@ -1,21 +1,77 @@
-- Configure LLama LLM local llm_endpoint = "http://10.0.20.100:8080"
vim.g.llama_config = { local llm_model = "qwen3-coder-30b-instruct"
endpoint = "http://10.0.20.100:8080/infill",
-- model = "qwen2.5-coder-7b-instruct",
model = "qwen3-coder-30b-instruct",
n_predict = 1024,
-- api_key = "", -- Default Llama - Toggle Llama & Copilot
-- n_prefix = 256, vim.g.copilot_filetypes = { ["*"] = false }
-- n_suffix = 64, local current_mode = "llama"
-- t_max_prompt_ms = 500, local function toggle_llm_fim_provider()
-- t_max_predict_ms = 500, if current_mode == "llama" then
-- show_info = 2, vim.g.copilot_filetypes = { ["*"] = true }
-- auto_fim = true, vim.cmd("Copilot enable")
-- max_line_suffix = 8, vim.cmd("LlamaDisable")
-- max_cache_keys = 256, current_mode = "copilot"
-- ring_n_chunks = 8, vim.notify("Copilot FIM enabled", vim.log.levels.INFO)
-- ring_chunk_size = 32, else
-- ring_scope = 512, vim.g.copilot_filetypes = { ["*"] = true }
-- ring_update_ms = 1000, vim.cmd("Copilot disable")
vim.cmd("LlamaEnable")
current_mode = "llama"
vim.notify("Llama FIM enabled", vim.log.levels.INFO)
end
end
vim.keymap.set("n", "<leader>cf", toggle_llm_fim_provider, { desc = "Toggle FIM (Llama / Copilot)" })
-- Configure LLama LLM FIM
vim.g.llama_config = {
endpoint = llm_endpoint .. "/infill",
model = llm_model,
n_predict = 1024,
}
-- Configure Code Companion
-- Spinner integration for request progress notifications.
require("plugins.codecompanion.fidget-spinner"):init()
require("codecompanion").setup({
  -- Floating chat window at 60% editor width.
  display = { chat = { window = { layout = "float", width = 0.6 } } },
  adapters = {
    http = {
      -- Hide the built-in adapter list; only the adapters below are offered.
      opts = { show_defaults = false, },
      -- LlamaSwap: OpenAI-compatible endpoint; llm_endpoint / llm_model are
      -- locals defined at the top of this file.
      -- NOTE(review): the old config also set env.api_key = "none" — confirm
      -- the endpoint accepts requests without an API key field.
      ["llama-swap"] = function()
        return require("codecompanion.adapters").extend("openai_compatible", {
          name = "llama-swap",
          formatted_name = "LlamaSwap",
          schema = { model = { default = llm_model } },
          env = { url = llm_endpoint },
        })
      end,
      copilot = require("codecompanion.adapters.http.copilot"),
    },
    acp = { opts = { show_defaults = false } },
  },
  -- All three strategies default to the local LlamaSwap adapter.
  strategies = {
    chat = { adapter = "llama-swap" },
    inline = { adapter = "llama-swap" },
    cmd = { adapter = "llama-swap" },
  },
  -- Fixed typo: was `dispay`, which the plugin would silently ignore.
  -- NOTE(review): verify `chat.display` is a valid top-level key in the
  -- CodeCompanion config schema — provider settings may belong under
  -- `display` or a picker-specific option instead.
  chat = { display = "telescope" },
  memory = {
    opts = { chat = { enabled = true } },
    default = {
      description = "Collection of common files for all projects",
      files = {
        ".clinerules",
        ".cursorrules",
        ".goosehints",
        ".rules",
        ".windsurfrules",
        ".github/copilot-instructions.md",
        "AGENT.md",
        "AGENTS.md",
        ".cursor/rules/",
        { path = "CLAUDE.md", parser = "claude" },
        { path = "CLAUDE.local.md", parser = "claude" },
        { path = "~/.claude/CLAUDE.md", parser = "claude" },
      },
      is_default = true,
    },
  },
})

View File

@@ -11,6 +11,13 @@ vim.api.nvim_create_autocmd("FileType", {
require('render-markdown').setup({ require('render-markdown').setup({
completions = { lsp = { enabled = true } }, completions = { lsp = { enabled = true } },
file_types = { 'markdown', 'codecompanion' }, file_types = { 'markdown', 'codecompanion' },
html = {
-- CodeCompanion Markdown Tweaks
tag = {
file = { icon = '󰨸 ', highlight = 'Normal' },
buf = { icon = '󰂥 ', highlight = 'Normal' },
},
},
}) })
------------------------------------------------------ ------------------------------------------------------

View File

@@ -7,7 +7,8 @@ wk.add({
{ "K", desc = "Definition Hover" }, { "K", desc = "Definition Hover" },
{ "<leader>a", desc = "Aerial" }, { "<leader>a", desc = "Aerial" },
{ "<leader>c", desc = "CodeCompanion" }, { "<leader>c", desc = "CodeCompanion" },
{ "<leader>cc", "<cmd>CodeCompanionChat Toggle<cr>", desc = "Toggle Chat" }, { "<leader>cn", "<cmd>CodeCompanionChat<cr>", desc = "New Chat", mode = { "v", "n" } },
{ "<leader>ct", "<cmd>CodeCompanionChat Toggle<cr>", desc = "Toggle Chat" },
{ "<leader>ci", "<cmd>CodeCompanion<cr>", desc = "Inline Prompt", mode = "v" }, { "<leader>ci", "<cmd>CodeCompanion<cr>", desc = "Inline Prompt", mode = "v" },
{ "<leader>db", desc = "Toggle Breakpoint" }, { "<leader>db", desc = "Toggle Breakpoint" },
{ "<leader>dc", desc = "Continue" }, { "<leader>dc", desc = "Continue" },

View File

@@ -42,6 +42,7 @@ in
aerial-nvim # Code Outline aerial-nvim # Code Outline
codecompanion-nvim # CodeCompanion codecompanion-nvim # CodeCompanion
comment-nvim # Code Comments comment-nvim # Code Comments
copilot-vim # GitHub Copilot
diffview-nvim # Diff View diffview-nvim # Diff View
fidget-nvim # Notification Helper fidget-nvim # Notification Helper
gitsigns-nvim # Git Blame gitsigns-nvim # Git Blame