chore(llm): clean up models & fix llama-cpp issue
@@ -3,67 +3,67 @@ require("luasnip.loaders.from_vscode").lazy_load()
 -- Check Tab Completion
 local has_words_before = function()
     local line, col = unpack(vim.api.nvim_win_get_cursor(0))
     return col ~= 0 and
         vim.api.nvim_buf_get_lines(0, line - 1, line, true)[1]:sub(col, col)
         :match("%s") == nil
 end

 cmp.setup({
     snippet = {
-        expand = function(args) require'luasnip'.lsp_expand(args.body) end
+        expand = function(args) require 'luasnip'.lsp_expand(args.body) end
     },

     mapping = cmp.mapping.preset.insert({

         -- Tab Completion
         ["<Tab>"] = cmp.mapping(function(fallback)
             if cmp.visible() then
                 cmp.select_next_item()
             elseif has_words_before() then
                 cmp.complete()
             else
                 fallback()
             end
-        end, {"i", "s"}),
+        end, { "i", "s" }),

         -- Reverse Tab Completion
         ["<S-Tab>"] = cmp.mapping(function(fallback)
             if cmp.visible() then
                 cmp.select_prev_item()
             else
                 fallback()
             end
-        end, {"i", "s"}),
+        end, { "i", "s" }),

         -- Misc Mappings
         ['<C-b>'] = cmp.mapping.scroll_docs(-4),
         ['<C-f>'] = cmp.mapping.scroll_docs(4),
         ['<C-Space>'] = cmp.mapping.complete(),
         ['<C-e>'] = cmp.mapping.abort(),
-        ['<CR>'] = cmp.mapping.confirm({select = true})
+        ['<CR>'] = cmp.mapping.confirm({ select = true })

     }),

     -- Default Sources
     sources = cmp.config.sources({
-        {name = 'nvim_lsp'}, {name = 'luasnip'}, {name = 'path'},
-        {name = 'buffer'}
+        { name = 'nvim_lsp' }, { name = 'luasnip' }, { name = 'path' },
+        { name = 'buffer' }
     })

 })

 -- Completion - `/` and `?`
-cmp.setup.cmdline({'/', '?'}, {
+cmp.setup.cmdline({ '/', '?' }, {
     mapping = cmp.mapping.preset.cmdline(),
-    sources = {{name = 'buffer'}}
+    sources = { { name = 'buffer' } }
 })

 -- Completion = `:`
 cmp.setup.cmdline(':', {
     mapping = cmp.mapping.preset.cmdline(),
-    sources = cmp.config.sources({{name = 'path'}, {name = 'cmdline'}})
+    sources = cmp.config.sources({ { name = 'path' }, { name = 'cmdline' } })
 })

 -- Autopairs
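
Note: the hunk above is a pure formatting pass (spaces inside table braces); behavior is unchanged. For reference, a standalone illustration of what has_words_before() checks, using made-up values rather than a real buffer:

-- Illustration only, not part of the commit. has_words_before() returns
-- true when the character just before the cursor exists and is not
-- whitespace, so <Tab> opens completion mid-word but falls back to a
-- literal tab at the start of a line or after a space.
local line_text = "local x" -- pretend buffer line
local col = 7               -- pretend 0-indexed cursor column (after "x")
print(col ~= 0 and line_text:sub(col, col):match("%s") == nil) --> true
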
@@ -1,9 +1,10 @@
 local llm_endpoint = "https://llm-api.va.reichard.io"
-local llm_model = "qwen3-coder-30b-instruct"
+local llm_assistant_model = "gpt-oss-20b-thinking"
+local llm_infill_model = "qwen2.5-coder-3b-instruct"

 -- Default Llama - Toggle Llama & Copilot
-vim.g.copilot_filetypes = { ["*"] = false }
-local current_mode = "llama"
+-- vim.g.copilot_filetypes = { ["*"] = false }
+local current_mode = "copilot"
 local function toggle_llm_fim_provider()
     if current_mode == "llama" then
         vim.g.copilot_filetypes = { ["*"] = true }
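
Note: the remainder of toggle_llm_fim_provider() falls outside this hunk. A minimal sketch of the Copilot half of the toggle, mirroring the context lines above; how llama.vim itself is switched on and off is not shown in the diff, so it is deliberately left out rather than guessed:

-- Sketch only: the copilot.vim side of the toggle. The llama.vim
-- enable/disable call that presumably pairs with it is not visible in
-- this hunk and is omitted here.
local function toggle_llm_fim_provider()
    if current_mode == "llama" then
        vim.g.copilot_filetypes = { ["*"] = true }  -- hand FIM over to Copilot
        current_mode = "copilot"
    else
        vim.g.copilot_filetypes = { ["*"] = false } -- mute Copilot again
        current_mode = "llama"
    end
    vim.notify("FIM provider: " .. current_mode)
end
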
@@ -24,8 +25,10 @@ vim.keymap.set("n", "<leader>cf", toggle_llm_fim_provider, { desc = "Toggle FIM
 -- Configure LLama LLM FIM
 vim.g.llama_config = {
     endpoint = llm_endpoint .. "/infill",
-    model = llm_model,
-    n_predict = 1024,
+    model = llm_infill_model,
+    n_predict = 2048,
+    ring_n_chunks = 32,
+    enable_at_startup = false,
 }

 -- Configure Code Companion
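
Note: since the commit message mentions a llama-cpp fix, it can be worth confirming the infill route answers at all before leaning on the config above. A hedged sketch; the helper name is hypothetical, and it assumes Neovim 0.10+ for vim.system() and curl on $PATH:

-- Hypothetical helper, not part of the commit: report the HTTP status of
-- llama-server's /infill route so a dead or misrouted endpoint is caught
-- early (even a 405 proves the server is reachable).
local llm_endpoint = "https://llm-api.va.reichard.io"
local function check_infill_endpoint()
    vim.system(
        { "curl", "-s", "-o", "/dev/null", "-w", "%{http_code}",
          llm_endpoint .. "/infill" },
        { text = true },
        function(out)
            vim.schedule(function()
                vim.notify("infill endpoint: HTTP " .. (out.stdout or "?"))
            end)
        end
    )
end
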
@@ -39,7 +42,7 @@ require("codecompanion").setup({
         return require("codecompanion.adapters").extend("openai_compatible", {
             name = "llama-swap",
             formatted_name = "LlamaSwap",
-            schema = { model = { default = llm_model } },
+            schema = { model = { default = llm_assistant_model } },
             env = { url = llm_endpoint },
         })
     end,
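
Note: the diff does not show which CodeCompanion strategies actually use the llama-swap adapter. If it is wired up in the usual codecompanion.nvim way, it would look roughly like the following; these exact lines are an assumption, not part of the commit:

-- Sketch of how the adapter defined above is typically selected. The
-- strategies table is standard codecompanion.nvim configuration, but
-- this block is illustrative, not taken from the diff.
require("codecompanion").setup({
    strategies = {
        chat = { adapter = "llama-swap" },
        inline = { adapter = "llama-swap" },
    },
})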