[Nvim] update

2025-08-04 02:27:47 +02:00
parent 8d1ad42d68
commit ca4806267c
7 changed files with 1043 additions and 72 deletions


@@ -5,7 +5,6 @@ return {
opts = require "configs.conform",
},
-- These are some examples, uncomment them if you want to see them work!
{
"neovim/nvim-lspconfig",
config = function()
@@ -13,18 +12,85 @@ return {
end,
},
{
  "nvim-treesitter/nvim-treesitter",
  opts = {
    ensure_installed = {
      "vim",
      "lua",
      "vimdoc",
      "html",
      "css",
      "typescript",
      "javascript",
    },
  },
},
-- Custom Parameters (with defaults)
{
  "David-Kunz/gen.nvim",
  opts = {
-- model = "llama3.2:3b", -- The default model to use.
model = "mistral-small:22b",
-- host = "192.168.0.17",
quit_map = "q", -- set keymap to close the response window
retry_map = "<c-r>", -- set keymap to re-send the current prompt
accept_map = "<c-cr>", -- set keymap to replace the previous selection with the last result
-- host = "localhost", -- The host running the Ollama service.
host = "192.168.0.204", -- The host running the Ollama service.
port = "11434", -- The port on which the Ollama service is listening.
display_mode = "vertical-split", -- The display mode. Can be "float" or "split" or "horizontal-split" or "vertical-split".
show_prompt = true, -- Shows the prompt submitted to Ollama. Can be true (3 lines) or "full".
show_model = true, -- Displays which model you are using at the beginning of your chat session.
no_auto_close = false, -- Never closes the window automatically.
file = false, -- Write the payload to a temporary file to keep the command short.
hidden = false, -- Hide the generation window (if true, will implicitly set `prompt.replace = true`), requires Neovim >= 0.10
init = function(options)
pcall(io.popen, "ollama serve > /dev/null 2>&1 &")
end,
-- Function to initialize Ollama
command = function(options)
local body = { model = options.model, stream = true }
return "curl --silent --no-buffer -X POST http://"
.. options.host
.. ":"
.. options.port
.. "/api/chat -d $body"
end,
-- The command for the Ollama service. You can use placeholders $prompt, $model and $body (shellescaped).
-- This can also be a command string.
-- The executed command must return a JSON object with { response, context }
-- (context property is optional).
-- list_models = '<omitted lua function>', -- Retrieves a list of model names
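-- A commented sketch of the string form mentioned above, reusing the same
-- host and port (illustrative only, not the command this config uses):
-- command = "curl --silent --no-buffer -X POST http://192.168.0.204:11434/api/chat -d $body",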
result_filetype = "markdown", -- Configure filetype of the result buffer
debug = false, -- Prints errors and the command which is run.
},
},
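-- A minimal sketch (not enabled) of how gen.nvim is typically driven once the
-- spec above loads: prompts live in require("gen").prompts and the :Gen command
-- opens the prompt picker. The "Fix_Grammar" prompt name and <leader>] mapping
-- below are illustrative assumptions, not part of this config.
--
-- require("gen").prompts["Fix_Grammar"] = {
--   prompt = "Fix the grammar and spelling in the following text:\n$text",
--   replace = true, -- write the result over the visual selection
-- }
-- vim.keymap.set({ "n", "v" }, "<leader>]", ":Gen<CR>", { desc = "gen.nvim prompt picker" })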
{
"MeanderingProgrammer/render-markdown.nvim",
dependencies = { "nvim-treesitter/nvim-treesitter", "echasnovski/mini.nvim" }, -- if you use the mini.nvim suite
-- dependencies = { 'nvim-treesitter/nvim-treesitter', 'echasnovski/mini.icons' }, -- if you use standalone mini plugins
-- dependencies = { 'nvim-treesitter/nvim-treesitter', 'nvim-tree/nvim-web-devicons' }, -- if you prefer nvim-web-devicons
---@module 'render-markdown'
---@type render.md.UserConfig
opts = {},
},
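-- A small sketch (not enabled): render-markdown.nvim exposes a :RenderMarkdown
-- user command, so rendering can be toggled with a mapping like the one below.
-- The <leader>mr key choice is an illustrative assumption.
--
-- vim.keymap.set("n", "<leader>mr", "<cmd>RenderMarkdown toggle<CR>", { desc = "Toggle markdown rendering" })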
-- {
-- "jacob411/Ollama-Copilot",
-- opts = {
-- model_name = "llama3:3b",
-- stream_suggestion = false,
-- python_command = "python3",
-- filetypes = { "python", "lua", "vim", "markdown", "javascript", "typescript" },
-- ollama_model_opts = {
-- num_predict = 40,
-- temperature = 0.1,
-- },
-- keymaps = {
-- suggestion = "<leader>os",
-- reject = "<leader>or",
-- insert_accept = "<Tab>",
-- },
-- },
-- },
-- These are some examples, uncomment them if you want to see them work!
--
-- {
-- "nvim-treesitter/nvim-treesitter",
-- opts = {
-- ensure_installed = {
-- "vim", "lua", "vimdoc",
-- "html", "css"
-- },
-- },
-- },
--
}