return { { "stevearc/conform.nvim", -- event = 'BufWritePre', -- uncomment for format on save opts = require "configs.conform", }, { "neovim/nvim-lspconfig", config = function() require "configs.lspconfig" end, }, -- Custom Parameters (with defaults) { "David-Kunz/gen.nvim", opts = { -- model = "llama3.2:3b", -- The default model to use. model = "mistral-small:22b", -- host = "192.168.0.17", quit_map = "q", -- set keymap to close the response window retry_map = "", -- set keymap to re-send the current prompt accept_map = "", -- set keymap to replace the previous selection with the last result -- host = "localhost", -- The host running the Ollama service. host = "192.168.0.204", -- The host running the Ollama service. port = "11434", -- The port on which the Ollama service is listening. display_mode = "vertical-split", -- The display mode. Can be "float" or "split" or "horizontal-split" or "vertical-split". show_prompt = true, -- Shows the prompt submitted to Ollama. Can be true (3 lines) or "full". show_model = true, -- Displays which model you are using at the beginning of your chat session. no_auto_close = false, -- Never closes the window automatically. file = false, -- Write the payload to a temporary file to keep the command short. hidden = false, -- Hide the generation window (if true, will implicitly set `prompt.replace = true`), requires Neovim >= 0.10 init = function(options) pcall(io.popen, "ollama serve > /dev/null 2>&1 &") end, -- Function to initialize Ollama command = function(options) local body = { model = options.model, stream = true } return "curl --silent --no-buffer -X POST http://" .. options.host .. ":" .. options.port .. "/api/chat -d $body" end, -- The command for the Ollama service. You can use placeholders $prompt, $model and $body (shellescaped). -- This can also be a command string. -- The executed command must return a JSON object with { response, context } -- (context property is optional). -- list_models = '', -- Retrieves a list of model names result_filetype = "markdown", -- Configure filetype of the result buffer debug = false, -- Prints errors and the command which is run. }, }, { "MeanderingProgrammer/render-markdown.nvim", dependencies = { "nvim-treesitter/nvim-treesitter", "echasnovski/mini.nvim" }, -- if you use the mini.nvim suite -- dependencies = { 'nvim-treesitter/nvim-treesitter', 'echasnovski/mini.icons' }, -- if you use standalone mini plugins -- dependencies = { 'nvim-treesitter/nvim-treesitter', 'nvim-tree/nvim-web-devicons' }, -- if you prefer nvim-web-devicons ---@module 'render-markdown' ---@type render.md.UserConfig opts = {}, }, -- { -- "jacob411/Ollama-Copilot", -- opts = { -- model_name = "llama3:3b", -- stream_suggestion = false, -- python_command = "python3", -- filetypes = { "python", "lua", "vim", "markdown", "javascript", "typescript" }, -- ollama_model_opts = { -- num_predict = 40, -- temperature = 0.1, -- }, -- keymaps = { -- suggestion = "os", -- reject = "or", -- insert_accept = "", -- }, -- }, -- }, -- These are some examples, uncomment them if you want to see them work! -- -- { -- "nvim-treesitter/nvim-treesitter", -- opts = { -- ensure_installed = { -- "vim", "lua", "vimdoc", -- "html", "css" -- }, -- }, -- }, -- }