| author    | Michaël Ball <michael.ball@krotosaudio.com>                   | 2025-02-11 10:12:50 +0000 |
|-----------|----------------------------------------------------------------|---------------------------|
| committer | Michaël Ball <michael.ball@krotosaudio.com>                   | 2025-02-11 10:12:50 +0000 |
| commit    | 19e419b41ad66dc252ac2a7ff168bd3fd6330e8d (patch)               |                           |
| tree      | 562984ce46c79badb099af2dd78106d6e3dd3d9a /neovim/.config/nvim  |                           |
| parent    | 2b4146c747bf7c8ef44df4368f0270c3322e7e92 (diff)                |                           |
Remove gen
Diffstat (limited to 'neovim/.config/nvim')
-rw-r--r-- | neovim/.config/nvim/lua/plugins/gen_nvim.lua | 32 |
1 file changed, 0 insertions, 32 deletions
```
diff --git a/neovim/.config/nvim/lua/plugins/gen_nvim.lua b/neovim/.config/nvim/lua/plugins/gen_nvim.lua
deleted file mode 100644
index 52dd67d..0000000
--- a/neovim/.config/nvim/lua/plugins/gen_nvim.lua
+++ /dev/null
@@ -1,32 +0,0 @@
-return {
-    {
-        "David-Kunz/gen.nvim",
-        cmd = "Gen",
-        opts = {
-            model = "llama3.2:3b-instruct-fp16", -- The default model to use.
-            quit_map = "q", -- set keymap to close the response window
-            retry_map = "<c-r>", -- set keymap to re-send the current prompt
-            accept_map = "<c-cr>", -- set keymap to replace the previous selection with the last result
-            host = "localhost", -- The host running the Ollama service.
-            port = "11434", -- The port on which the Ollama service is listening.
-            display_mode = "split", -- The display mode. Can be "float" or "split" or "horizontal-split".
-            show_prompt = false, -- Shows the prompt submitted to Ollama.
-            show_model = true, -- Displays which model you are using at the beginning of your chat session.
-            no_auto_close = false, -- Never closes the window automatically.
-            file = false, -- Write the payload to a temporary file to keep the command short.
-            hidden = false, -- Hide the generation window (if true, will implicitly set `prompt.replace = true`), requires Neovim >= 0.10
-            init = function(options) pcall(io.popen, "ollama serve > /dev/null 2>&1 &") end,
-            -- Function to initialize Ollama
-            command = function(options)
-                local body = { model = options.model, stream = true }
-                return "curl --silent --no-buffer -X POST http://" .. options.host .. ":" .. options.port .. "/api/chat -d $body"
-            end,
-            -- The command for the Ollama service. You can use placeholders $prompt, $model and $body (shellescaped).
-            -- This can also be a command string.
-            -- The executed command must return a JSON object with { response, context }
-            -- (context property is optional).
-            -- list_models = '<omitted lua function>', -- Retrieves a list of model names
-            debug = false -- Prints errors and the command which is run.
-        },
-    },
-}
```
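For context on what was removed: the deleted `command` option does not call Ollama itself; it only builds a shell command string, and gen.nvim substitutes the `$body` placeholder (shell-escaped) with the JSON payload before executing it. Below is a minimal standalone sketch of that string construction, using the defaults from the deleted config (`localhost:11434`); the `options` table and `ollama_command` name here are illustrative, not part of the repo.

```lua
-- Sketch: reproduce the curl command string the removed config generated.
-- `$body` is left literal on purpose -- it is a gen.nvim placeholder that
-- the plugin expands with the JSON request at run time.
local options = { host = "localhost", port = "11434" }

local function ollama_command(opts)
    return "curl --silent --no-buffer -X POST http://"
        .. opts.host .. ":" .. opts.port .. "/api/chat -d $body"
end

print(ollama_command(options))
-- curl --silent --no-buffer -X POST http://localhost:11434/api/chat -d $body
```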