summaryrefslogtreecommitdiff
path: root/neovim/.config/nvim/lua/plugins
diff options
context:
space:
mode:
author    Michaël Ball <michael.ball@krotosaudio.com>  2024-12-05 16:11:40 +0000
committer Michaël Ball <michael.ball@krotosaudio.com>  2024-12-05 16:11:40 +0000
commit    278736ec4eef5cfe2759a55d3c8a1da68353eec1 (patch)
tree      b38c5ecea364e37b0f0757268c61c4bc7b6f3ca5 /neovim/.config/nvim/lua/plugins
parent    a79097c60f697940351e8eb0721d77489664e79e (diff)
Updates for gen.nvim
Diffstat (limited to 'neovim/.config/nvim/lua/plugins')
-rw-r--r--  neovim/.config/nvim/lua/plugins/gen_nvim.lua  26
1 file changed, 13 insertions, 13 deletions
diff --git a/neovim/.config/nvim/lua/plugins/gen_nvim.lua b/neovim/.config/nvim/lua/plugins/gen_nvim.lua
index 074f863..52dd67d 100644
--- a/neovim/.config/nvim/lua/plugins/gen_nvim.lua
+++ b/neovim/.config/nvim/lua/plugins/gen_nvim.lua
@@ -3,18 +3,18 @@ return {
"David-Kunz/gen.nvim",
cmd = "Gen",
opts = {
- model = "llama3.2:3b-instruct-fp16", -- The default model to use.
- quit_map = "q", -- set keymap to close the response window
- retry_map = "<c-r>", -- set keymap to re-send the current prompt
- accept_map = "<c-cr>", -- set keymap to replace the previous selection with the last result
- host = "localhost", -- The host running the Ollama service.
- port = "11434", -- The port on which the Ollama service is listening.
- display_mode = "split", -- The display mode. Can be "float" or "split" or "horizontal-split".
- show_prompt = false, -- Shows the prompt submitted to Ollama.
- show_model = true, -- Displays which model you are using at the beginning of your chat session.
- no_auto_close = false, -- Never closes the window automatically.
- file = false, -- Write the payload to a temporary file to keep the command short.
- hidden = false, -- Hide the generation window (if true, will implicitly set `prompt.replace = true`), requires Neovim >= 0.10
+ model = "llama3.2:3b-instruct-fp16", -- The default model to use.
+ quit_map = "q", -- set keymap to close the response window
+ retry_map = "<c-r>", -- set keymap to re-send the current prompt
+ accept_map = "<c-cr>", -- set keymap to replace the previous selection with the last result
+ host = "localhost", -- The host running the Ollama service.
+ port = "11434", -- The port on which the Ollama service is listening.
+ display_mode = "split", -- The display mode. Can be "float" or "split" or "horizontal-split".
+ show_prompt = false, -- Shows the prompt submitted to Ollama.
+ show_model = true, -- Displays which model you are using at the beginning of your chat session.
+ no_auto_close = false, -- Never closes the window automatically.
+ file = false, -- Write the payload to a temporary file to keep the command short.
+ hidden = false, -- Hide the generation window (if true, will implicitly set `prompt.replace = true`), requires Neovim >= 0.10
init = function(options) pcall(io.popen, "ollama serve > /dev/null 2>&1 &") end,
-- Function to initialize Ollama
command = function(options)
@@ -26,7 +26,7 @@ return {
-- The executed command must return a JSON object with { response, context }
-- (context property is optional).
-- list_models = '<omitted lua function>', -- Retrieves a list of model names
- debug = false -- Prints errors and the command which is run.
+ debug = false -- Prints errors and the command which is run.
},
},
}