David-Kunz / gen.nvim

Neovim plugin to generate text using LLMs with customizable prompts
The Unlicense
977 stars 62 forks source link

Unable to configure custom keymaps #60

Closed cramsted closed 3 months ago

cramsted commented 5 months ago

I use Lazy.nvim with the config for gen.nvim being set up in the module custom.plugins . I have it in a file called gen.lua and it contains the following:

return {
    "David-Kunz/gen.nvim",
    opts = {
        model = "mistral",      -- The default model to use.
        display_mode = "split", -- The display mode. Can be "float" or "split".
        show_prompt = false,    -- Shows the Prompt submitted to Ollama.
        show_model = true,     -- Displays which model you are using at the beginning of your chat session.
        no_auto_close = false,  -- Never closes the window automatically.
        init = function(options) pcall(io.popen, "ollama serve > /dev/null 2>&1 &") end,
        -- Function to initialize Ollama
        command = "curl --silent --no-buffer -X POST http://<some non-localhost ip>:11434/api/generate -d $body",
        -- The command for the Ollama service. You can use placeholders $prompt, $model and $body (shellescaped).
        -- This can also be a lua function returning a command string, with options as the input parameter.
        -- The executed command must return a JSON object with { response, context }
        -- (context property is optional).
        list_models = '<omitted lua function>', -- Retrieves a list of model names
        debug = false                           -- Prints errors and the command which is run.
    },
    config = function()
        vim.keymap.set({ 'n', 'v' }, '<leader>ia', ':Gen Ask<CR>', { desc = "A[I] [A]sk" })
        vim.keymap.set({ 'n', 'v' }, '<leader>ic', ':Gen Change<CR>', { desc = "A[I] [C]hange" })
        vim.keymap.set({ 'n', 'v' }, '<leader>icc', ':Gen Change_Code<CR>', { desc = "A[I] [C]hange [C]ode" })
        vim.keymap.set({ 'n', 'v' }, '<leader>ih', ':Gen Chat`<CR>', { desc = "A[I] C[h]at" })
        vim.keymap.set({ 'n', 'v' }, '<leader>ie', ':Gen Enhance_Code<CR>', { desc = "A[I] [E]nhance code" })
        vim.keymap.set({ 'n', 'v' }, '<leader>iew', ':Gen Enhance_Wording<CR>', { desc = "A[I] [E]nhace [W]ording" })
        vim.keymap.set({ 'n', 'v' }, '<leader>ieg', ':Gen Enhance_Grammar_Spelling<CR>', { desc = "A[I] [E]nhance [G]rammar" })
        vim.keymap.set({ 'n', 'v' }, '<leader>ig', ':Gen Generate<CR>', { desc = "A[I] [G]enerate" })
        vim.keymap.set({ 'n', 'v' }, '<leader>ir', ':Gen Review_Code<CR>', { desc = "A[I] [R]eview Code" })
        vim.keymap.set({ 'n', 'v' }, '<leader>is', ':Gen Summarize<CR>', { desc = "A[I] [S]ummarize" })
    end
}

If I run it as is, the keymaps will work, but the window will never populate with a response from Ollama. However, if I remove the config section, the keymaps don't work, but the CLI commands do and I start getting text back from the Ollama server.

Admittedly my Lua skills are not amazing, but based on how my other plugins are configured and some Googling around, I believe this should work. Can anyone tell me what's up?

cramsted commented 5 months ago

Follow up, moving the keymaps out of the config and into another file works just fine.

juanlopez4691 commented 4 months ago

@cramsted I think you could modify your code as:

return {
    "David-Kunz/gen.nvim",
    opts = {
        model = "mistral",      -- The default model to use.
        display_mode = "split", -- The display mode. Can be "float" or "split".
        show_prompt = false,    -- Shows the Prompt submitted to Ollama.
        show_model = true,     -- Displays which model you are using at the beginning of your chat session.
        no_auto_close = false,  -- Never closes the window automatically.
        init = function(options) pcall(io.popen, "ollama serve > /dev/null 2>&1 &") end,
        -- Function to initialize Ollama
        command = "curl --silent --no-buffer -X POST http://<some non-localhost ip>:11434/api/generate -d $body",
        -- The command for the Ollama service. You can use placeholders $prompt, $model and $body (shellescaped).
        -- This can also be a lua function returning a command string, with options as the input parameter.
        -- The executed command must return a JSON object with { response, context }
        -- (context property is optional).
        list_models = '<omitted lua function>', -- Retrieves a list of model names
        debug = false                           -- Prints errors and the command which is run.
    },
    config = function(opts)
        require("gen").setup(opts)

        vim.keymap.set({ 'n', 'v' }, '<leader>ia', ':Gen Ask<CR>', { desc = "A[I] [A]sk" })
        vim.keymap.set({ 'n', 'v' }, '<leader>ic', ':Gen Change<CR>', { desc = "A[I] [C]hange" })
        vim.keymap.set({ 'n', 'v' }, '<leader>icc', ':Gen Change_Code<CR>', { desc = "A[I] [C]hange [C]ode" })
        vim.keymap.set({ 'n', 'v' }, '<leader>ih', ':Gen Chat`<CR>', { desc = "A[I] C[h]at" })
        vim.keymap.set({ 'n', 'v' }, '<leader>ie', ':Gen Enhance_Code<CR>', { desc = "A[I] [E]nhance code" })
        vim.keymap.set({ 'n', 'v' }, '<leader>iew', ':Gen Enhance_Wording<CR>', { desc = "A[I] [E]nhace [W]ording" })
        vim.keymap.set({ 'n', 'v' }, '<leader>ieg', ':Gen Enhance_Grammar_Spelling<CR>', { desc = "A[I] [E]nhance [G]rammar" })
        vim.keymap.set({ 'n', 'v' }, '<leader>ig', ':Gen Generate<CR>', { desc = "A[I] [G]enerate" })
        vim.keymap.set({ 'n', 'v' }, '<leader>ir', ':Gen Review_Code<CR>', { desc = "A[I] [R]eview Code" })
        vim.keymap.set({ 'n', 'v' }, '<leader>is', ':Gen Summarize<CR>', { desc = "A[I] [S]ummarize" })
    end,
}

Or

return {
    "David-Kunz/gen.nvim",
    config = function()
        require("gen").setup({
            model = "mistral",      -- The default model to use.
            display_mode = "split", -- The display mode. Can be "float" or "split".
            show_prompt = false,    -- Shows the Prompt submitted to Ollama.
            show_model = true,     -- Displays which model you are using at the beginning of your chat session.
            no_auto_close = false,  -- Never closes the window automatically.
            init = function(options) pcall(io.popen, "ollama serve > /dev/null 2>&1 &") end,
            -- Function to initialize Ollama
            command = "curl --silent --no-buffer -X POST http://<some non-localhost ip>:11434/api/generate -d $body",
            -- The command for the Ollama service. You can use placeholders $prompt, $model and $body (shellescaped).
            -- This can also be a lua function returning a command string, with options as the input parameter.
            -- The executed command must return a JSON object with { response, context }
            -- (context property is optional).
            list_models = '<omitted lua function>', -- Retrieves a list of model names
            debug = false                           -- Prints errors and the command which is run.
        })

        vim.keymap.set({ 'n', 'v' }, '<leader>ia', ':Gen Ask<CR>', { desc = "A[I] [A]sk" })
        vim.keymap.set({ 'n', 'v' }, '<leader>ic', ':Gen Change<CR>', { desc = "A[I] [C]hange" })
        vim.keymap.set({ 'n', 'v' }, '<leader>icc', ':Gen Change_Code<CR>', { desc = "A[I] [C]hange [C]ode" })
        vim.keymap.set({ 'n', 'v' }, '<leader>ih', ':Gen Chat`<CR>', { desc = "A[I] C[h]at" })
        vim.keymap.set({ 'n', 'v' }, '<leader>ie', ':Gen Enhance_Code<CR>', { desc = "A[I] [E]nhance code" })
        vim.keymap.set({ 'n', 'v' }, '<leader>iew', ':Gen Enhance_Wording<CR>', { desc = "A[I] [E]nhace [W]ording" })
        vim.keymap.set({ 'n', 'v' }, '<leader>ieg', ':Gen Enhance_Grammar_Spelling<CR>', { desc = "A[I] [E]nhance [G]rammar" })
        vim.keymap.set({ 'n', 'v' }, '<leader>ig', ':Gen Generate<CR>', { desc = "A[I] [G]enerate" })
        vim.keymap.set({ 'n', 'v' }, '<leader>ir', ':Gen Review_Code<CR>', { desc = "A[I] [R]eview Code" })
        vim.keymap.set({ 'n', 'v' }, '<leader>is', ':Gen Summarize<CR>', { desc = "A[I] [S]ummarize" })
    end,
}
David-Kunz commented 3 months ago

Thank you @cramsted and @juanlopez4691 , I assume your configuration suggestion works.