gsuuon / model.nvim

Neovim plugin for interacting with LLMs and building editor-integrated prompts.

[PaLM]: filters.reason = "OTHER" #19

Closed: orhnk closed this issue 9 months ago

orhnk commented 9 months ago

Error Message

{
  filters = { {
      reason = "OTHER"
    } },
  messages = { {
      author = "0",
      content = 'enum App {\n    Browser,       // JavaScript\n    Discord,       // Go\n    HackerNews,    // Rust\n    IRC,           // C\n    Lynx,          // C\n    Mail,          // Rust\n    MusicPlayer,   // C++\n    Reddit,        // Python\n    StackOverflow, // Rust\n    WhatsApp,      // Go\n    WorldMap,      // TypeScript\n}\n\n#[derive(Debug)]\nstruct Options {\n    apps: App,\n    quite: bool,\n    verbose: bool,\n}\n\nstruct Installer {\n    options: Options,\n}\n\npub fn main() {\n    println!("Hello, world!");\n}\n\n\n// Hello!'
    } }
}

Config

  {
    "gsuuon/llm.nvim",

    cmd = "Llm", -- Others cmds are ignored for now

    keys = {
      {
        "<leader>al",
        "<cmd> Llm<CR>",
        mode = { "n", "v" },
        desc = "LLM Generate",
      },
    },

    config = function(_, opts)
      require("llm").setup(opts)
    end,

    opts = function()
      return {
        default_prompt = require("llm.prompts.starters").palm,
        hl_group = "",
        -- prompts = {},
      }
    end,
    -- opts = {
    --   default = {
    --   },
    -- },
  }
gsuuon commented 9 months ago

This is the response you're getting from the PaLM API. From what I understand, the text-bison model does less stringent filtering than chat-bison, so you can try that instead - check the PaLM API docs for details.
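
For reference, here's a rough, untested sketch of what your opts function could return to use a plain text prompt instead of the chat starter. It assumes the PaLM provider module lives at `llm.providers.palm` and sends a body with `prompt.text` to the text-bison endpoint rather than building a chat-bison request - the builder shape is a guess, so check the provider source and starter prompts to confirm:

    opts = function()
      -- Assumption: the palm provider module is exposed at this path
      local palm = require('llm.providers.palm')

      return {
        default_prompt = {
          provider = palm,
          builder = function(input)
            return {
              prompt = {
                -- Plain text prompt (no chat messages), so the request
                -- should go to text-bison instead of chat-bison
                text = input,
              },
            }
          end,
        },
        hl_group = "",
      }
    end,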