Closed — orhnk closed this issue 9 months ago
-- Raw response payload returned by the PaLM API for this request: the
-- `filters` entry with reason "OTHER" means the model blocked the reply,
-- so `messages` echoes only the user's prompt (a Rust snippet) with no
-- model-generated content attached. Reproduced verbatim from the report.
{ filters = { { reason = "OTHER" } }, messages = { { author = "0", content = 'enum App {\n Browser, // JavaScript\n Discord, // Go\n HackerNews, // Rust\n IRC, // C\n Lynx, // C\n Mail, // Rust\n MusicPlayer, // C++\n Reddit, // Python\n StackOverflow, // Rust\n WhatsApp, // Go\n WorldMap, // TypeScript\n}\n\n#[derive(Debug)]\nstruct Options {\n apps: App,\n quite: bool,\n ver bose: bool,\n}\n\nstruct Installer {\n options: Options,\n}\n\npub fn main() {\n println!("Hello, world!");\n}\n\n\n// Hello!' } } }
-- lazy.nvim plugin spec for llm.nvim: lazy-loads on the :Llm command or
-- the <leader>al mapping, and wires the PaLM starter prompt as default.
{
  "gsuuon/llm.nvim",
  -- Only :Llm triggers lazy-loading; the plugin's other commands do not.
  cmd = "Llm",
  keys = {
    {
      "<leader>al",
      "<cmd> Llm<CR>",
      mode = { "n", "v" },
      desc = "LLM Generate",
    },
  },
  -- Build the options table lazily so require() runs after the plugin
  -- is on the runtimepath.
  opts = function()
    local starters = require("llm.prompts.starters")
    return {
      default_prompt = starters.palm,
      hl_group = "",
    }
  end,
  -- Forward the resolved opts straight into the plugin's setup().
  config = function(_, opts)
    require("llm").setup(opts)
  end,
}
This is the response you're getting from the PaLM API. From what I understand, you can try the text-bison model instead of chat-bison for less stringent safety filtering — check the PaLM API docs for details.
Error Message
Config