s-kostyaev / ellama

Ellama is a tool for interacting with large language models from Emacs.
GNU General Public License v3.0
378 stars 27 forks source link

error in process filter: Wrong number of arguments: mapconcat, 2 #9

Closed veschin closed 8 months ago

veschin commented 8 months ago

On any call of an `ellama-` function,

I get this error: error in process filter: Wrong number of arguments: mapconcat, 2

specs

Doom Emacs (Emacs 28.2), macOS (Apple M2)

my configuration is

(use-package ellama
  :init
  (setq ellama-language "Russian")
  (require 'llm-ollama)
  (setq ellama-provider
        (make-llm-ollama
         :chat-model "mistral"
         :embedding-model "mistral")))

By the way, `setopt` doesn't work for me either; it fails with this error:

Symbol’s function definition is void: setopt

full error trace

Debugger entered--Lisp error: (wrong-number-of-arguments mapconcat 2)
  (mapconcat #'(lambda (line) (assoc-default 'response (json-read-from-string line))) (seq-subseq all-lines last-response))
  (concat current-response (mapconcat #'(lambda (line) (assoc-default 'response (json-read-from-string line))) (seq-subseq all-lines last-response)))
  (setq current-response (concat current-response (mapconcat #'(lambda (line) (assoc-default 'response (json-read-from-string line))) (seq-subseq all-lines last-response))))
  (let ((all-lines (seq-filter #'(lambda (line) (string-match-p "\\`{" line)) (split-string (buffer-substring-no-properties 1 end-pos) "\n" t)))) (setq current-response (concat current-response (mapconcat #'(lambda (line) (assoc-default 'response (json-read-from-string line))) (seq-subseq all-lines last-response)))) (setq last-response (length all-lines)))
  (progn (let ((all-lines (seq-filter #'(lambda (line) (string-match-p "\\`{" line)) (split-string (buffer-substring-no-properties 1 end-pos) "\n" t)))) (setq current-response (concat current-response (mapconcat #'(lambda (line) (assoc-default ... ...)) (seq-subseq all-lines last-response)))) (setq last-response (length all-lines))))
  (if end-pos (progn (let ((all-lines (seq-filter #'(lambda ... ...) (split-string (buffer-substring-no-properties 1 end-pos) "\n" t)))) (setq current-response (concat current-response (mapconcat #'(lambda ... ...) (seq-subseq all-lines last-response)))) (setq last-response (length all-lines)))))
  (let* ((end-pos (save-excursion (goto-char (point-max)) (if (search-backward-regexp "done\":false}$" nil t) (progn (pos-eol)))))) (if end-pos (progn (let ((all-lines (seq-filter #'... (split-string ... "\n" t)))) (setq current-response (concat current-response (mapconcat #'... (seq-subseq all-lines last-response)))) (setq last-response (length all-lines))))))
  (progn (insert response) (let* ((end-pos (save-excursion (goto-char (point-max)) (if (search-backward-regexp "done\":false}$" nil t) (progn (pos-eol)))))) (if end-pos (progn (let ((all-lines (seq-filter ... ...))) (setq current-response (concat current-response (mapconcat ... ...))) (setq last-response (length all-lines)))))))
  (unwind-protect (progn (insert response) (let* ((end-pos (save-excursion (goto-char (point-max)) (if (search-backward-regexp "done\":false}$" nil t) (progn ...))))) (if end-pos (progn (let ((all-lines ...)) (setq current-response (concat current-response ...)) (setq last-response (length all-lines))))))) (and (buffer-name temp-buffer) (kill-buffer temp-buffer)))
  (save-current-buffer (set-buffer temp-buffer) (unwind-protect (progn (insert response) (let* ((end-pos (save-excursion (goto-char ...) (if ... ...)))) (if end-pos (progn (let (...) (setq current-response ...) (setq last-response ...)))))) (and (buffer-name temp-buffer) (kill-buffer temp-buffer))))
  (let ((temp-buffer (generate-new-buffer " *temp*" t))) (save-current-buffer (set-buffer temp-buffer) (unwind-protect (progn (insert response) (let* ((end-pos (save-excursion ... ...))) (if end-pos (progn (let ... ... ...))))) (and (buffer-name temp-buffer) (kill-buffer temp-buffer)))))
  (let ((current-response llm-ollama-current-response) (last-response llm-ollama-last-response)) (let ((temp-buffer (generate-new-buffer " *temp*" t))) (save-current-buffer (set-buffer temp-buffer) (unwind-protect (progn (insert response) (let* ((end-pos ...)) (if end-pos (progn ...)))) (and (buffer-name temp-buffer) (kill-buffer temp-buffer))))) (if (> (length current-response) (length llm-ollama-current-response)) (progn (setq llm-ollama-last-response last-response) (setq llm-ollama-current-response current-response))) current-response)
  llm-ollama--get-partial-chat-response("\n{\"model\":\"mistral\",\"created_at\":\"2023-11-04T21:52...")
  (and t (llm-ollama--get-partial-chat-response data))
  (let* ((response (and t (llm-ollama--get-partial-chat-response data)))) (if response (llm-request-callback-in-buffer buf partial-callback response) nil))
  (closure ((buf . #<buffer *ellama*>) (error-callback closure ((insert-text closure ((point . 25) (end . #<marker (moves after insertion) at 25 in *ellama*>) (start . #<marker at 25 in *ellama*>) (prompt . "ss") t) (text) (save-current-buffer (set-buffer ...) (save-excursion ... ... ...))) (point . 25) (end . #<marker (moves after insertion) at 25 in *ellama*>) (start . #<marker at 25 in *ellama*>) (prompt . "ss") t) (_ msg) (error "Error calling the LLM: %s" msg)) (response-callback closure ((insert-text closure ((point . 25) (end . #<marker (moves after insertion) at 25 in *ellama*>) (start . #<marker at 25 in *ellama*>) (prompt . "ss") t) (text) (save-current-buffer (set-buffer ...) (save-excursion ... ... ...))) (point . 25) (end . #<marker (moves after insertion) at 25 in *ellama*>) (start . #<marker at 25 in *ellama*>) (prompt . "ss") t) (text) (funcall insert-text text) (save-current-buffer (set-buffer ellama-buffer) (save-excursion (goto-char (point-max)) (insert "\n\n")) (spinner-stop))) (partial-callback closure ((point . 25) (end . #<marker (moves after insertion) at 25 in *ellama*>) (start . #<marker at 25 in *ellama*>) (prompt . "ss") t) (text) (save-current-buffer (set-buffer (marker-buffer start)) (save-excursion (goto-char start) (delete-region start end) (insert text)))) (prompt . #s(llm-chat-prompt :context nil :examples nil :interactions (#s(llm-chat-prompt-interaction :role user :content "ss")) :temperature nil :max-tokens nil)) (provider . #s(llm-ollama :host nil :port nil :chat-model "mistral" :embedding-model "mistral")) cl-struct-llm-ollama-tags t) (data) (let* ((response (and t (llm-ollama--get-partial-chat-response data)))) (if response (llm-request-callback-in-buffer buf partial-callback response) nil)))("\n{\"model\":\"mistral\",\"created_at\":\"2023-11-04T21:52...")
  funcall((closure ((buf . #<buffer *ellama*>) (error-callback closure ((insert-text closure ((point . 25) (end . #<marker (moves after insertion) at 25 in *ellama*>) (start . #<marker at 25 in *ellama*>) (prompt . "ss") t) (text) (save-current-buffer (set-buffer ...) (save-excursion ... ... ...))) (point . 25) (end . #<marker (moves after insertion) at 25 in *ellama*>) (start . #<marker at 25 in *ellama*>) (prompt . "ss") t) (_ msg) (error "Error calling the LLM: %s" msg)) (response-callback closure ((insert-text closure ((point . 25) (end . #<marker (moves after insertion) at 25 in *ellama*>) (start . #<marker at 25 in *ellama*>) (prompt . "ss") t) (text) (save-current-buffer (set-buffer ...) (save-excursion ... ... ...))) (point . 25) (end . #<marker (moves after insertion) at 25 in *ellama*>) (start . #<marker at 25 in *ellama*>) (prompt . "ss") t) (text) (funcall insert-text text) (save-current-buffer (set-buffer ellama-buffer) (save-excursion (goto-char (point-max)) (insert "\n\n")) (spinner-stop))) (partial-callback closure ((point . 25) (end . #<marker (moves after insertion) at 25 in *ellama*>) (start . #<marker at 25 in *ellama*>) (prompt . "ss") t) (text) (save-current-buffer (set-buffer (marker-buffer start)) (save-excursion (goto-char start) (delete-region start end) (insert text)))) (prompt . #s(llm-chat-prompt :context nil :examples nil :interactions (#s(llm-chat-prompt-interaction :role user :content "ss")) :temperature nil :max-tokens nil)) (provider . #s(llm-ollama :host nil :port nil :chat-model "mistral" :embedding-model "mistral")) cl-struct-llm-ollama-tags t) (data) (let* ((response (and t (llm-ollama--get-partial-chat-response data)))) (if response (llm-request-callback-in-buffer buf partial-callback response) nil))) "\n{\"model\":\"mistral\",\"created_at\":\"2023-11-04T21:52...")
  (progn (funcall llm-request--partial-callback (llm-request--content)))
  (if (and llm-request--partial-callback (boundp 'url-http-end-of-headers) url-http-end-of-headers) (progn (funcall llm-request--partial-callback (llm-request--content))))
  (save-excursion (if (and llm-request--partial-callback (boundp 'url-http-end-of-headers) url-http-end-of-headers) (progn (funcall llm-request--partial-callback (llm-request--content)))))
  (progn (save-excursion (if (and llm-request--partial-callback (boundp 'url-http-end-of-headers) url-http-end-of-headers) (progn (funcall llm-request--partial-callback (llm-request--content))))))
  (unwind-protect (progn (save-excursion (if (and llm-request--partial-callback (boundp 'url-http-end-of-headers) url-http-end-of-headers) (progn (funcall llm-request--partial-callback (llm-request--content)))))) (set-match-data save-match-data-internal 'evaporate))
  (let ((save-match-data-internal (match-data))) (unwind-protect (progn (save-excursion (if (and llm-request--partial-callback (boundp 'url-http-end-of-headers) url-http-end-of-headers) (progn (funcall llm-request--partial-callback (llm-request--content)))))) (set-match-data save-match-data-internal 'evaporate)))
  (progn (let ((save-match-data-internal (match-data))) (unwind-protect (progn (save-excursion (if (and llm-request--partial-callback (boundp ...) url-http-end-of-headers) (progn (funcall llm-request--partial-callback ...))))) (set-match-data save-match-data-internal 'evaporate))))
  (if (= 0 pre-change) (progn (let ((save-match-data-internal (match-data))) (unwind-protect (progn (save-excursion (if (and llm-request--partial-callback ... url-http-end-of-headers) (progn ...)))) (set-match-data save-match-data-internal 'evaporate)))))
  llm-request--handle-new-content(210 310 0)
  url-http-generic-filter(#<process localhost> "5e\15\n{\"model\":\"mistral\",\"created_at\":\"2023-11-04T21...")
s-kostyaev commented 8 months ago

`setopt` was introduced in Emacs 29, so it is unavailable on Emacs 28.2. @ahyatt Could you please see what's wrong in the process filter with llm-ollama?

ahyatt commented 8 months ago

I've fixed the issue in the latest commit, and I'll make a bugfix release tonight.

s-kostyaev commented 8 months ago

@veschin thank you for the report. @ahyatt thank you for the fast fix.