diff --git a/CHANGELOG.md b/CHANGELOG.md
index 7c7649f..4611125 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -8,7 +8,7 @@ Check [Keep a Changelog](http://keepachangelog.com/) for recommendations on how
 ## 0.1.1 (Unreleased)
 > Released N/A
 
-* N/A
+* Add support for ChatGPT (#12)
 
 ## 0.1.0
 > Released Feb 05, 2023
diff --git a/Eask b/Eask
index 38ee975..df132eb 100644
--- a/Eask
+++ b/Eask
@@ -15,5 +15,6 @@
 (depends-on "emacs" "26.1")
 (depends-on "openai")
+(depends-on "spinner")
 
 (setq network-security-level 'low) ; see https://github.com/jcs090218/setup-emacs-windows/issues/156#issuecomment-932956432
diff --git a/README.md b/README.md
index 8d25dc2..f4cf5f7 100644
--- a/README.md
+++ b/README.md
@@ -86,10 +86,21 @@ List of supported commands,
 | `codegpt-explain` | Explain the selected code        |
 | `codegpt-improve` | Improve, refactor or optimize it |
 
+## 🌟 Using ChatGPT
+
+The default is completing through the [Completions](https://platform.openai.com/docs/api-reference/completions)
+tunnel. If you want to use ChatGPT, do the following:
+
+```elisp
+(setq codegpt-tunnel 'chat            ; The default is 'completion
+      codegpt-model "gpt-3.5-turbo")  ; You can pick any model you want!
+```
+
 ## 📝 Customization
 
 #### 🧪 Variables
 
+- `codegpt-tunnel` - Completion channel you want to use. (Default: `completion`)
 - `codegpt-model` - ID of the model to use.
 - `codegpt-max-tokens` - The maximum number of tokens to generate in the completion.
 - `codegpt-temperature` - What sampling temperature to use.
diff --git a/codegpt.el b/codegpt.el
index 16de5f1..28a218b 100644
--- a/codegpt.el
+++ b/codegpt.el
@@ -6,7 +6,7 @@
 ;; Maintainer: Shen, Jen-Chieh
 ;; URL: https://github.com/emacs-openai/codegpt
 ;; Version: 0.1.0
-;; Package-Requires: ((emacs "26.1") (openai "0.1.0"))
+;; Package-Requires: ((emacs "26.1") (openai "0.1.0") (spinner "1.7.4"))
 ;; Keywords: convenience codegpt
 
 ;; This file is not part of GNU Emacs.
@@ -31,8 +31,12 @@
 
 ;;; Code:
 
+(require 'cl-lib)
+
 (require 'openai)
+(require 'openai-chat)
 (require 'openai-completion)
+(require 'spinner)
 
 (defgroup codegpt nil
   "Use GPT-3 tp help you write code."
@@ -58,6 +62,12 @@
   :type 'list
   :group 'codegpt)
 
+(defcustom codegpt-tunnel 'completion
+  "Tunnel to use for the tasks."
+  :type '(choice (const :tag "Through Completion" completion)
+                 (const :tag "Through ChatGPT" chat))
+  :group 'codegpt)
+
 (defcustom codegpt-model "text-davinci-003"
   "ID of the model to use."
   :type 'string
@@ -73,6 +83,58 @@
   :type 'number
   :group 'openai)
 
+(defcustom codegpt-spinner-type 'moon
+  "The type of the spinner."
+  :type '(choice (const :tag "Key to variable `spinner-types'" symbol)
+                 (const :tag "Vector of characters" vector))
+  :group 'openai)
+
+(defvar codegpt-requesting-p nil
+  "Non-nil if still requesting.")
+
+(defvar codegpt-spinner-counter 0
+  "Spinner counter.")
+
+(defvar codegpt-spinner-timer nil
+  "Spinner timer.")
+
+;;
+;;; Major Mode
+
+(defun codegpt-header-line ()
+  "Header line for CodeGPT."
+  (format " %s[Tunnel] %s, [Model] %s"
+          (if codegpt-requesting-p
+              (let* ((spinner (if (symbolp codegpt-spinner-type)
+                                  (cdr (assoc codegpt-spinner-type spinner-types))
+                                codegpt-spinner-type))
+                     (len (length spinner)))
+                (when (<= len codegpt-spinner-counter)
+                  (setq codegpt-spinner-counter 0))
+                (format "%s " (elt spinner codegpt-spinner-counter)))
+            "")
+          codegpt-tunnel codegpt-model))
+
+(defun codegpt-mode--cancel-timer ()
+  "Cancel spinner timer."
+  (when (timerp codegpt-spinner-timer)
+    (cancel-timer codegpt-spinner-timer)))
+
+;;;###autoload
+(define-derived-mode codegpt-mode fundamental-mode "CodeGPT"
+  "Major mode for `codegpt-mode'.
+
+\\<codegpt-mode-map>"
+  (setq codegpt-spinner-counter 0)
+  (setq-local header-line-format `((:eval (codegpt-header-line))))
+  (add-hook 'kill-buffer-hook #'codegpt-mode--cancel-timer nil t)
+  (codegpt-mode--cancel-timer)
+  (setq codegpt-spinner-timer (run-with-timer 0.1
+                                              0.1
+                                              (lambda ()
+                                                (cl-incf codegpt-spinner-counter)
+                                                (force-mode-line-update)))))
+
 ;;
 ;;; Application
 
@@ -82,6 +144,7 @@
   `(progn
      (openai--pop-to-buffer codegpt-buffer-name) ; create it
      (openai--with-buffer codegpt-buffer-name
+       (codegpt-mode)
        (erase-buffer)
        (insert ,instruction "\n\n")
        ,@body)))
@@ -102,19 +165,39 @@ The partial code is defined in with the region, and the START nad END are
 boundaries of that region in buffer."
+  (setq codegpt-requesting-p t)
   (let ((text (string-trim (buffer-substring start end)))
         (original-window (selected-window)))
     (codegpt--ask-in-buffer instruction
       (insert text "\n\n")
-      (openai-completion
-       (buffer-string)
+      (funcall
+       (cl-case codegpt-tunnel
+         (`completion #'openai-completion)
+         (`chat #'openai-chat))
+       (cl-case codegpt-tunnel
+         (`completion (buffer-string))
+         (`chat `[(("role" . "user")
+                   ("content" . ,(buffer-string)))]))
       (lambda (data)
+        (setq codegpt-requesting-p nil)
+        (codegpt-mode--cancel-timer)
        (openai--with-buffer codegpt-buffer-name
          (openai--pop-to-buffer codegpt-buffer-name)
-          (let* ((choices (openai--data-choices data))
-                 (result (openai--get-choice choices))
-                 (original-point (point)))
-            (insert (string-trim result) "\n")
+          (let ((original-point (point)))
+            (cl-case codegpt-tunnel
+              (`completion
+               (let* ((choices (openai--data-choices data))
+                      (result (openai--get-choice choices)))
+                 (insert (string-trim result) "\n")))
+              (`chat
+               (let ((choices (let-alist data .choices))
+                     (result))
+                 (mapc (lambda (choice)
+                         (let-alist choice
+                           (let-alist .message
+                             (setq result (string-trim .content)))))
+                       choices)
+                 (insert (string-trim result) "\n"))))
            (codegpt--fill-region original-point (point))))
        (unless codegpt-focus-p
          (select-window original-window))
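
For reviewers, a minimal usage sketch of the tunnel option introduced by this patch; it is not part of the diff and assumes `codegpt` (with its `openai` and `spinner` dependencies) is installed and an OpenAI API key is already configured for the `openai` package:

```elisp
;; Minimal usage sketch for the ChatGPT tunnel added in this patch; not part
;; of the diff itself.  Assumes `codegpt', `openai', and `spinner' are
;; installed and the API key is configured per the `openai' package.
(require 'codegpt)

;; Route requests through the chat tunnel instead of the default
;; `completion' tunnel, and pick a chat-capable model.
(setq codegpt-tunnel 'chat
      codegpt-model "gpt-3.5-turbo")

;; Then select a region and run one of the commands from the README table,
;; e.g. `M-x codegpt-explain'.
```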