17 changes: 8 additions & 9 deletions dailalib/__init__.py
@@ -1,4 +1,4 @@
-__version__ = "3.15.6"
+__version__ = "3.16.0"
 
 import os
 # stop LiteLLM from querying at all to the remote server
@@ -11,13 +11,17 @@
 
 
 def create_plugin(*args, **kwargs):
-    from libbs.api import DecompilerInterface
-
+    from libbs.api import DecompilerInterface
     #
     # LLM API (through LiteLLM api)
    #
 
     litellm_api = LiteLLMAIAPI(delay_init=True)
+
+    # load the config before creating context menus; otherwise, the config is only
+    # loaded after the "OK" button is clicked in ask_settings
+    litellm_api.load_or_create_config()
 
     # create context menus for prompts
     gui_ctx_menu_actions = {
         f"DAILA/LLM/{prompt_name}": (prompt.desc, getattr(litellm_api, prompt_name))
@@ -27,12 +31,7 @@ def create_plugin(*args, **kwargs):
     gui_ctx_menu_actions["DAILA/LLM/chat"] = ("Open LLM Chat...", get_llm_chat_creator(litellm_api))
 
     # create context menus for others
-    gui_ctx_menu_actions["DAILA/LLM/Settings/update_api_key"] = ("Update API key...", litellm_api.ask_api_key)
-    gui_ctx_menu_actions["DAILA/LLM/Settings/update_pmpt_style"] = ("Change prompt style...", litellm_api.ask_prompt_style)
-    gui_ctx_menu_actions["DAILA/LLM/Settings/update_model"] = ("Change model...", litellm_api.ask_model)
-    gui_ctx_menu_actions["DAILA/LLM/Settings/update_custom_url"] = ("Set Custom OpenAI Endpoint...", litellm_api.ask_custom_endpoint)
-    gui_ctx_menu_actions["DAILA/LLM/Settings/update_custom_model"] = ("Set Custom OpenAI Model...", litellm_api.ask_custom_model)
-
+    gui_ctx_menu_actions["DAILA/LLM/Settings"] = ("Settings...", litellm_api.ask_settings)
     #
     # VarModel API (local variable renaming)
     #
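Note on the refactor above: the five per-setting menu entries collapse into a single DAILA/LLM/Settings action backed by the new DAILAConfigDialog. The gui_ctx_menu_actions table maps a slash-separated menu path to a (description, callback) tuple; a minimal sketch of how such a table can be walked (hypothetical illustration only, not libbs's actual registration API, which this diff does not show):

for path, (desc, callback) in gui_ctx_menu_actions.items():
    # e.g. "DAILA/LLM/Settings" -> ("Settings...", litellm_api.ask_settings)
    print(f"{path}: {desc!r} -> {callback}")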
40 changes: 40 additions & 0 deletions dailalib/api/litellm/__init__.py
@@ -1 +1,41 @@
 DEFAULT_MODEL = "gpt-4o"
+OPENAI_MODELS = {"gpt-4", "gpt-4o", "gpt-4-turbo", "gpt-3.5-turbo", "o1-mini", "o1-preview"}
+# TODO: populate MODEL_TO_TOKENS dynamically instead of keeping a hard-coded copy in `configuration`
+MODEL_TO_TOKENS = {
+    # TODO: update the token values for o1
+    "o1-mini": 8_000,
+    "o1-preview": 8_000,
+    "gpt-4o": 8_000,
+    "gpt-4o-mini": 16_000,
+    "gpt-4-turbo": 128_000,
+    "claude-3-5-sonnet-20240620": 200_000,
+    "gemini/gemini-pro": 12_288,
+    "vertex_ai_beta/gemini-pro": 12_288,
+    # Perplexity's llama-3.1 sonar models are in legacy mode
+    "perplexity/llama-3.1-sonar-small-128k-online": 127_072,
+    "perplexity/llama-3.1-sonar-medium-128k-online": 127_072,
+    "perplexity/llama-3.1-sonar-large-128k-online": 127_072,
+    "sonar-pro": 127_072,
+    "sonar": 127_072,
+}
+
+LLM_COST = {
+    "gpt-4o": {"prompt_price": 2.5, "completion_price": 10},
+    "gpt-4o-mini": {"prompt_price": 0.150, "completion_price": 0.600},
+    "gpt-4-turbo": {"prompt_price": 10, "completion_price": 30},
+    "claude-3-5-sonnet-20240620": {"prompt_price": 3, "completion_price": 15},
+    "gemini/gemini-pro": {"prompt_price": 0.150, "completion_price": 0.600},
+    "vertex_ai_beta/gemini-pro": {"prompt_price": 0.150, "completion_price": 0.600},
+    # Perplexity's legacy llama-3.1 sonar models are unavailable from 02/22/25
+    "perplexity/llama-3.1-sonar-small-128k-online": {"prompt_price": 0.150, "completion_price": 0.600},
+    "perplexity/llama-3.1-sonar-large-128k-online": {"prompt_price": 0.150, "completion_price": 0.600},
+    "perplexity/llama-3.1-sonar-huge-128k-online": {"prompt_price": 0.150, "completion_price": 0.600},
+    # the newly introduced sonar / sonar-pro replacements
+    "sonar": {"prompt_price": 0.150, "completion_price": 0.600},
+    "sonar-pro": {"prompt_price": 0.150, "completion_price": 0.600},
+}
+
+# imports are delayed so the constants above exist when these submodules import them
+from .litellm_api import LiteLLMAIAPI
+from .prompt_type import PromptType, ALL_STYLES, DEFAULT_STYLE
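The diff does not state the units for LLM_COST; assuming the prices are USD per million tokens (the usual convention for these providers), a minimal sketch of how a caller could combine LLM_COST and MODEL_TO_TOKENS — the helper name estimate_cost is hypothetical, not part of this PR:

from dailalib.api.litellm import LLM_COST, MODEL_TO_TOKENS

def estimate_cost(model: str, prompt_tokens: int, completion_tokens: int) -> float:
    # return 0.0 for models with no pricing entry rather than raising
    pricing = LLM_COST.get(model)
    if pricing is None:
        return 0.0
    # prices assumed to be USD per 1M tokens
    return (prompt_tokens * pricing["prompt_price"]
            + completion_tokens * pricing["completion_price"]) / 1_000_000

# e.g. a gpt-4o call that fills its 8k-token budget and returns 1k tokens: ~$0.03
print(estimate_cost("gpt-4o", MODEL_TO_TOKENS["gpt-4o"], 1_000))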
160 changes: 160 additions & 0 deletions dailalib/api/litellm/config_dialog.py
@@ -0,0 +1,160 @@
+import logging
+from typing import Optional
+
+from dailalib.configuration import DAILAConfig
+from .prompt_type import ALL_STYLES
+from . import MODEL_TO_TOKENS
+
+from libbs.ui.qt_objects import (
+    QDialog,
+    QGridLayout,
+    QHBoxLayout,
+    QLabel,
+    QLineEdit,
+    QPushButton,
+    QVBoxLayout,
+    QComboBox,
+)
+
+_l = logging.getLogger(__name__)
+# materialize as a list so Qt bindings that expect a sequence of strings accept it
+AVAILABLE_MODELS = list(MODEL_TO_TOKENS.keys())
+
+
+class DAILAConfigDialog(QDialog):
+    TITLE = "DAILA Configuration"
+
+    def __init__(self, config: DAILAConfig, parent=None):
+        """
+        Constructor for the DAILA configuration dialog.
+
+        :param config: the config object, passed from litellm_api when this dialog is opened
+        """
+        super().__init__(parent)
+        self.configured = False
+        self.DAILAConfig = config
+
+        self.setWindowTitle(self.TITLE)
+        self._main_layout = QVBoxLayout()
+        self._grid_layout = QGridLayout()
+        self.row = 0
+
+        self._init_middle_widgets()
+        self._main_layout.addLayout(self._grid_layout)
+
+        self._init_close_btn_widgets()
+
+        self.setLayout(self._main_layout)
+
+    def _init_middle_widgets(self):
+        """
+        Builds the grid of configuration widgets: model, API key, prompt style,
+        custom endpoint, and custom model.
+        """
+        # LLM Model
+        llm_model = self.DAILAConfig.model
+        llm_model_label = QLabel("LLM Model:")
+        llm_model_label.setToolTip("The model to use for LiteLLM.")
+
+        # use a dropdown for the LLM model
+        self._llm_model_edit = QComboBox(self)
+        self._llm_model_edit.addItems(AVAILABLE_MODELS)
+        self._llm_model_edit.setCurrentText(llm_model)
+        self._grid_layout.addWidget(llm_model_label, self.row, 0)
+        self._grid_layout.addWidget(self._llm_model_edit, self.row, 1)
+        self.row += 1
+
+        # API Key
+        api_key = self.DAILAConfig.api_key
+        api_key_label = QLabel("API Key:")
+        api_key_label.setToolTip("The API key to use for LiteLLM, for the selected model.")
+        self._api_key_edit = QLineEdit(self)
+        self._api_key_edit.setText(api_key)
+        self._grid_layout.addWidget(api_key_label, self.row, 0)
+        self._grid_layout.addWidget(self._api_key_edit, self.row, 1)
+        self.row += 1
+
+        # Prompt Style
+        prompt_style = self.DAILAConfig.prompt_style
+        prompt_style_label = QLabel("Prompt Style:")
+        prompt_style_label.setToolTip("The prompt style for DAILA to use; see dailalib/litellm/prompts for details.")
+
+        # use a dropdown for the prompt style
+        self._prompt_style_edit = QComboBox(self)
+        self._prompt_style_edit.addItems(ALL_STYLES)
+        self._prompt_style_edit.setCurrentText(prompt_style)
+        self._grid_layout.addWidget(prompt_style_label, self.row, 0)
+        self._grid_layout.addWidget(self._prompt_style_edit, self.row, 1)
+        self.row += 1
+
+        # Custom OpenAI Endpoint
+        custom_endpoint = self.DAILAConfig.custom_endpoint
+        custom_endpoint_label = QLabel("Custom OpenAI Endpoint:")
+        custom_endpoint_label.setToolTip("The custom OpenAI endpoint to use for LiteLLM.")
+        self._custom_endpoint_edit = QLineEdit(self)
+        self._custom_endpoint_edit.setText(custom_endpoint)
+        self._grid_layout.addWidget(custom_endpoint_label, self.row, 0)
+        self._grid_layout.addWidget(self._custom_endpoint_edit, self.row, 1)
+        self.row += 1
+
+        # Custom OpenAI Model
+        custom_model = self.DAILAConfig.custom_model
+        custom_model_label = QLabel("Custom OpenAI Model:")
+        custom_model_label.setToolTip("The custom OpenAI model to use for LiteLLM.")
+        self._custom_model_edit = QLineEdit(self)
+        self._custom_model_edit.setText(custom_model)
+        self._grid_layout.addWidget(custom_model_label, self.row, 0)
+        self._grid_layout.addWidget(self._custom_model_edit, self.row, 1)
+        self.row += 1
+
+    def _init_close_btn_widgets(self):
+        # buttons
+        self._ok_button = QPushButton(self)
+        self._ok_button.setText("OK")
+        self._ok_button.setDefault(True)
+        self._ok_button.clicked.connect(self._on_ok_clicked)
+
+        cancel_button = QPushButton(self)
+        cancel_button.setText("Cancel")
+        cancel_button.clicked.connect(self._on_cancel_clicked)
+
+        buttons_layout = QHBoxLayout()
+        buttons_layout.addWidget(self._ok_button)
+        buttons_layout.addWidget(cancel_button)
+
+        self._main_layout.addLayout(buttons_layout)
+
+    def _on_cancel_clicked(self):
+        self.close()
+
+    def parse_api_key(self, api_key_or_path: str) -> Optional[str]:
+        """
+        Parse the API key from the input string: a heuristic treats any string
+        containing a path separator as a file that holds the key.
+        """
+        if "/" in api_key_or_path or "\\" in api_key_or_path:
+            # treat as a path and read the key from the file
+            with open(api_key_or_path, "r") as f:
+                api_key = f.read().strip()
+        else:
+            api_key = api_key_or_path
+        return api_key
+
+    def _on_ok_clicked(self):
+        # write the widget values back into the shared config object
+        self.DAILAConfig.model = self._llm_model_edit.currentText()
+        self.DAILAConfig.api_key = self.parse_api_key(self._api_key_edit.text())
+        self.DAILAConfig.prompt_style = self._prompt_style_edit.currentText()
+        self.DAILAConfig.custom_endpoint = self._custom_endpoint_edit.text()
+        self.DAILAConfig.custom_model = self._custom_model_edit.text()
+        self.configured = True
+        self.close()
+
+    def config_dialog_exec(self):
+        self.exec()
+        if not self.configured:
+            _l.warning("DAILA Configuration dialog was closed without saving changes.")
+        else:
+            _l.info("DAILA Configuration dialog was closed and changes were saved.")
+        return self.DAILAConfig
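A minimal usage sketch of the new dialog, assuming a Qt event loop is already running inside the decompiler and that DAILAConfig is default-constructible (neither is shown in this diff):

from dailalib.configuration import DAILAConfig
from dailalib.api.litellm.config_dialog import DAILAConfigDialog

config = DAILAConfig()                 # assumed default-constructible
dialog = DAILAConfigDialog(config)
updated = dialog.config_dialog_exec()  # blocks until OK or Cancel
print(updated.model, updated.prompt_style)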