2 changes: 1 addition & 1 deletion dailalib/__init__.py
@@ -1,4 +1,4 @@
-__version__ = "3.16.1"
+__version__ = "3.16.2"
 
 import os
 # stop LiteLLM from querying at all to the remote server
3 changes: 1 addition & 2 deletions dailalib/llm_chat/__init__.py
@@ -14,9 +14,8 @@ def get_llm_chat_creator(ai_api: AIAPI) -> callable:
     add_llm_chat_to_ui = lambda *args, **kwargs: None
     if current_decompiler == IDA_DECOMPILER:
         from dailalib.llm_chat.ida import add_llm_chat_to_ui
-    elif current_decompiler == BINJA_DECOMPILER:
-        from dailalib.llm_chat.binja import add_llm_chat_to_ui
     else:
+        # TODO: we had Binja support, but needed to disable it until https://github.com/mahaloz/DAILA/issues/85
         _l.warning(f"LLM Chat not supported for decompiler %s", current_decompiler)
 
     def llm_chat_creator_wrapper(*args, **kwargs):
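
For context, the pattern this hunk relies on is a no-op default hook that is shadowed only when a supported backend is detected, so the Binja branch can be deleted without touching any call sites. Below is a minimal standalone sketch of that pattern; `IDA_DECOMPILER`, `current_decompiler`, `_l`, and the import path come from the hunk above, but the constant values and the logging setup are assumptions, not DAILA's actual initialization code.

    import logging

    _l = logging.getLogger(__name__)

    IDA_DECOMPILER = "ida"        # assumed value; only the constant's name appears in the diff
    current_decompiler = "binja"  # assumed to be detected elsewhere in dailalib

    # Default to a no-op with the same signature, so callers can always
    # invoke the hook safely even on decompilers without LLM Chat support.
    add_llm_chat_to_ui = lambda *args, **kwargs: None
    if current_decompiler == IDA_DECOMPILER:
        from dailalib.llm_chat.ida import add_llm_chat_to_ui  # import path as shown in the diff
    else:
        # logging applies the %s formatting itself, so no f-string is needed here
        _l.warning("LLM Chat not supported for decompiler %s", current_decompiler)

Because the fallback is a callable with a compatible signature, dropping a backend only changes which binding wins; the rest of `get_llm_chat_creator` is unaffected.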