Commit 0b891f4

Re-order multimodal chat formats
1 parent dd47dda commit 0b891f4

File tree

1 file changed (+6, −6 lines)

llama_cpp/server/model.py

Lines changed: 6 additions & 6 deletions
@@ -112,32 +112,32 @@ def load_llama_from_model_settings(settings: ModelSettings) -> llama_cpp.Llama:
             chat_handler = llama_cpp.llama_chat_format.Llava16ChatHandler(
                 clip_model_path=settings.clip_model_path, verbose=settings.verbose
             )
-    elif settings.chat_format == "nanollava":
+    elif settings.chat_format == "moondream":
         assert settings.clip_model_path is not None, "clip model not found"
         if settings.hf_model_repo_id is not None:
             chat_handler = (
-                llama_cpp.llama_chat_format.NanoLlavaChatHandler.from_pretrained(
+                llama_cpp.llama_chat_format.MoondreamChatHanlder.from_pretrained(
                     repo_id=settings.hf_model_repo_id,
                     filename=settings.clip_model_path,
                     verbose=settings.verbose,
                 )
             )
         else:
-            chat_handler = llama_cpp.llama_chat_format.NanoLlavaChatHandler(
+            chat_handler = llama_cpp.llama_chat_format.MoondreamChatHanlder(
                 clip_model_path=settings.clip_model_path, verbose=settings.verbose
             )
-    elif settings.chat_format == "moondream":
+    elif settings.chat_format == "nanollava":
         assert settings.clip_model_path is not None, "clip model not found"
         if settings.hf_model_repo_id is not None:
             chat_handler = (
-                llama_cpp.llama_chat_format.MoondreamChatHanlder.from_pretrained(
+                llama_cpp.llama_chat_format.NanoLlavaChatHandler.from_pretrained(
                     repo_id=settings.hf_model_repo_id,
                     filename=settings.clip_model_path,
                     verbose=settings.verbose,
                 )
             )
         else:
-            chat_handler = llama_cpp.llama_chat_format.MoondreamChatHanlder(
+            chat_handler = llama_cpp.llama_chat_format.NanoLlavaChatHandler(
                 clip_model_path=settings.clip_model_path, verbose=settings.verbose
             )
     elif settings.chat_format == "hf-autotokenizer":
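
For context, a minimal usage sketch of the code paths touched by this diff (not part of the commit): it assumes ModelSettings is importable from llama_cpp.server.settings, and the model/clip file paths below are placeholders, not real files.

# Hypothetical sketch: exercising the "moondream" branch re-ordered in this
# commit through the server's settings object. Paths are placeholders.
from llama_cpp.server.model import load_llama_from_model_settings
from llama_cpp.server.settings import ModelSettings

settings = ModelSettings(
    model="./models/model.gguf",             # placeholder text-model path
    chat_format="moondream",                 # selects the branch shown above
    clip_model_path="./models/mmproj.gguf",  # required: asserted non-None above
    verbose=True,
)
llama = load_llama_from_model_settings(settings)  # returns a llama_cpp.Llama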
