1 file changed: +14 −0 lines changed

@@ -171,6 +171,20 @@ def load_llama_from_model_settings(settings: ModelSettings) -> llama_cpp.Llama:
171171 chat_handler = llama_cpp .llama_chat_format .MiniCPMv26ChatHandler (
172172 clip_model_path = settings .clip_model_path , verbose = settings .verbose
173173 )
174+ elif settings .chat_format == "gemma3" :
175+ assert settings .clip_model_path is not None , "clip model not found"
176+ if settings .hf_model_repo_id is not None :
177+ chat_handler = (
178+ llama_cpp .llama_chat_format .Gemma3ChatHandler .from_pretrained (
179+ repo_id = settings .hf_model_repo_id ,
180+ filename = settings .clip_model_path ,
181+ verbose = settings .verbose ,
182+ )
183+ )
184+ else :
185+ chat_handler = llama_cpp .llama_chat_format .Gemma3ChatHandler (
186+ clip_model_path = settings .clip_model_path , verbose = settings .verbose
187+ )
174188 elif settings .chat_format == "qwen2.5-vl" :
175189 assert settings .clip_model_path is not None , "clip model not found"
176190 if settings .hf_model_repo_id is not None :
You can’t perform that action at this time.
0 commit comments