We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
1 parent 46a6256 commit b5c182e — Copy full SHA for b5c182e
1 file changed
llama_cpp/llama_chat_format.py
@@ -4166,8 +4166,7 @@ def gguf_function_calling(
4166
4167
function_calling_template = None
4168
if hasattr(llama, 'model_path'):
4169
- from llama_cpp.llama import Llama
4170
- metadata = Llama.metadata
+ metadata = llama.metadata
4171
if metadata and "tokenizer.chat_template" in metadata:
4172
function_calling_template = metadata["tokenizer.chat_template"]
4173
0 commit comments