1 parent 9ae7a7c commit 46a6256
llama_cpp/llama_chat_format.py
@@ -4167,9 +4167,9 @@ def gguf_function_calling(
     function_calling_template = None
     if hasattr(llama, 'model_path'):
         from llama_cpp.llama import Llama
-        metadata = Llama.get_metadata(llama.model_path)
-        if metadata and "tokenizer.chat.template" in metadata:
-            function_calling_template = metadata["tokenizer.chat.template"]
+        metadata = Llama.metadata
+        if metadata and "tokenizer.chat_template" in metadata:
+            function_calling_template = metadata["tokenizer.chat_template"]


     function_calling_template = (
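
For context, the change stops re-reading metadata from the model file and instead looks the chat template up in the metadata already attached to the loaded model, under the key "tokenizer.chat_template". Below is a minimal sketch of that lookup, not part of the commit, assuming a loaded llama_cpp.Llama instance exposes its GGUF key/value metadata as a dict on a metadata attribute; the model path is hypothetical.

# Minimal sketch; assumes Llama exposes GGUF metadata as a dict attribute.
from llama_cpp import Llama

llama = Llama(model_path="./model.gguf")  # hypothetical model path

# Prefer a chat template embedded in the model's metadata, if present.
function_calling_template = None
metadata = getattr(llama, "metadata", None)
if metadata and "tokenizer.chat_template" in metadata:
    function_calling_template = metadata["tokenizer.chat_template"]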