We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 22a16bd commit b2ca084 — Copy full SHA for b2ca084
llama_cpp/llama_chat_format.py
@@ -4158,9 +4158,9 @@ def gguf_function_calling(
4158
function_calling_template = None
4159
if hasattr(llama, 'model_path'):
4160
from llama_cpp.llama import Llama
4161
- metadata = Llama.get_metadata(llama.model_path)
4162
- if metadata and "tokenizer.chat.template" in metadata:
4163
- function_calling_template = metadata["tokenizer.chat.template"]
+ metadata = Llama.metadata
+ if metadata and "tokenizer.chat_template" in metadata:
+ function_calling_template = metadata["tokenizer.chat_template"]
4164
4165
4166
function_calling_template = (
0 commit comments