We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent f9471b6 commit 6f59c3e (Copy full SHA for 6f59c3e)
llama_cpp/llama_chat_format.py
@@ -4020,9 +4020,9 @@ def gguf_function_calling(
4020
function_calling_template = None
4021
if hasattr(llama, 'model_path'):
4022
from llama_cpp.llama import Llama
4023
- metadata = Llama.get_metadata(llama.model_path)
4024
- if metadata and "tokenizer.chat.template" in metadata:
4025
- function_calling_template = metadata["tokenizer.chat.template"]
+ metadata = Llama.metadata
+ if metadata and "tokenizer.chat_template" in metadata:
+ function_calling_template = metadata["tokenizer.chat_template"]
4026
4027
4028
function_calling_template = (
0 commit comments