From e36a20cc29f9289b470e3cf0dd67e3eef0ff1160 Mon Sep 17 00:00:00 2001
From: Ilia Kulikov
Date: Thu, 26 Sep 2024 14:51:32 -0700
Subject: [PATCH] Update ram/data_utils.py

Co-authored-by: Golovneva <103262907+Golovneva@users.noreply.github.com>
---
 ram/data_utils.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ram/data_utils.py b/ram/data_utils.py
index b429f34..f017acc 100644
--- a/ram/data_utils.py
+++ b/ram/data_utils.py
@@ -481,7 +481,7 @@ def wrap_text_with_chat_template(
     Args:
         text (str): text of the user prompt
         tokenizer (AutoTokenizer): Instantiated tokenizer.
-            Example: AutoTokenizer.from_pretrained("/fsx-ram/shared/llama2_hf/llama-2-7b-chat-hf/")
+            Example: AutoTokenizer.from_pretrained("")
         system_prompt (str): added following the model if tokenizer supports that, otherwise
             the error will be returned. Default is None.
     """
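
For context, below is a minimal usage sketch. It is not part of the patch: it assumes that the docstring above refers to Hugging Face's standard AutoTokenizer / apply_chat_template API, and the model identifier is a hypothetical placeholder, since the patch deliberately leaves the from_pretrained() argument empty.

# Illustrative sketch only, not taken from the patch.
from transformers import AutoTokenizer

# "<chat-model-or-local-path>" is a placeholder, not a real identifier.
tokenizer = AutoTokenizer.from_pretrained("<chat-model-or-local-path>")

messages = [
    # Optional system message, analogous to the docstring's system_prompt argument.
    {"role": "system", "content": "You are a helpful assistant."},
    # User message, analogous to the docstring's text argument.
    {"role": "user", "content": "Summarize this paragraph for me."},
]

# Render the conversation with the tokenizer's chat template. The actual
# wrap_text_with_chat_template implementation is not shown in this patch,
# so this call is only assumed to mirror its behavior.
wrapped = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)
print(wrapped)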