diff --git a/ram/data_utils.py b/ram/data_utils.py index b429f34..f017acc 100644 --- a/ram/data_utils.py +++ b/ram/data_utils.py @@ -481,7 +481,7 @@ def wrap_text_with_chat_template( Args: text (str): text of the user prompt tokenizer (AutoTokenizer): Instantiated tokenizer. - Example: AutoTokenizer.from_pretrained("/fsx-ram/shared/llama2_hf/llama-2-7b-chat-hf/") + Example: AutoTokenizer.from_pretrained("meta-llama/Llama-2-7b-chat-hf") system_prompt (str): added following the model if tokenizer supports that, otherwise the error will be returned. Default is None. """