[Transformers] Add an argument trust_remote_code on transformers pipeline initialization. (#1173)
dbogunowicz committed Aug 8, 2023
1 parent ffeb98f commit c8f6b0e
Showing 1 changed file with 9 additions and 1 deletion.
src/deepsparse/transformers/pipelines/pipeline.py: 9 additions & 1 deletion
@@ -66,16 +66,22 @@ class TransformersPipeline(Pipeline, Bucketable):
         If a list of lengths is provided, then for each length, a model and
         tokenizer will be compiled capable of handling that sequence length
         (also known as a bucket). Default is 128
+    :param trust_remote_code: if True, will trust remote code. This option
+        should only be set to `True` for repositories you trust and in which
+        you have read the code, as it will execute possibly unsafe code
+        on your local machine. Default is False
     """

     def __init__(
         self,
         *,
         sequence_length: Union[int, List[int]] = 128,
+        trust_remote_code: bool = False,
         **kwargs,
     ):

         self._sequence_length = sequence_length
+        self._trust_remote_code = trust_remote_code

         self.config = None
         self.tokenizer = None
@@ -111,7 +117,9 @@ def setup_onnx_file_path(self) -> str:
         )

         self.config = AutoConfig.from_pretrained(
-            config_path, finetuning_task=self.task if hasattr(self, "task") else None
+            config_path,
+            trust_remote_code=self._trust_remote_code,
+            finetuning_task=self.task if hasattr(self, "task") else None,
         )
         self.tokenizer = AutoTokenizer.from_pretrained(
             tokenizer_path, model_max_length=self.sequence_length
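For reference, a minimal usage sketch of the new argument, assuming it is forwarded through `Pipeline.create`'s keyword arguments into `TransformersPipeline.__init__`; the task name and model path below are placeholders, not part of this commit:

from deepsparse import Pipeline

# Hypothetical setup: model_path would point to a local deployment directory
# or SparseZoo stub of your choosing.
text_pipeline = Pipeline.create(
    task="text_classification",
    model_path="path/to/deployment",  # placeholder path
    sequence_length=128,
    trust_remote_code=True,  # only enable for repositories whose code you have reviewed
)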
