Skip to content

Commit

Permalink
select largest tokenizer
Browse files Browse the repository at this point in the history
  • Loading branch information
horheynm committed Aug 8, 2023
1 parent a674414 commit 7a7df94
Show file tree
Hide file tree
Showing 5 changed files with 5 additions and 5 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -330,7 +330,7 @@ def route_input_to_bucket(
:param pipelines: Different buckets to be used
:return: The correct Pipeline object (or Bucket) to route input to
"""
-        tokenizer = pipelines[0].tokenizer
+        tokenizer = pipelines[-1].tokenizer
tokens = tokenizer(
input_schema.inputs,
add_special_tokens=True,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -493,7 +493,7 @@ def route_input_to_bucket(
:param pipelines: Different buckets to be used
:return: The correct Pipeline object (or Bucket) to route input to
"""
-        tokenizer = pipelines[0].tokenizer
+        tokenizer = pipelines[-1].tokenizer
tokens = tokenizer(
" ".join((input_schema.context, input_schema.question)),
add_special_tokens=True,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -281,7 +281,7 @@ def route_input_to_bucket(
:param pipelines: Different buckets to be used
:return: The correct Pipeline object (or Bucket) to route input to
"""
-        tokenizer = pipelines[0].tokenizer
+        tokenizer = pipelines[-1].tokenizer
tokens = tokenizer(
input_schema.sequences,
add_special_tokens=True,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -368,7 +368,7 @@ def route_input_to_bucket(
:param pipelines: Different buckets to be used
:return: The correct Pipeline object (or Bucket) to route input to
"""
-        tokenizer = pipelines[0].tokenizer
+        tokenizer = pipelines[-1].tokenizer
tokens = tokenizer(
input_schema.inputs,
add_special_tokens=True,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -299,7 +299,7 @@ def route_input_to_bucket(
:param pipelines: Different buckets to be used
:return: The correct Pipeline object (or Bucket) to route input to
"""
-        tokenizer = pipelines[0].tokenizer
+        tokenizer = pipelines[-1].tokenizer
tokens = tokenizer(
input_schema.sequences,
add_special_tokens=True,
Expand Down

0 comments on commit 7a7df94

Please sign in to comment.