From ace6f3960d0597619ff39e3b9f560e4534408071 Mon Sep 17 00:00:00 2001
From: Max Hniebergall
Date: Wed, 12 Jul 2023 11:08:02 -0400
Subject: [PATCH] [NLP] Support the different mask tokens used by NLP models for Fill Mask

---
 specification/ml/_types/inference.ts | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/specification/ml/_types/inference.ts b/specification/ml/_types/inference.ts
index 0959b46366..c75a3430de 100644
--- a/specification/ml/_types/inference.ts
+++ b/specification/ml/_types/inference.ts
@@ -265,6 +265,12 @@ export class NerInferenceOptions {
 
 /** Fill mask inference options */
 export class FillMaskInferenceOptions {
+  /** The string/token which will be removed from incoming documents and replaced with the inference prediction(s).
+   * In a response, this field contains the mask token for the specified model/tokenizer. Each model and tokenizer
+   * has a predefined mask token which cannot be changed. Thus, it is recommended not to set this value in requests.
+   * However, if this field is present in a request, its value must match the predefined value for that model/tokenizer,
+   * otherwise the request will fail. */
+  mask_token?: string
   /** Specifies the number of top class predictions to return. Defaults to 0. */
   num_top_classes?: integer
   /** The tokenization options to update when inferring */
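
Note for reviewers: a minimal TypeScript sketch (not part of this patch) of how the new field could be supplied in a fill_mask inference config block. The "[MASK]" value and the comment about tokenizer families below are assumptions for illustration; each model/tokenizer defines its own mask token.

// Hypothetical example only: a fill_mask inference config section as it might
// appear in a create trained model request body. Supplying mask_token is
// optional; if present it must equal the tokenizer's predefined mask token
// (assumed here to be "[MASK]" for a BERT-style tokenizer), otherwise the
// request fails.
const inferenceConfig = {
  fill_mask: {
    mask_token: '[MASK]', // assumption: BERT-style tokenizer
    num_top_classes: 3,
  },
}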