Skip ConstantFolding for ops producing an optional type output
hariharans29 committed Jun 13, 2022
1 parent 63c483a commit 98fff18
Showing 2 changed files with 8 additions and 4 deletions.
6 changes: 3 additions & 3 deletions onnxruntime/core/optimizer/constant_folding.cc
@@ -196,10 +196,10 @@ Status ConstantFolding::ApplyImpl(Graph& graph, bool& modified, int graph_level,
         ORT_ENFORCE(fetches.size() == node->OutputDefs().size());
         converted_to_constant = true;
         for (size_t fetch_idx = 0; fetch_idx < fetches.size(); ++fetch_idx) {
-          OrtValue& ort_value = fetches[fetch_idx];
+          auto* constant_arg_out = node->MutableOutputDefs()[fetch_idx];
           // XXX: Add support for SparseTensors outputs when we have sparse outputs
-          if (!ort_value.IsTensor()) {
-            LOGS(logger, WARNING) << "Unsupported output type of " << ort_value.Type()
+          if (!utils::HasTensorType(*constant_arg_out->TypeAsProto())) {
+            LOGS(logger, INFO) << "Unsupported output type of " << constant_arg_out->Type()
                                  << ". Can't constant fold " << node->OpType() << " node '" << node->Name() << "'";
             converted_to_constant = false;
             break;
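Note on the hunk above: folding eligibility is now decided from the output NodeArg's declared TypeProto instead of the computed OrtValue. An output of an op such as Optional carries an optional type, not a tensor type, in its TypeProto, so the node is skipped before any attempt to turn the result into an initializer. The snippet below is only an illustrative sketch of that kind of type test against the ONNX TypeProto protobuf; the helper name and include path are assumptions, not ORT source.

#include "onnx/onnx_pb.h"  // assumption: ONNX protobuf headers providing TypeProto

// Illustrative stand-in for a HasTensorType-style check. The TypeProto
// "value" oneof is tensor_type for plain tensor outputs but optional_type
// for outputs of ops such as Optional, so the test below is false for those
// and constant folding leaves the node alone.
bool HasTensorTypeSketch(const onnx::TypeProto& type_proto) {
  return type_proto.value_case() == onnx::TypeProto::kTensorType;
}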
6 changes: 5 additions & 1 deletion onnxruntime/core/providers/cpu/optional/optional_ops.cc
@@ -118,7 +118,11 @@ Status Optional::Compute(OpKernelContext* ctx) const {

   } else { // No input was provided - we use the type proto to construct the output OrtValue
 
-    CheckValidTypeProto(*type_proto_);
+    if (!CheckValidTypeProto(*type_proto_)) {
+      return ORT_MAKE_STATUS(ONNXRUNTIME, INVALID_ARGUMENT,
+                             "The TypeProto attribute in the Optional op ",
+                             "can only be of type(tensor) or (seq(tensor))");
+    }
 
     // type is either Tensor or TensorSeq (we have validated this already in CheckValidTypeProto())
     if (type_proto_->has_tensor_type()) {
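For context on the hunk above, the kernel now treats CheckValidTypeProto as a predicate and returns an INVALID_ARGUMENT status itself, since the Optional op's type_proto attribute is only legal as a tensor or a sequence of tensors. A hypothetical sketch of such a predicate over the ONNX TypeProto protobuf follows; the name, include path, and structure are illustrative assumptions, not the ORT implementation.

#include "onnx/onnx_pb.h"  // assumption: ONNX protobuf headers providing TypeProto

// Hypothetical predicate matching the call site above: accept tensor(T)
// or seq(tensor(T)) and reject everything else (maps, optionals, opaque
// types), leaving the caller to report INVALID_ARGUMENT.
static bool CheckValidTypeProtoSketch(const onnx::TypeProto& tp) {
  if (tp.has_tensor_type()) {
    return true;
  }
  return tp.has_sequence_type() &&
         tp.sequence_type().elem_type().has_tensor_type();
}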
