Update sample Colab notebooks following TF importer changes. (#13311)
Tentatively fixes #13148

Major changes:

* Switch output MLIR from MHLO to StableHLO (outputs changed automatically,
so some comments were updated)
* Update import API usage (remove `import_extra_args` and the trailing
`decode('utf-8')`); a usage sketch follows this list
* variables_and_state
  * Don't call one exported function from another exported function
* Commented out broken Python runtime code for now (C runtime code still
works)
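
As a reference point, below is a minimal sketch of what the updated import API usage looks like after this change. The `compile_saved_model` keyword arguments mirror the call in the `tensorflow_hub_import.ipynb` diff further down; the import alias, paths, and backend choice are illustrative placeholders rather than a canonical recipe.

```python
import os

# Assumption: `iree.compiler.tf` is the importer module provided by the
# `iree-tools-tf` package installed in the notebooks.
from iree.compiler import tf as tfc

ARTIFACTS_DIR = "/tmp/iree/colab_artifacts"          # placeholder path
saved_model_path = "/path/to/exported_saved_model"   # placeholder path
output_file = os.path.join(ARTIFACTS_DIR, "model.vmfb")
iree_input = os.path.join(ARTIFACTS_DIR, "model_iree_input.mlir")

# The old `import_extra_args=[...]` keyword and the trailing `.decode('utf-8')`
# on compiler output are gone; the importer now emits StableHLO-based IREE
# input directly.
tfc.compile_saved_model(
    saved_model_path,
    output_file=output_file,
    save_temp_iree_input=iree_input,  # dump the intermediate .mlir for inspection
    import_type="SIGNATURE_DEF",
    saved_model_tags=set(["serve"]),
    target_backends=["vmvx"])

print(f"Saved compiled output to '{output_file}'")
```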
ScottTodd authored Apr 26, 2023
1 parent ebf8490 commit 5cd6e1d
Showing 6 changed files with 317 additions and 320 deletions.
151 changes: 70 additions & 81 deletions samples/colab/edge_detection.ipynb

Large diffs are not rendered by default.

91 changes: 59 additions & 32 deletions samples/colab/mnist_training.ipynb

Large diffs are not rendered by default.

102 changes: 43 additions & 59 deletions samples/colab/resnet.ipynb

Large diffs are not rendered by default.

73 changes: 38 additions & 35 deletions samples/colab/tensorflow_hub_import.ipynb
@@ -39,7 +39,7 @@
"# See https://llvm.org/LICENSE.txt for license information.\n",
"# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception"
],
"execution_count": 1,
"execution_count": 10,
"outputs": []
},
{
@@ -79,7 +79,7 @@
"%%capture\n",
"!python -m pip install iree-compiler iree-runtime iree-tools-tf -f https://openxla.github.io/iree/pip-release-links.html"
],
"execution_count": 2,
"execution_count": 11,
"outputs": []
},
{
@@ -89,7 +89,7 @@
"colab": {
"base_uri": "https://localhost:8080/"
},
"outputId": "0e9e1cc3-c97f-4a5c-c980-a43897fc6703"
"outputId": "8d3bf1f1-1843-4fe9-80e0-a9fc5b194778"
},
"source": [
"import os\n",
@@ -107,13 +107,13 @@
"os.makedirs(ARTIFACTS_DIR, exist_ok=True)\n",
"print(f\"Using artifacts directory '{ARTIFACTS_DIR}'\")"
],
"execution_count": 3,
"execution_count": 12,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"TensorFlow version: 2.8.2\n",
"TensorFlow version: 2.12.0\n",
"Using artifacts directory '/tmp/iree/colab_artifacts'\n"
]
}
@@ -139,7 +139,7 @@
"base_uri": "https://localhost:8080/"
},
"id": "7fd0vmnloZo9",
"outputId": "f3c075d8-0422-40b2-c9f8-bdfd865fd4c2"
"outputId": "dabea3a2-d312-4729-c947-b24216a6c25b"
},
"source": [
"#@title Download the pretrained model\n",
@@ -150,7 +150,7 @@
"model_path = hub.resolve(HUB_PATH)\n",
"print(f\"Downloaded model from tfhub to path: '{model_path}'\")"
],
"execution_count": 4,
"execution_count": 13,
"outputs": [
{
"output_type": "stream",
@@ -184,7 +184,7 @@
"base_uri": "https://localhost:8080/"
},
"id": "qiO66oEYQmsd",
"outputId": "95950642-7225-4378-f3d6-ffeb8aedbcd3"
"outputId": "91f724db-01cd-4dd3-c55c-ba4431233cfa"
},
"source": [
"#@title Check for serving signatures\n",
@@ -201,7 +201,7 @@
"print(\"Checking for signature_defs using saved_model_cli:\\n\")\n",
"!saved_model_cli show --dir {model_path} --tag_set serve --signature_def serving_default"
],
"execution_count": 5,
"execution_count": 14,
"outputs": [
{
"output_type": "stream",
@@ -214,17 +214,24 @@
"\n",
"Checking for signature_defs using saved_model_cli:\n",
"\n",
"2023-04-26 17:12:32.367522: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT\n",
"Traceback (most recent call last):\n",
" File \"/usr/local/bin/saved_model_cli\", line 8, in <module>\n",
" sys.exit(main())\n",
" File \"/usr/local/lib/python3.7/dist-packages/tensorflow/python/tools/saved_model_cli.py\", line 1260, in main\n",
" args.func(args)\n",
" File \"/usr/local/lib/python3.7/dist-packages/tensorflow/python/tools/saved_model_cli.py\", line 745, in show\n",
" _show_inputs_outputs(args.dir, args.tag_set, args.signature_def)\n",
" File \"/usr/local/lib/python3.7/dist-packages/tensorflow/python/tools/saved_model_cli.py\", line 154, in _show_inputs_outputs\n",
" meta_graph_def, signature_def_key)\n",
" File \"/usr/local/lib/python3.7/dist-packages/tensorflow/python/tools/saved_model_cli.py\", line 115, in _get_inputs_tensor_info_from_meta_graph_def\n",
" f'Could not find signature \"{signature_def_key}\". Please choose from: '\n",
" File \"/usr/local/lib/python3.9/dist-packages/tensorflow/python/tools/saved_model_cli.py\", line 1284, in main\n",
" app.run(smcli_main)\n",
" File \"/usr/local/lib/python3.9/dist-packages/absl/app.py\", line 308, in run\n",
" _run_main(main, args)\n",
" File \"/usr/local/lib/python3.9/dist-packages/absl/app.py\", line 254, in _run_main\n",
" sys.exit(main(argv))\n",
" File \"/usr/local/lib/python3.9/dist-packages/tensorflow/python/tools/saved_model_cli.py\", line 1282, in smcli_main\n",
" args.func()\n",
" File \"/usr/local/lib/python3.9/dist-packages/tensorflow/python/tools/saved_model_cli.py\", line 961, in show\n",
" _show_inputs_outputs(\n",
" File \"/usr/local/lib/python3.9/dist-packages/tensorflow/python/tools/saved_model_cli.py\", line 345, in _show_inputs_outputs\n",
" inputs_tensor_info = _get_inputs_tensor_info_from_meta_graph_def(\n",
" File \"/usr/local/lib/python3.9/dist-packages/tensorflow/python/tools/saved_model_cli.py\", line 306, in _get_inputs_tensor_info_from_meta_graph_def\n",
" raise ValueError(\n",
"ValueError: Could not find signature \"serving_default\". Please choose from: __saved_model_init_op\n"
]
}
@@ -248,7 +255,7 @@
"base_uri": "https://localhost:8080/"
},
"id": "OlDG2OuqOBGC",
"outputId": "8296a409-c630-4d03-c81c-58aa95cc0f77"
"outputId": "c25d0e59-3a42-4f43-804c-c607eb9fc84c"
},
"source": [
"#@title Look up input signatures to use when exporting\n",
@@ -260,7 +267,7 @@
"!saved_model_cli show --dir {model_path} --all \\\n",
" 2> /dev/null | grep \"inputs: TensorSpec\" | tail -n 1"
],
"execution_count": 6,
"execution_count": 15,
"outputs": [
{
"output_type": "stream",
@@ -278,7 +285,7 @@
"base_uri": "https://localhost:8080/"
},
"id": "gnb4HhMmkgiT",
"outputId": "f8cf1fe0-0bc4-4c2f-9622-325c15cb923c"
"outputId": "d5ff3d4a-0483-476e-af6e-0b3c827d4938"
},
"source": [
"#@title Re-export the model using the known signature\n",
@@ -308,7 +315,7 @@
"print(\"Checking for signature_defs using saved_model_cli:\\n\")\n",
"!saved_model_cli show --dir {resaved_model_path} --tag_set serve --signature_def serving_default"
],
"execution_count": 7,
"execution_count": 16,
"outputs": [
{
"output_type": "stream",
@@ -323,6 +330,7 @@
"\n",
"Checking for signature_defs using saved_model_cli:\n",
"\n",
"2023-04-26 17:13:06.873761: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT\n",
"The given SavedModel SignatureDef contains the following input(s):\n",
" inputs['inputs'] tensor_info:\n",
" dtype: DT_FLOAT\n",
@@ -354,16 +362,15 @@
"base_uri": "https://localhost:8080/"
},
"id": "GLkjlHE5mdmg",
"outputId": "f6a7718a-456b-4eb1-ea0f-3465e658f3c9"
"outputId": "c67419f8-94de-4335-ddbc-f062b7d2e48a"
},
"source": [
"#@title Import from SavedModel\n",
"\n",
"# The main output file from compilation is a .vmfb \"VM FlatBuffer\". This file\n",
"# can used to run the compiled model with IREE's runtime.\n",
"output_file = os.path.join(ARTIFACTS_DIR, \"mobilenet_v2.vmfb\")\n",
"# As compilation runs, dump some intermediate .mlir files for future inspection.\n",
"tf_input = os.path.join(ARTIFACTS_DIR, \"mobilenet_v2_tf_input.mlir\")\n",
"# As compilation runs, dump an intermediate .mlir file for future inspection.\n",
"iree_input = os.path.join(ARTIFACTS_DIR, \"mobilenet_v2_iree_input.mlir\")\n",
"\n",
"# Since our SavedModel uses signature defs, we use `saved_model_tags` with\n",
@@ -377,25 +384,22 @@
"tfc.compile_saved_model(\n",
" resaved_model_path,\n",
" output_file=output_file,\n",
" save_temp_tf_input=tf_input,\n",
" save_temp_iree_input=iree_input,\n",
" import_type=\"SIGNATURE_DEF\",\n",
" saved_model_tags=set([\"serve\"]),\n",
" target_backends=[\"vmvx\"])\n",
"clear_output() # Skip over TensorFlow's output.\n",
"\n",
"print(f\"Saved compiled output to '{output_file}'\")\n",
"print(f\"Saved tf_input to '{tf_input}'\")\n",
"print(f\"Saved iree_input to '{iree_input}'\")"
],
"execution_count": 8,
"execution_count": 17,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Saved compiled output to '/tmp/iree/colab_artifacts/mobilenet_v2.vmfb'\n",
"Saved tf_input to '/tmp/iree/colab_artifacts/mobilenet_v2_tf_input.mlir'\n",
"Saved iree_input to '/tmp/iree/colab_artifacts/mobilenet_v2_iree_input.mlir'\n"
]
}
@@ -406,10 +410,10 @@
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/",
"height": 104
"height": 86
},
"id": "IEJAzOb5qASI",
"outputId": "d5a6ec5e-a583-47c9-b1eb-3bd81c02b50f"
"outputId": "9a29aa51-b99d-4acd-dae8-0d97cf9786e6"
},
"source": [
"#@title Download compilation artifacts\n",
@@ -427,16 +431,15 @@
"except ImportError:\n",
" print(\"Missing google_colab Python package, can't download files\")"
],
"execution_count": 9,
"execution_count": 18,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Zipping '/tmp/iree/colab_artifacts' to '/tmp/mobilenet_colab_artifacts.zip' for download...\n",
" adding: mobilenet_v2.vmfb (deflated 8%)\n",
" adding: mobilenet_v2_tf_input.mlir (deflated 8%)\n",
" adding: mobilenet_v2_iree_input.mlir (deflated 8%)\n",
" adding: mobilenet_v2_iree_input.mlir (deflated 46%)\n",
"Downloading the artifacts zip file...\n"
]
},
@@ -499,12 +502,12 @@
"<IPython.core.display.Javascript object>"
],
"application/javascript": [
"download(\"download_e515e31a-0819-4066-9236-710263bad3e5\", \"mobilenet_colab_artifacts.zip\", 39551293)"
"download(\"download_18545900-47df-4250-9a14-8453ca4b6fc2\", \"mobilenet_colab_artifacts.zip\", 41434352)"
]
},
"metadata": {}
}
]
}
]
}
}