From 24c8c90efdb7cc51381af5ce0205b23567c3cd21 Mon Sep 17 00:00:00 2001
From: Karanbir Chahal
Date: Fri, 12 Jul 2019 14:41:21 +0530
Subject: [PATCH] Pruning Demo added (#964)
---
 demo/shapes_pruning.ipynb | 2679 +++++++++++++++++++++++++++++++++++++
 1 file changed, 2679 insertions(+)
 create mode 100644 demo/shapes_pruning.ipynb
diff --git a/demo/shapes_pruning.ipynb b/demo/shapes_pruning.ipynb
new file mode 100644
index 000000000..ce143da15
--- /dev/null
+++ b/demo/shapes_pruning.ipynb
@@ -0,0 +1,2679 @@
+{
+ "nbformat": 4,
+ "nbformat_minor": 0,
+ "metadata": {
+ "colab": {
+ "name": "shapes-pruning.ipynb",
+ "version": "0.3.2",
+ "provenance": [],
+ "collapsed_sections": [
+ "LUQbRTRocPNN",
+ "aiLvxXRpDbiq",
+ "xnr8tbDz7WjS",
+ "5DC0K7tW7d-M",
+ "BI2ncK7kATEh",
+ "hbzY16ocEdrg",
+ "If8z4OZfDHmC",
+ "mOo-0LGFEAmc",
+ "bbCBInqHFUg7",
+ "tAn3omCjTFGI",
+ "xs_KL1R1aGSA",
+ "NVjPYFN1Pz6D",
+ "tHq9j1HENMMw"
+ ]
+ },
+ "kernelspec": {
+ "name": "python3",
+ "display_name": "Python 3"
+ },
+ "accelerator": "GPU"
+ },
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "268x1mG64rCy",
+ "colab_type": "text"
+ },
+ "source": [
+ "# Installation"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "VNvKG2TF3Y0B",
+ "colab_type": "code",
+ "outputId": "62ad88fa-0b36-4ebe-f6f6-77e4b9c4e56f",
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 34
+ }
+ },
+ "source": [
+ "%%writefile setup.sh\n",
+ "\n",
+ "# maskrcnn_benchmark and coco api dependencies\n",
+ "pip install ninja yacs cython matplotlib tqdm opencv-python\n",
+ "\n",
+ "# follow PyTorch installation in https://pytorch.org/get-started/locally/\n",
+ "# we give the instructions for CUDA 9.0\n",
+ "conda install -c pytorch pytorch-nightly torchvision cudatoolkit=9.0\n",
+ "\n",
+ "\n",
+ "git clone https://github.com/cocodataset/cocoapi.git\n",
+ "cd cocoapi/PythonAPI\n",
+ "python setup.py build_ext install\n",
+ "cd ../../\n",
+ "\n",
+ "# install apex\n",
+ "rm -rf apex\n",
+ "git clone https://github.com/NVIDIA/apex.git\n",
+ "cd apex\n",
+ "git pull\n",
+ "python setup.py install --cuda_ext --cpp_ext\n",
+ "cd ../\n",
+ "\n",
+ "# install PyTorch Detection\n",
+ "git clone https://github.com/facebookresearch/maskrcnn-benchmark.git\n",
+ "cd maskrcnn-benchmark\n",
+ "\n",
+ "# the following will install the lib with\n",
+ "# symbolic links, so that you can modify\n",
+ "# the files if you want and won't need to\n",
+ "# re-build it\n",
+ "python setup.py build develop\n"
+ ],
+ "execution_count": 1,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "text": [
+ "Writing setup.sh\n"
+ ],
+ "name": "stdout"
+ }
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "NYzsp3Ng3mOy",
+ "colab_type": "code",
+ "colab": {}
+ },
+ "source": [
+ "!sh setup.sh"
+ ],
+ "execution_count": 0,
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "1uoPMGDl49Wk",
+ "colab_type": "text"
+ },
+ "source": [
+ "### Checking our Installation\n",
+ "\n",
+ "If a module not found error appears, restart the runtime. 
The libraries should be loaded after restarting" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "3q-n76S95KA3", + "colab_type": "code", + "colab": {} + }, + "source": [ + "import maskrcnn_benchmark" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "aiLvxXRpDbiq", + "colab_type": "text" + }, + "source": [ + "# Imports" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "kLzesfGNX9O2", + "colab_type": "code", + "outputId": "62004ff6-534f-4181-ca2c-d166e34f97a9", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 34 + } + }, + "source": [ + "import torch\n", + "from torch import nn\n", + "import torch.nn.functional as Fx\n", + "import datetime\n", + "\n", + "# Set up custom environment before nearly anything else is imported\n", + "# NOTE: this should be the first import (no not reorder)\n", + "from maskrcnn_benchmark.utils.env import setup_environment # noqa F401 isort:skip\n", + "\n", + "from maskrcnn_benchmark.data.build import *\n", + "from maskrcnn_benchmark.structures.bounding_box import BoxList\n", + "from maskrcnn_benchmark.structures.segmentation_mask import SegmentationMask\n", + "from maskrcnn_benchmark.modeling.detector import build_detection_model\n", + "from maskrcnn_benchmark.utils.checkpoint import DetectronCheckpointer\n", + "from maskrcnn_benchmark.structures.image_list import to_image_list\n", + "from maskrcnn_benchmark.modeling.roi_heads.mask_head.inference import Masker\n", + "from maskrcnn_benchmark import layers as L\n", + "from maskrcnn_benchmark.utils import cv2_util\n", + "from maskrcnn_benchmark.utils.miscellaneous import mkdir\n", + "from maskrcnn_benchmark.utils.logger import setup_logger\n", + "from maskrcnn_benchmark.utils.comm import synchronize, get_rank\n", + "from maskrcnn_benchmark.config import cfg\n", + "from maskrcnn_benchmark.config import cfg\n", + "from maskrcnn_benchmark.data import make_data_loader\n", + "from maskrcnn_benchmark.solver import make_lr_scheduler\n", + "from maskrcnn_benchmark.solver import make_optimizer\n", + "from maskrcnn_benchmark.engine.inference import inference\n", + "from maskrcnn_benchmark.engine.trainer import do_train\n", + "from maskrcnn_benchmark.modeling.detector import build_detection_model\n", + "from maskrcnn_benchmark.utils.checkpoint import DetectronCheckpointer\n", + "from maskrcnn_benchmark.utils.collect_env import collect_env_info\n", + "from maskrcnn_benchmark.utils.comm import synchronize, get_rank\n", + "from maskrcnn_benchmark.utils.imports import import_file\n", + "from maskrcnn_benchmark.data.datasets.evaluation import evaluate\n", + "from maskrcnn_benchmark.utils.comm import is_main_process, get_world_size\n", + "from maskrcnn_benchmark.utils.comm import all_gather\n", + "from maskrcnn_benchmark.utils.timer import Timer, get_time_str\n", + "from maskrcnn_benchmark.engine.inference import compute_on_dataset, _accumulate_predictions_from_multiple_gpus\n", + "from maskrcnn_benchmark.data.datasets.evaluation.coco import coco_evaluation\n", + "from maskrcnn_benchmark.modeling.utils import cat\n", + "from maskrcnn_benchmark.structures.image_list import to_image_list\n", + "\n", + "from maskrcnn_benchmark.modeling.backbone import build_backbone\n", + "from maskrcnn_benchmark.modeling.rpn.rpn import build_rpn\n", + "from maskrcnn_benchmark.modeling.roi_heads.roi_heads import build_roi_heads\n", + "from maskrcnn_benchmark.modeling.make_layers import make_conv3x3\n", + "from maskrcnn_benchmark.structures.image_list import 
to_image_list\n",
+ "\n",
+ "import torch.distributed as dist\n",
+ "\n",
+ "from maskrcnn_benchmark.utils.comm import get_world_size\n",
+ "from maskrcnn_benchmark.utils.metric_logger import MetricLogger\n",
+ "\n",
+ "\n",
+ "from PIL import Image\n",
+ "import json\n",
+ "import logging\n",
+ "import torch\n",
+ "import numpy as np\n",
+ "import skimage.draw as draw\n",
+ "import tempfile\n",
+ "from pycocotools.coco import COCO\n",
+ "import os\n",
+ "import sys\n",
+ "import random\n",
+ "import math\n",
+ "import re\n",
+ "import time\n",
+ "import cv2\n",
+ "import matplotlib\n",
+ "import matplotlib.pyplot as plt\n",
+ "from tqdm import tqdm\n",
+ "\n",
+ "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.\n",
+ "from torchvision import transforms as T\n",
+ "from torchvision.transforms import functional as F\n",
+ "from google.colab.patches import cv2_imshow\n",
+ "\n",
+ " \n",
+ "logger_dir = 'log'\n",
+ "\n",
+ "if logger_dir:\n",
+ " mkdir(logger_dir)\n",
+ "\n",
+ "logger = setup_logger(\"maskrcnn_benchmark\", logger_dir, get_rank())\n",
+ "logger.info(\"Using {} GPUs\".format(1))\n"
+ ],
+ "execution_count": 1,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "text": [
+ "2019-07-12 03:39:26,768 maskrcnn_benchmark INFO: Using 1 GPUs\n"
+ ],
+ "name": "stdout"
+ }
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "DvU-NYKJ3uzb",
+ "colab_type": "text"
+ },
+ "source": [
+ "# Loading Our Dataset\n",
+ "\n",
+ "To train a network using the MaskRCNN repo, we first need to define our dataset. The dataset needs to be a plain Python class (of type object) and should implement the following:\n",
+ "\n",
+ "1. **__getitem__(self, idx)**: This function should return a PIL Image, a BoxList and the idx. The BoxList is an abstraction for our bounding boxes, segmentation masks, class labels and also person keypoints. Please check ABSTRACTIONS.md for more details on this. \n",
+ "\n",
+ "2. **__len__()**: Returns the length of the dataset. \n",
+ "\n",
+ "3. **get_img_info(self, idx)**: Returns a dict of image info with the fields \"height\" and \"width\" filled in with the height and width of the image at index idx.\n",
+ "\n",
+ "4. **self.coco**: A variable that holds the COCO object for your annotations, so that you can run COCO evaluation on your dataset. \n",
+ "\n",
+ "5. **self.id_to_img_map**: A dictionary that maps dataset indices to COCO image ids. In almost all cases, simply map each idx to itself. This is purely a requirement of the COCO evaluation. \n",
+ "\n",
+ "6. **self.contiguous_category_id_to_json_id**: Another requirement for COCO evaluation. It maps the contiguous category id to the JSON category id. Again, for almost all purposes the category id and the JSON id should be the same. \n",
+ "\n",
+ "Given below is a sample dataset: the Shape Dataset taken from the Matterport Mask RCNN repo. One important detail is that the constructor of the dataset should take a transforms argument that is stored on the instance. It should then be applied inside **__getitem__(self, idx)** as shown below.",
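+ "\n",
+ "For quick reference, a minimal skeleton of this interface is sketched below. The class name and the `...` placeholders are ours and purely illustrative (not part of the repo); the full, working ShapeDataset implementation follows in the next cells.\n",
+ "\n",
+ "```python\n",
+ "from maskrcnn_benchmark.structures.bounding_box import BoxList\n",
+ "\n",
+ "class MyDataset(object):\n",
+ "    def __init__(self, transforms=None):\n",
+ "        self.transforms = transforms  # IMPORTANT: applied in __getitem__\n",
+ "        # needed only for COCO-style evaluation\n",
+ "        self.coco = None  # a pycocotools COCO object built from your annotations\n",
+ "        self.id_to_img_map = {}\n",
+ "        self.contiguous_category_id_to_json_id = {}\n",
+ "\n",
+ "    def __getitem__(self, idx):\n",
+ "        image = ...   # a PIL Image\n",
+ "        boxes = ...   # [[x1, y1, x2, y2], ...]\n",
+ "        labels = ...  # tensor of class ids, one per box\n",
+ "        boxlist = BoxList(boxes, image.size, mode=\"xyxy\")\n",
+ "        boxlist.add_field(\"labels\", labels)\n",
+ "        if self.transforms:\n",
+ "            image, boxlist = self.transforms(image, boxlist)\n",
+ "        return image, boxlist, idx\n",
+ "\n",
+ "    def __len__(self):\n",
+ "        return ...  # number of examples\n",
+ "\n",
+ "    def get_img_info(self, idx):\n",
+ "        return {\"height\": ..., \"width\": ...}\n",
+ "```"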
+ ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "xnr8tbDz7WjS", + "colab_type": "text" + }, + "source": [ + "## Helper Functions" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "tb_5MERf7c_1", + "colab_type": "code", + "colab": {} + }, + "source": [ + "# Helper Functions for the Shapes Dataset\n", + "\n", + "def non_max_suppression(boxes, scores, threshold):\n", + " \"\"\"Performs non-maximum suppression and returns indices of kept boxes.\n", + " boxes: [N, (y1, x1, y2, x2)]. Notice that (y2, x2) lays outside the box.\n", + " scores: 1-D array of box scores.\n", + " threshold: Float. IoU threshold to use for filtering.\n", + " \"\"\"\n", + " assert boxes.shape[0] > 0\n", + " if boxes.dtype.kind != \"f\":\n", + " boxes = boxes.astype(np.float32)\n", + "\n", + " # Compute box areas\n", + " y1 = boxes[:, 0]\n", + " x1 = boxes[:, 1]\n", + " y2 = boxes[:, 2]\n", + " x2 = boxes[:, 3]\n", + " area = (y2 - y1) * (x2 - x1)\n", + "\n", + " # Get indicies of boxes sorted by scores (highest first)\n", + " ixs = scores.argsort()[::-1]\n", + "\n", + " pick = []\n", + " while len(ixs) > 0:\n", + " # Pick top box and add its index to the list\n", + " i = ixs[0]\n", + " pick.append(i)\n", + " # Compute IoU of the picked box with the rest\n", + " iou = compute_iou(boxes[i], boxes[ixs[1:]], area[i], area[ixs[1:]])\n", + " # Identify boxes with IoU over the threshold. This\n", + " # returns indices into ixs[1:], so add 1 to get\n", + " # indices into ixs.\n", + " remove_ixs = np.where(iou > threshold)[0] + 1\n", + " # Remove indices of the picked and overlapped boxes.\n", + " ixs = np.delete(ixs, remove_ixs)\n", + " ixs = np.delete(ixs, 0)\n", + " return np.array(pick, dtype=np.int32)\n", + "\n", + "def compute_iou(box, boxes, box_area, boxes_area):\n", + " \"\"\"Calculates IoU of the given box with the array of the given boxes.\n", + " box: 1D vector [y1, x1, y2, x2]\n", + " boxes: [boxes_count, (y1, x1, y2, x2)]\n", + " box_area: float. the area of 'box'\n", + " boxes_area: array of length boxes_count.\n", + " Note: the areas are passed in rather than calculated here for\n", + " efficiency. Calculate once in the caller to avoid duplicate work.\n", + " \"\"\"\n", + " # Calculate intersection areas\n", + " y1 = np.maximum(box[0], boxes[:, 0])\n", + " y2 = np.minimum(box[2], boxes[:, 2])\n", + " x1 = np.maximum(box[1], boxes[:, 1])\n", + " x2 = np.minimum(box[3], boxes[:, 3])\n", + " intersection = np.maximum(x2 - x1, 0) * np.maximum(y2 - y1, 0)\n", + " union = box_area + boxes_area[:] - intersection[:]\n", + " iou = intersection / union\n", + " return iou" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "5DC0K7tW7d-M", + "colab_type": "text" + }, + "source": [ + "## Dataset" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "WhG_Tu9ELAsj", + "colab_type": "code", + "colab": {} + }, + "source": [ + "class ShapeDataset(object):\n", + " \n", + " def __init__(self, num_examples, transforms=None):\n", + " \n", + " self.height = 128\n", + " self.width = 128\n", + " \n", + " self.num_examples = num_examples\n", + " self.transforms = transforms # IMPORTANT, DON'T MISS\n", + " self.image_info = []\n", + " self.logger = logging.getLogger(__name__)\n", + " \n", + " # Class Names: Note that the ids start fromm 1 not 0. This repo uses the 0 index for background\n", + " self.class_names = {\"square\": 1, \"circle\": 2, \"triangle\": 3}\n", + " \n", + " # Add images\n", + " # Generate random specifications of images (i.e. 
color and\n", + " # list of shapes sizes and locations). This is more compact than\n", + " # actual images. Images are generated on the fly in load_image().\n", + " for i in range(num_examples):\n", + " bg_color, shapes = self.random_image(self.height, self.width)\n", + " self.image_info.append({ \"path\":None,\n", + " \"width\": self.width, \"height\": self.height,\n", + " \"bg_color\": bg_color, \"shapes\": shapes\n", + " })\n", + " \n", + " # Fills in the self.coco varibale for evaluation.\n", + " self.get_gt()\n", + " \n", + " # Variables needed for coco mAP evaluation\n", + " self.id_to_img_map = {}\n", + " for i, _ in enumerate(self.image_info):\n", + " self.id_to_img_map[i] = i\n", + "\n", + " self.contiguous_category_id_to_json_id = { 0:0 ,1:1, 2:2, 3:3 }\n", + " \n", + "\n", + " def random_shape(self, height, width):\n", + " \"\"\"Generates specifications of a random shape that lies within\n", + " the given height and width boundaries.\n", + " Returns a tuple of three valus:\n", + " * The shape name (square, circle, ...)\n", + " * Shape color: a tuple of 3 values, RGB.\n", + " * Shape dimensions: A tuple of values that define the shape size\n", + " and location. Differs per shape type.\n", + " \"\"\"\n", + " # Shape\n", + " shape = random.choice([\"square\", \"circle\", \"triangle\"])\n", + " # Color\n", + " color = tuple([random.randint(0, 255) for _ in range(3)])\n", + " # Center x, y\n", + " buffer = 20\n", + " y = random.randint(buffer, height - buffer - 1)\n", + " x = random.randint(buffer, width - buffer - 1)\n", + " # Size\n", + " s = random.randint(buffer, height//4)\n", + " return shape, color, (x, y, s)\n", + "\n", + " def random_image(self, height, width):\n", + " \"\"\"Creates random specifications of an image with multiple shapes.\n", + " Returns the background color of the image and a list of shape\n", + " specifications that can be used to draw the image.\n", + " \"\"\"\n", + " # Pick random background color\n", + " bg_color = np.array([random.randint(0, 255) for _ in range(3)])\n", + " # Generate a few random shapes and record their\n", + " # bounding boxes\n", + " shapes = []\n", + " boxes = []\n", + " N = random.randint(1, 4)\n", + " labels = {}\n", + " for _ in range(N):\n", + " shape, color, dims = self.random_shape(height, width)\n", + " shapes.append((shape, color, dims))\n", + " x, y, s = dims\n", + " boxes.append([y-s, x-s, y+s, x+s])\n", + "\n", + " # Apply non-max suppression wit 0.3 threshold to avoid\n", + " # shapes covering each other\n", + " keep_ixs = non_max_suppression(np.array(boxes), np.arange(N), 0.3)\n", + " shapes = [s for i, s in enumerate(shapes) if i in keep_ixs]\n", + " \n", + " return bg_color, shapes\n", + " \n", + " \n", + " def draw_shape(self, image, shape, dims, color):\n", + " \"\"\"Draws a shape from the given specs.\"\"\"\n", + " # Get the center x, y and the size s\n", + " x, y, s = dims\n", + " if shape == 'square':\n", + " cv2.rectangle(image, (x-s, y-s), (x+s, y+s), color, -1)\n", + " elif shape == \"circle\":\n", + " cv2.circle(image, (x, y), s, color, -1)\n", + " elif shape == \"triangle\":\n", + " points = np.array([[(x, y-s),\n", + " (x-s/math.sin(math.radians(60)), y+s),\n", + " (x+s/math.sin(math.radians(60)), y+s),\n", + " ]], dtype=np.int32)\n", + " cv2.fillPoly(image, points, color)\n", + " return image, [ x-s, y-s, x+s, y+s]\n", + "\n", + "\n", + " def load_mask(self, image_id):\n", + " \"\"\"\n", + " Generates instance masks for shapes of the given image ID.\n", + " \"\"\"\n", + " info = 
self.image_info[image_id]\n", + " shapes = info['shapes']\n", + " count = len(shapes)\n", + " mask = np.zeros([info['height'], info['width'], count], dtype=np.uint8)\n", + " boxes = []\n", + " \n", + " for i, (shape, _, dims) in enumerate(info['shapes']):\n", + " mask[:, :, i:i+1], box = self.draw_shape( mask[:, :, i:i+1].copy(),\n", + " shape, dims, 1)\n", + " boxes.append(box)\n", + " \n", + " \n", + " # Handle occlusions\n", + " occlusion = np.logical_not(mask[:, :, -1]).astype(np.uint8)\n", + " for i in range(count-2, -1, -1):\n", + " mask[:, :, i] = mask[:, :, i] * occlusion\n", + " occlusion = np.logical_and(occlusion, np.logical_not(mask[:, :, i]))\n", + " \n", + " segmentation_mask = mask.copy()\n", + " segmentation_mask = np.expand_dims(np.sum(segmentation_mask, axis=2), axis=2)\n", + " \n", + " # Map class names to class IDs.\n", + " class_ids = np.array([self.class_names[s[0]] for s in shapes])\n", + " return segmentation_mask.astype(np.uint8), mask.astype(np.uint8), class_ids.astype(np.int32), boxes\n", + " \n", + " def load_image(self, image_id):\n", + " \"\"\"Generate an image from the specs of the given image ID.\n", + " Typically this function loads the image from a file, but\n", + " in this case it generates the image on the fly from the\n", + " specs in image_info.\n", + " \"\"\"\n", + " info = self.image_info[image_id]\n", + " bg_color = np.array(info['bg_color']).reshape([1, 1, 3])\n", + " image = np.ones([info['height'], info['width'], 3], dtype=np.uint8)\n", + " image = image * bg_color.astype(np.uint8)\n", + " for shape, color, dims in info['shapes']:\n", + " image, _ = self.draw_shape(image, shape, dims, color)\n", + " return image\n", + " \n", + " def __getitem__(self, idx):\n", + " \n", + " \"\"\"Generate an image from the specs of the given image ID.\n", + " Typically this function loads the image from a file, but\n", + " in this case it generates the image on the fly from the\n", + " specs in image_info.\n", + " \"\"\"\n", + " image = Image.fromarray(self.load_image(idx))\n", + " segmentation_mask, masks, labels, boxes = self.load_mask(idx)\n", + " \n", + " # create a BoxList from the boxes\n", + " boxlist = BoxList(boxes, image.size, mode=\"xyxy\")\n", + "\n", + " # add the labels to the boxlist\n", + " boxlist.add_field(\"labels\", torch.tensor(labels))\n", + "\n", + " # Add masks to the boxlist\n", + " masks = np.transpose(masks, (2,0,1))\n", + " masks = SegmentationMask(torch.tensor(masks), image.size, \"mask\")\n", + " boxlist.add_field(\"masks\", masks)\n", + " \n", + " # Add semantic segmentation masks to the boxlist for panoptic segmentation\n", + " segmentation_mask = np.transpose(segmentation_mask, (2,0,1))\n", + " seg_masks = SegmentationMask(torch.tensor(segmentation_mask), image.size, \"mask\")\n", + " boxlist.add_field(\"seg_masks\", seg_masks)\n", + " \n", + " # Important line! dont forget to add this\n", + " if self.transforms:\n", + " image, boxlist = self.transforms(image, boxlist)\n", + "\n", + " # return the image, the boxlist and the idx in your dataset\n", + " return image, boxlist, idx\n", + " \n", + " \n", + " def __len__(self):\n", + " return self.num_examples\n", + " \n", + "\n", + " def get_img_info(self, idx):\n", + " # get img_height and img_width. 
This is used if\n", + " # we want to split the batches according to the aspect ratio\n", + " # of the image, as it can be more efficient than loading the\n", + " # image from disk\n", + "\n", + " return {\"height\": self.height, \"width\": self.width}\n", + " \n", + " def get_gt(self):\n", + " # Prepares dataset for coco eval\n", + " \n", + " \n", + " images = []\n", + " annotations = []\n", + " results = []\n", + " \n", + " # Define categories\n", + " categories = [ {\"id\": 1, \"name\": \"square\"}, {\"id\": 2, \"name\": \"circle\"}, {\"id\": 3, \"name\": \"triangle\"}]\n", + "\n", + "\n", + " i = 1\n", + " ann_id = 0\n", + "\n", + " for img_id, d in enumerate(self.image_info):\n", + "\n", + " images.append( {\"id\": img_id, 'height': self.height, 'width': self.width } )\n", + "\n", + " for (shape, color, dims) in d['shapes']:\n", + " \n", + " if shape == \"square\":\n", + " category_id = 1\n", + " elif shape == \"circle\":\n", + " category_id = 2\n", + " elif shape == \"triangle\":\n", + " category_id = 3\n", + " \n", + " x, y, s = dims\n", + " bbox = [ x - s, y - s, x+s, y +s ] \n", + " area = (bbox[0] - bbox[2]) * (bbox[1] - bbox[3])\n", + " \n", + " # Format for COCOC\n", + " annotations.append( {\n", + " \"id\": int(ann_id),\n", + " \"category_id\": category_id,\n", + " \"image_id\": int(img_id),\n", + " \"area\" : float(area),\n", + " \"bbox\": [ float(bbox[0]), float(bbox[1]), float(bbox[2]) - float(bbox[0]) + 1, float(bbox[3]) - float(bbox[1]) + 1 ], # note that the bboxes are in x, y , width, height format\n", + " \"iscrowd\" : 0\n", + " } )\n", + "\n", + " ann_id += 1\n", + "\n", + " # Save ground truth file\n", + " \n", + " with open(\"tmp_gt.json\", \"w\") as f:\n", + " json.dump({\"images\": images, \"annotations\": annotations, \"categories\": categories }, f)\n", + "\n", + " # Load gt for coco eval\n", + " self.coco = COCO(\"tmp_gt.json\") \n", + " " + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "2hpTvuSp830x", + "colab_type": "text" + }, + "source": [ + "## Visualise Dataset" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "BI2ncK7kATEh", + "colab_type": "text" + }, + "source": [ + "### Load" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "6nsO_MRUbBpk", + "colab_type": "code", + "outputId": "1515ea47-d4bb-4014-e1d3-46b80d221f18", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 87 + } + }, + "source": [ + "train_dt = ShapeDataset(100)\n", + "im, boxlist, idx = train_dt[0]" + ], + "execution_count": 0, + "outputs": [ + { + "output_type": "stream", + "text": [ + "loading annotations into memory...\n", + "Done (t=0.00s)\n", + "creating index...\n", + "index created!\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "F9njOSX0AU5-", + "colab_type": "text" + }, + "source": [ + "### Display some sample Images" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "nMXB9sAW994F", + "colab_type": "code", + "outputId": "9687b5a9-e67f-48c9-b265-a5597c85358d", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 485 + } + }, + "source": [ + "rows = 2\n", + "cols = 2\n", + "fig = plt.figure(figsize=(8, 8))\n", + "for i in range(1, rows*cols+1):\n", + " im, boxlist, idx = train_dt[i]\n", + " fig.add_subplot(rows, cols, i)\n", + " plt.imshow(im)\n", + "plt.show()\n", + " " + ], + "execution_count": 0, + "outputs": [ + { + "output_type": "display_data", + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAecAAAHVCAYAAADLvzPyAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzt3X+w3HV97/HnOz8gNQaSEJsbklSw\nTe1Qpy3MmYiDtR3T2ojU4NTxwjgSNXcynWKrxY7GMndw5o4zUhWrMy1OKkjoIEhRhsytbaEp1mlH\n0IARAhGIKJIYElGUXCyQkPf9Y7/HLiGHnLPf7+73s7vPx8yZ893Pfvfse7/nvM/rfD773T2RmUiS\npHLMarsASZL0fIazJEmFMZwlSSqM4SxJUmEMZ0mSCmM4S5JUmL6Fc0SsjYgHImJ3RGzq1/1I6i97\nWRq86MfrnCNiNvAg8PvAHuAbwIWZeX/jdyapb+xlqR39mjmvBnZn5sOZ+SxwA7CuT/clqX/sZakF\nc/r0dZcDj3Zd3gO8eqqdZ50wP2fPW9inUlTXacsO93S77+3r14/XeDh88AePZ+bLWi5jRr0MMG/u\ngnzpiaf0tShpmPy/Z37E04cOxkxu09pvz4jYCGwEmDXvZBaffXFbpeg4PrXpQE+3e9dHf7HhSsbL\ngdsufaTtGqaru5/nn7CYda/63y1XJJXjlp3/Z8a36dey9l5gZdflFdXYz2Xm5sycyMyJWXPn96kM\n1fG5TQf4XI/BPHl7Db3j9jI8v5/nzV0wsOKkUdWvcP4GsCoiTo+IE4ALgK19ui9J/WMvSy3oy7J2\nZh6OiPcA/wLMBq7OzPv6cV+S+sdeltrRt+ecM/PLwJf79fXVX00tSU9+HZ9/Hl72sjR4vkOYJEmF\nMZwlSSqM4aznqXuG9ot9XUnS9PguEQIMT0kqiTNnSZIKYzhrYPq1ZC5Jo8Zw1sAD04CWpBdnOEuS\nVBjDWZKkwni29hhrc3nZdw6TpKk5c5YkqTCG85jypCxJKpfhLElSYQxntcoZvCS9kCeEjRnDUJLK\n58xZkqTCGM5qnW/rKUnPZziPkdIDsPT6JGlQDGdJkgrjCWFjwBmpJA0XZ84qis8/S5LhLElScXoO\n54hYGRG3R8T9EXFfRLy3Gl8cEbdFxEPV50XNlauZchaq6bCfpbLUmTkfBt6fmWcAZwMXR8QZwCZg\nW2auArZVl6UZcXl74OxnqSA9h3Nm7svMu6vtg8AuYDmwDthS7bYFOL9ukZL6y36WytLI2doRcRpw\nJnAnsDQz91VXPQYsbeI+NDPOOtUr+1lqX+0TwiLipcAXgfdl5pPd12VmAjnF7TZGxPaI2H7k0FN1\ny1CXUQrmUXosw6CJfn760MEBVCqNtlrhHBFz6TTydZn5pWp4f0Qsq65fBhzzt2tmbs7MicycmDV3\nfp0yJDWgqX6eN3fBYAqWRlids7UDuArYlZlXdF21FVhfba8Hbum9PMmTwwbBfpbKUuc553OAdwD3\nRsSOauwvgY8CN0bEBuAR4G31SpQ0APazVJCewzkz/wOIKa5e0+vXVe+cXapX9rNUFt8hTEPDPz4k\njQvDWZKkwvhfqUaAM0pJGi3OnDVUPHNb0jgwnCVJKozhPOTGdRY5ro9b0njwOechZThJ0uhy5ixJ\nUmEMZw0tTw6TNKoM5yFkIEnSaDOcJUkqjOGsoedKgqRR49naQ8QQkqTx4MxZkqTCGM5Dwlnzi/PM\nbUmjxHCWJKkwhrNGirNnSaPAE8IKZ9hI0vhx5ixJUmEMZ40cTw6TNOwM54IZMJI0ngxnSZIKU/uE\nsIiYDWwH9mbmeRFxOnADcApwF/COzHy27v2ME2fMzZg8ju/66C+2XMnwsJ+lMjQxc34vsKvr8uXA\nJzPzV4AngA0N3IekwbCfpQLUCueIWAG8CfhsdTmA1wM3VbtsAc6vcx+SBsN+lspRd+b818AHgCPV\n5VOAn2Tm4eryHmD5sW4YERsjYntEbD9y6KmaZYwOl7Sb5zGdtkb6+elDB/tfqTTieg7niDgPOJCZ\nd/Vy+8zcnJkTmTkxa+78XsuQpsWXV724Jvt53twFDVcnjZ86J4SdA7w5Is4F5gEnAZ8CFkbEnOqv\n7RXA3vplSuoz+1kqSM8z58z8UGauyMzTgAuAf8vMtwO3A2+tdlsP3FK7yjHgzE5tsp+lsvTjdc4f\nBC6JiN10nrO6qg/3IWkw7GepBY3844vM/ArwlWr7YWB1E193HDhbHqzPbTrg656Pw36W2uc7hEmS\nVBjDWZKkwhjOLXJJux2efCepdIazJEmFMZwlSSqM4dwCl1XL4PdAUqkaeSmVZsaX8kiSXowzZ0mS\nCmM4S5JUGMNZkqTCGM6SJBXGcJYkqTCGsyRJhTGcJUkqjK9zltSam+/8QdslFO8trz617RLUAmfO\nkiQVxnCWJKkwhrMkSYUxnCVJKozhLElSYQxnSZIKYzhLklSYWuEcEQsj4qaI+HZE7IqI10TE4oi4\nLSIeqj4vaqrYNly0+mttlyANxDj0szQs6s6cPwX8c2b+GvCbwC5gE7AtM1cB26rLQ+ei1V/7eTB3\nb0sjbGT7WRo2PYdzRJwMvA64CiAzn83MnwDrgC3VbluA8+sWKam/7GepLHVmzqcDPwQ+FxHfjIjP\nRsR8YGlm7qv2eQxYeqwbR8TGiNgeEduPHHqqRhmSGtBYPz996OCASpZGV51wngOcBVyZmWcCT3HU\nkldmJpDHunFmbs7MicycmDV3fo0ymjfVErZL2xphjfXzvLkL+l6sNOrqhPMeYE9m3lldvolOc++P\niGUA1ecD9UqUNAD2s1SQnsM5Mx8DHo2IV1ZDa4D7ga3A+mpsPXBLrQol9Z39LJWl7r+M/FPguog4\nAXgYeBedwL8xIjYAjwBvq3kfAzOdZevJfa79+mv6XY40aCPVz9IwqxXOmbkDmDjGVWvqfF1Jg2c/\nS+XwHcIkSSqM4SxJUmEMZ3p7BzBfViVJ6pexD+c6IevbekqS+mHsw1mSpNIYzpIkFWZsw7nJJWmX\ntiVJTRrbcJYkqVSGsyRJhRnLcO7HMrRnbkuSmjKW4SxJUskMZ0mSClP3v1INlUEsO/tfqyRJdTlz\nliSpMGMTzoM+WcuTwyRJvRqbcJYkaVgYzpIkFWbkTwhrc3nZk8MkSb1w5ixJUmEMZ0mSCjPS4VzK\nGdOl1CFJGg4jHc6SJA2jWuEcEX8eEfdFxM6IuD4i5kXE6RFxZ0TsjogvRMQJTRUrqX/sZ6kcPYdz\nRCwH/gyYyMxXAbOBC4DLgU9m5q8ATwAbmih0Jkr8D1El1iRNKrmfpXFUd1l7DvALETEHeAmwD3g9\ncFN1/Rbg/Jr3IWkw7GepED2Hc2buBT4OfJ9OE/8UuAv4SWYernbbAyw/1u0jYmNEbI+I7UcOPdVr\nGS9Q+uy09Po0nprs56cPHRxEydJIq7OsvQhYB5wOnArM
B9ZO9/aZuTkzJzJzYtbc+b2WIakBTfbz\nvLkL+lSlND7qLGv/HvDdzPxhZh4CvgScAyyslsUAVgB7a9Yoqf/sZ6kgdcL5+8DZEfGSiAhgDXA/\ncDvw1mqf9cAt9UqcnmE64WqYatXYKKqfpXFX5znnO+mcKHI3cG/1tTYDHwQuiYjdwCnAVQ3UKamP\n7GepLLX+8UVmXgZcdtTww8DqOl9X0uDZz1I5hv4dwoZ5iXhY627aFecs5YpzlrZdhiQVY+T/ZaTa\nNZPQnc6+l/zn/jrlSNJQGPqZsyRJo2aoZ86jsCw8+Riu/fprWq6kOf1cou7+2s6iJY0qZ86SJBVm\nqGfOKkcbJ3Q5i5Y0qoYynEdhOftow7i8XdIZ1ga1pFHisrYkSYUZunAexVlzt2F4fKW/Lrn0+iTp\neIYunNWuYQo9Q1rSsDKcJUkqzNCcEDYMy71NKfHksGGegV5xzlJPEpM0VJw567iGOZgnucQtaZgY\nzpIkFWYownmclrS7tf24R3G2OWqPR9JoGopwliRpnBjOkiQVpuiztdte1i1BW2duj/Ly7+Rj8wxu\nSaUqduZsMD+fx0OSxkex4SxJ0rgynPU8o3iG9lTG5XFKGj6GsyRJhTluOEfE1RFxICJ2do0tjojb\nIuKh6vOiajwi4tMRsTsi7omIs2Za0EWrv+bzq1Pw2KiuQfezpN5MZ+Z8DbD2qLFNwLbMXAVsqy4D\nvBFYVX1sBK5spkz12zgtZ3cbw8d9DfazVLzjhnNmfhX48VHD64At1fYW4Pyu8Wuz4w5gYUQsa6pY\nSfXYz9Jw6PV1zkszc1+1/RgwOfVYDjzatd+eamwfR4mIjXT+GmfWvJMBXy40XRet/lpR/7FKQ6/R\nfp5/wuJp3/FbXn1qD+VKo6/2CWGZmUD2cLvNmTmRmROz5s6vW4akBjTRz/PmLuhDZdJ46TWc908u\nb1WfD1Tje4GVXfutqMYklct+lgrTazhvBdZX2+uBW7rGL6rO8jwb+GnXctmUTpn/lEvaM+SZ22pQ\no/0sqb7pvJTqeuBrwCsjYk9EbAA+Cvx+RDwE/F51GeDLwMPAbuDvgD/pS9X6OQNaM2E/S8PhuCeE\nZeaFU1y15hj7JnBx3aIk9Yf9LA0H3yFMkqTCGM4joO7zz2P2JhzH5DGQVBLDWZKkwhjO4pL/3N92\nCa3zGEgqieEsSVJhDOcR4mufJWk0GM6SJBXGcJYkqTCG8whyaVuShpvhPKJ8/lmShpfhLElSYQxn\nSZIKYziPuOkubV/yn/vH8o04xvVxSyqb4SxJUmEMZ0mSCmM4j4GZnLk9Tsu84/I4JQ0fw1mSpMIY\nzpIkFcZwHiMzeVOSUV7yHaele0nDyXCWJKkwc9ouAOBHT83n2q+/pu0yJEkqgjNnSZIKc9xwjoir\nI+JAROzsGvtYRHw7Iu6JiJsjYmHXdR+KiN0R8UBE/EG/Clf/jeJzs6P2eGbKfpaGw3RmztcAa48a\nuw14VWb+BvAg8CGAiDgDuAD49eo2fxsRsxurVq0YhUAbxT80enQN9rNUvOOGc2Z+FfjxUWO3Zubh\n6uIdwIpqex1wQ2Y+k5nfBXYDqxusV1IN9rM0HJp4zvndwD9V28uBR7uu21ONvUBEbIyI7RGx/cih\npxooQ/00zDPPYa27JbX7+elDB/tcojT6aoVzRFwKHAaum+ltM3NzZk5k5sSsufPrlKEBGqagG+Y/\nKNrQVD/Pm7ug+eKkMdPzS6ki4p3AecCazMxqeC+wsmu3FdWYpILZz1JZegrniFgLfAD4ncz8WddV\nW4HPR8QVwKnAKuDrtatUUSZno1ecs7TlSo7N2fLM2M9SeY4bzhFxPfC7wJKI2ANcRudszhOB2yIC\n4I7M/OPMvC8ibgTup7M8dnFmPtev4tWu7hBsO6gN5Omxn6XhcNxwzswLjzF81Yvs/xHgI3WKktQf\n9rM0HIp4+04NvzZm0c6WJY0qw1mN62dQG8iSxoHvrS1JUmGcOauvpjPTnZxdOyuWpA7DWa0zlCXp\n+VzWliSpMPHfbwbUYhERPwSeAh5vu5YpLKHM2kqtC8qtrdS64IW1vTwzX9ZWMb2KiIPAA23XMYVh\n+v6XotS6YHhqm3EvFxHOABGxPTMn2q7jWEqtrdS6oNzaSq0Lyq5tJkp+HNY2c6XWBaNdm8vakiQV\nxnCWJKkwJYXz5rYLeBGl1lZqXVBubaXWBWXXNhMlPw5rm7lS64IRrq2Y55wlSVJHSTNnSZJEAeEc\nEWsj4oGI2B0Rm1quZWVE3B4R90fEfRHx3mr8wxGxNyJ2VB/ntlTf9yLi3qqG7dXY4oi4LSIeqj4v\nGnBNr+w6Ljsi4smIeF9bxywiro6IAxGxs2vsmMcoOj5d/ezdExFntVDbxyLi29X93xwRC6vx0yLi\nv7qO32f6WVtTSulne7nnuuzn3utqtpczs7UPYDbwHeAVwAnAt4AzWqxnGXBWtb0AeBA4A/gw8Bdt\nHquqpu8BS44a+ytgU7W9Cbi85e/nY8DL2zpmwOuAs4CdxztGwLnAPwEBnA3c2UJtbwDmVNuXd9V2\nWvd+w/BRUj/by419P+3n6dfVaC+3PXNeDezOzIcz81ngBmBdW8Vk5r7MvLvaPgjsApa3Vc80rQO2\nVNtbgPNbrGUN8J3MfKStAjLzq8CPjxqe6hitA67NjjuAhRGxbJC1ZeatmXm4ungHsKJf9z8AxfSz\nvdwI+3kGdTXdy22H83Lg0a7LeyikgSLiNOBM4M5q6D3VcsXVbSw3VRK4NSLuioiN1djSzNxXbT8G\nDOafKR/bBcD1XZdLOGYw9TEq7efv3XT+8p90ekR8MyL+PSJ+u62iZqC04wnYyzXYz72r3ctth3OR\nIuKlwBeB92Xmk8CVwC8DvwXsAz7RUmmvzcyzgDcCF0fE67qvzM4aSiun30fECcCbgX+ohko5Zs/T\n5jF6MRFxKXAYuK4a2gf8UmaeCVwCfD4iTmqrvmFlL/fGfu5dU73cdjjvBVZ2XV5RjbUmIubSaebr\nMvNLAJm5PzOfy8wjwN/RWb4buMzcW30+ANxc1bF/cumm+nygjdro/JK5OzP3VzUWccwqUx2jIn7+\nIuKdwHnA26tfNmTmM5n5o2r7LjrP5f7qoGuboSKO5yR7uRb7uQdN9nLb4fwNYFVEnF79pXYBsLWt\nYiIigKuAXZl5Rdd49/MWbwF2Hn3bAdQ2PyIWTG7TOflgJ53jtb7abT1wy6Brq1xI1xJYCcesy1TH\naCtwUXWW59nAT7uWywYiItYCHwDenJk/6xp/WUTMrrZfAawCHh5kbT0opp/t5drs5xlqvJf7dTbb\ndD/onGH3IJ2/Ji5tuZbX0lkiuQfYUX2cC/w9cG81vhVY1kJtr6Bz9uu3gPsmjxVwCrANeAj4V2Bx\nC7XNB34EnNw11soxo/MLZR9wiM5zThumOkZ0zur8m+pn715gooXadtN5nmzy5+0z1b5/VH2fdwB3\nA3846O9rj4+
xiH62l2vVZz/3Vlejvew7hEmSVJi2l7UlSdJRDGdJkgpjOEuSVBjDWZKkwhjOkiQV\nxnCWJKkwhrMkSYUxnCVJKozhLElSYQxnSZIKYzhLklQYw1mSpMIYzpIkFcZwliSpMIazJEmFMZwl\nSSqM4SxJUmEMZ0mSCmM4S5JUGMNZkqTCGM6SJBWmb+EcEWsj4oGI2B0Rm/p1P5L6y16WBi8ys/kv\nGjEbeBD4fWAP8A3gwsy8v/E7k9Q39rLUjjl9+rqrgd2Z+TBARNwArAOO2dCnzJqdL5/dr1Kk4fTN\nw88+npkva7mMGfUywLxYlPNnnTqg8qTyPXXkBzydT8RMbtOvRFwOPNp1eQ/w6u4dImIjsBFg5azZ\n/Pvi/9GnUqThdNKB7z/Sdg1Mo5fh+f08P5bxpnlfGEx10hD4x6f/54xv09oJYZm5OTMnMnNiyazZ\nbZUhqQHd/XxiLGq7HGno9Suc9wIruy6vqMYkDRd7WWpBv8L5G8CqiDg9Ik4ALgC29um+JPWPvSy1\noC/POWfm4Yh4D/AvwGzg6sy8rx/3Jal/7GWpHX07RTozvwx8uV9fX9Jg2MvS4PkOYZIkFcZwliSp\nMIazJEmFMZwlSSqM4SxJUmEMZ0mSCmM4S5JUGMNZkqTCGM6SJBXGcJYkqTCGsyRJhTGcJUkqjOEs\nSVJhDGdJkgpjOEuSVBjDWZKkwhjOkiQVxnCWJKkwhrMkSYUxnCVJKozhLElSYQxnSZIK03M4R8TK\niLg9Iu6PiPsi4r3V+OKIuC0iHqo+L2quXEn9YD9LZakzcz4MvD8zzwDOBi6OiDOATcC2zFwFbKsu\nSyqb/SwVpOdwzsx9mXl3tX0Q2AUsB9YBW6rdtgDn1y1SUn/Zz1JZGnnOOSJOA84E7gSWZua+6qrH\ngKVT3GZjRGyPiO2PH3muiTIkNaBuPz+TTwykTmmU1Q7niHgp8EXgfZn5ZPd1mZlAHut2mbk5Mycy\nc2LJrNl1y5DUgCb6+USflpZqqxXOETGXTiNfl5lfqob3R8Sy6vplwIF6JUoaBPtZKkeds7UDuArY\nlZlXdF21FVhfba8Hbum9PEmDYD9LZZlT47bnAO8A7o2IHdXYXwIfBW6MiA3AI8Db6pUoaQDsZ6kg\nPYdzZv4HEFNcvabXrytp8OxnqSy+Q5gkSYUxnCVJKozhLElSYQxnSZIKYzhLklQYw1mSpMIYzpIk\nFcZwliSpMIazJEmFMZwlSSqM4SxJUmEMZ0mSCmM4S5JUmDr/MlKSNIRuffIv2i6hSG846eNtl/Bz\nzpwlSSqM4SxJUmEMZ0mSCmM4S5JUGMNZkqTCGM6SJBXGcJYkqTC1wzkiZkfENyPi/1aXT4+IOyNi\nd0R8ISJOqF+mpEGwn6UyNDFzfi+wq+vy5cAnM/NXgCeADQ3ch6TBsJ+lAtQK54hYAbwJ+Gx1OYDX\nAzdVu2wBzq9zH5IGw36WylF35vzXwAeAI9XlU4CfZObh6vIeYHnN+5A0GPazVIiewzkizgMOZOZd\nPd5+Y0Rsj4jtjx95rtcyJDWgyX5+Jp9ouDpp/NT5xxfnAG+OiHOBecBJwKeAhRExp/prewWw91g3\nzszNwGaAs+aemDXqkFRfY/18yuxft5+lmnqeOWfmhzJzRWaeBlwA/Ftmvh24HXhrtdt64JbaVUrq\nK/tZKks/Xuf8QeCSiNhN5zmrq/pwH5IGw36WWtDI/3POzK8AX6m2HwZWN/F1JQ2e/Sy1z3cIkySp\nMIazJEmFMZwlSSqM4SxJUmEMZ0mSCtPI2dqD9Ft/9IO2S1CXHV88te0SJGnkOHOWJKkwhrMkSYUx\nnCVJKozhLElSYQxnSZIKYzhLklQYw1mSpMIYzpIkFcZwliSpMIazJEmFMZwlSSqM4SxJUmEMZ0mS\nCmM4S5JUGMNZkqTCGM6SJBWmVjhHxMKIuCkivh0RuyLiNRGxOCJui4iHqs+LmipWUv/Yz1I56s6c\nPwX8c2b+GvCbwC5gE7AtM1cB26rLkspnP0uF6DmcI+Jk4HXAVQCZ+Wxm/gRYB2ypdtsCnF+3SEn9\nZT9LZakzcz4d+CHwuYj4ZkR8NiLmA0szc1+1z2PA0rpFSuo7+1kqSJ1wngOcBVyZmWcCT3HUkldm\nJpDHunFEbIyI7RGx/fEjz9UoQ1IDGuvnZ/KJvhcrjbo64bwH2JOZd1aXb6LT3PsjYhlA9fnAsW6c\nmZszcyIzJ5bMml2jDEkNaKyfT/ScMam2nsM5Mx8DHo2IV1ZDa4D7ga3A+mpsPXBLrQol9Z39LJVl\nTs3b/ylwXUScADwMvItO4N8YERuAR4C31bwPSYNhP0uFqBXOmbkDmDjGVWvqfF1Jg2c/S+XwHcIk\nSSqM4SxJUmEMZ0mSCmM4S5JUGMNZkqTCGM6SJBXGcJYkqTCGsyRJhTGcJUkqjOEsSVJhDGdJkgpj\nOEuSVBjDWZKkwhjOkiQVxnCWJKkwhrMkSYUxnCVJKozhLElSYQxnSZIKYzhLklQYw1mSpMIYzpIk\nFWZOnRtHxJ8D/wtI4F7gXcAy4AbgFOAu4B2Z+WzNOiX1mf08Pt5w0sfbLkHH0XM4R8Ry4M+AMzLz\nvyLiRuAC4Fzgk5l5Q0R8BtgAXNlItZL6Yhz7ecMf/G3bJYy0q/7lT9ouYajVXdaeA/xCRMwBXgLs\nA14P3FRdvwU4v+Z9SBoM+1kqRM/hnJl7gY8D36fTxD+ls+z1k8w8XO22B1h+rNtHxMaI2B4R2x8/\n8lyvZUhqQJP9/Ew+MYiSpZHWczhHxCJgHXA6cCowH1g73dtn5ubMnMjMiSWzZvdahqQGNNnPJ8ai\nPlUpjY86y9q/B3w3M3+YmYeALwHnAAurZTGAFcDemjVK6j/7WSpInXD+PnB2RLwkIgJYA9wP3A68\ntdpnPXBLvRIlDYD9LBWkznPOd9I5UeRuOi+7mAVsBj4IXBIRu+m8/OKqBuqU1Ef2s1SWWq9zzszL\ngMuOGn4YWF3n60oaPPtZKofvECZJUmEMZ0mSCmM4S5JUGMNZkqTCGM6SJBXGcJYkqTCGsyRJhTGc\nJUkqjOEsSVJhDGdJkgpjOEuSVBjDWZKkwhjOkiQVxnCWJKkwhrMkSYUxnCVJKozhLElSYQxnSZIK\nYzhLklQYw1mSpMIYzpIkFcZwliSpMMcN54i4OiIORMTOrrHFEXFbRDxUfV5UjUdEfDoidkfEPRFx\nVj+LlzQz9rM0HKYzc74GWHvU2CZgW2auArZVlwHeCKyqPjYCVzZTpqSGXIP9LBXvuOGcmV8FfnzU\n8DpgS7W9BTi/a/za7LgDWBgRy5oqVlI99rM0HHp9znlpZu6rth8Dllbby4FHu/bbU429QERsjIjt\nEbH98SPP9ViGpAY02s/P5BP9q1QaE7VPCMvMBLKH223OzInMnFgya3bdMiQ1oIl+PrHzlLWkGnoN\n5/2Ty1vV5wPV+F5gZdd+K6oxSeWyn6XC9BrOW4H11fZ64Jau8YuqszzP
Bn7atVwmqUz2s1SYOcfb\nISKuB34XWBIRe4DLgI8CN0bEBuAR4G3V7l8GzgV2Az8D3tWHmiX1yH6WhsNxwzkzL5ziqjXH2DeB\ni+sWJak/7GdpOPgOYZIkFcZwliSpMIazJEmFMZwlSSqM4SxJUmEMZ0mSCmM4S5JUGMNZkqTCGM6S\nJBXGcJYkqTCGsyRJhTGcJUkqzHH/8UVpdnzx1J9vL/jEm1qsZPAOvv8f2y5BkjQAzpwlSSrM0M2c\nYfxmzJMmH7czaEkabc6cJUkqjOEsSVJhhi6cx3VJu5vHQJJG29A852wgPZ/PP0vS6Bq6mbMkSaPO\ncJYkqTCGsyRJhTluOEfE1RFxICJ2do19LCK+HRH3RMTNEbGw67oPRcTuiHggIv6giSJ9vnlqHhvN\nRAn9LOn4pjNzvgZYe9TYbcCrMvM3gAeBDwFExBnABcCvV7f524iY3Vi1kuq6BvtZKt5xwzkzvwr8\n+KixWzPzcHXxDmBFtb0OuCEzn8nM7wK7gdUN1iupBvtZGg5NvJTq3cAXqu3ldJp70p5q7AUiYiOw\nEWDlrGP/Me6S7fT4sio1qHY/z49l/axPGgu1TgiLiEuBw8B1M71tZm7OzInMnFhyjHBe8Ik3cfD9\n/2jgSAPSVD+fGIuaL04aMz3PnCPincB5wJrMzGp4L7Cya7cV1ZikgtnPUll6CueIWAt8APidzPxZ\n11Vbgc9HxBXAqcAq4Ou9Fucz8WNDAAAFGklEQVSy9sy4vK1eDKqfJU3fccM5Iq4HfhdYEhF7gMvo\nnM15InBbRADckZl/nJn3RcSNwP10lscuzsznZlKQgVzf5FMC0tEG3c+SenPccM7MC48xfNWL7P8R\n4CN1ipLUH/azNBx8hzBJkgpTVDi7pN2cBZ94k8dTkoZUUeEsSZIMZ0mSilNEOM9aebJLsH3icZWk\n4VNEOEuSpP8W//1mQC0WEfFD4Cng8bZrmcISyqyt1Lqg3NpKrQteWNvLM/NlbRXTq4g4CDzQdh1T\nGKbvfylKrQuGp7YZ93IR4QwQEdszc6LtOo6l1NpKrQvKra3UuqDs2mai5MdhbTNXal0w2rW5rC1J\nUmEMZ0mSClNSOG9uu4AXUWptpdYF5dZWal1Qdm0zUfLjsLaZK7UuGOHainnOWZIkdZQ0c5YkSRjO\nkiQVp/Vwjoi1EfFAROyOiE0t17IyIm6PiPsj4r6IeG81/uGI2BsRO6qPc1uq73sRcW9Vw/ZqbHFE\n3BYRD1WfFw24pld2HZcdEfFkRLyvrWMWEVdHxIGI2Nk1dsxjFB2frn727omIs1qo7WMR8e3q/m+O\niIXV+GkR8V9dx+8z/aytKaX0s73cc132c+91NdvLmdnaBzAb+A7wCuAE4FvAGS3Wsww4q9peADwI\nnAF8GPiLNo9VVdP3gCVHjf0VsKna3gRc3vL38zHg5W0dM+B1wFnAzuMdI+Bc4J+AAM4G7myhtjcA\nc6rty7tqO617v2H4KKmf7eXGvp/28/TrarSX2545rwZ2Z+bDmfkscAOwrq1iMnNfZt5dbR8EdgHL\n26pnmtYBW6rtLcD5LdayBvhOZj7SVgGZ+VXgx0cNT3WM1gHXZscdwMKIWDbI2jLz1sw8XF28A1jR\nr/sfgGL62V5uhP08g7qa7uW2w3k58GjX5T0U0kARcRpwJnBnNfSearni6jaWmyoJ3BoRd0XExmps\naWbuq7YfA5a2UxoAFwDXd10u4ZjB1MeotJ+/d9P5y3/S6RHxzYj494j47baKmoHSjidgL9dgP/eu\ndi+3Hc5FioiXAl8E3peZTwJXAr8M/BawD/hES6W9NjPPAt4IXBwRr+u+MjtrKK28Ni4iTgDeDPxD\nNVTKMXueNo/Ri4mIS4HDwHXV0D7glzLzTOAS4PMRcVJb9Q0re7k39nPvmurltsN5L7Cy6/KKaqw1\nETGXTjNfl5lfAsjM/Zn5XGYeAf6OzvLdwGXm3urzAeDmqo79k0s31ecDbdRG55fM3Zm5v6qxiGNW\nmeoYFfHzFxHvBM4D3l79siEzn8nMH1Xbd9F5LvdXB13bDBVxPCfZy7XYzz1ospfbDudvAKsi4vTq\nL7ULgK1tFRMRAVwF7MrMK7rGu5+3eAuw8+jbDqC2+RGxYHKbzskHO+kcr/XVbuuBWwZdW+VCupbA\nSjhmXaY6RluBi6qzPM8Gftq1XDYQEbEW+ADw5sz8Wdf4yyJidrX9CmAV8PAga+tBMf1sL9dmP89Q\n473cr7PZpvtB5wy7B+n8NXFpy7W8ls4SyT3AjurjXODvgXur8a3AshZqewWds1+/Bdw3eayAU4Bt\nwEPAvwKLW6htPvAj4OSusVaOGZ1fKPuAQ3Sec9ow1TGic1bn31Q/e/cCEy3UtpvO82STP2+fqfb9\no+r7vAO4G/jDQX9fe3yMRfSzvVyrPvu5t7oa7WXfvlOSpMK0vawtSZKOYjhLklQYw1mSpMIYzpIk\nFcZwliSpMIazJEmFMZwlSSrM/wfaVvuK+BCoMwAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "tags": [] + } + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "ORQXaa6k30yD", + "colab_type": "text" + }, + "source": [ + "# Training a Model\n", + "\n", + "Now we move on to training our very own model. Here we will be finetuning the base of a Mask RCNN, modifying it to support Semantic Segmentation and change the number of classes to support this dataset. To do this we need\n", + "\n", + "1. A base model that has the same amount of output classes as our dataset. In this case, we have need for only 3 classes instead of COCO's 80. Hence , we first need to do some model trimming. \n", + "\n", + "2. Second, we need to build a Panoptic FPN model. That means attaching the semantic segmentation branch to the FPN.\n", + "\n", + "3. FInally, we write a loss function to train the semantic segmentation head.\n", + "\n", + "4. Lastly, set to train !" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "SVaNqbpiAzwx", + "colab_type": "text" + }, + "source": [ + "## Model Trimming" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "hbzY16ocEdrg", + "colab_type": "text" + }, + "source": [ + "### Helper Functions for Visualising Detections" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "yk5a6RpsEdIt", + "colab_type": "code", + "colab": {} + }, + "source": [ + "class Resize(object):\n", + " def __init__(self, min_size, max_size):\n", + " self.min_size = min_size\n", + " self.max_size = max_size\n", + "\n", + " # modified from torchvision to add support for max size\n", + " def get_size(self, image_size):\n", + " w, h = image_size\n", + " size = self.min_size\n", + " max_size = self.max_size\n", + " if max_size is not None:\n", + " min_original_size = float(min((w, h)))\n", + " max_original_size = float(max((w, h)))\n", + " if max_original_size / min_original_size * size > max_size:\n", + " size = int(round(max_size * min_original_size / max_original_size))\n", + "\n", + " if (w <= h and w == size) or (h <= w and h == size):\n", + " return (h, w)\n", + "\n", + " if w < h:\n", + " ow = size\n", + " oh = int(size * h / w)\n", + " else:\n", + " oh = size\n", + " ow = int(size * w / h)\n", + "\n", + " return (oh, ow)\n", + "\n", + " def __call__(self, image):\n", + " size = self.get_size(image.size)\n", + " image = F.resize(image, size)\n", + " return image\n", + " \n", + " \n", + "class COCODemo(object):\n", + " \n", + " def __init__(\n", + " self,\n", + " cfg,\n", + " confidence_threshold=0.7,\n", + " show_mask_heatmaps=False,\n", + " masks_per_dim=2,\n", + " min_image_size=224,\n", + " convert_model=False\n", + " ):\n", + " self.cfg = cfg.clone()\n", + " \n", + " self.model = build_detection_network(cfg)\n", + " self.training = False\n", + "\n", + " self.model.eval()\n", + " self.device = torch.device(cfg.MODEL.DEVICE)\n", + " self.model.to(self.device)\n", + " self.min_image_size = min_image_size\n", + "\n", + " save_dir = cfg.OUTPUT_DIR\n", + " checkpointer = DetectronCheckpointer(cfg, self.model, save_dir=save_dir)\n", + " _ = checkpointer.load(cfg.MODEL.WEIGHT)\n", + "\n", + " self.transforms = self.build_transform()\n", + "\n", + " mask_threshold = -1 if show_mask_heatmaps else 0.5\n", + " self.masker = Masker(threshold=mask_threshold, padding=1)\n", + "\n", + " # used to make colors for each class\n", + " self.palette = torch.tensor([2 ** 25 - 1, 2 ** 15 - 1, 2 ** 21 - 1])\n", + "\n", + " self.cpu_device = torch.device(\"cpu\")\n", + " self.confidence_threshold = confidence_threshold\n", + " 
self.show_mask_heatmaps = show_mask_heatmaps\n", + " self.masks_per_dim = masks_per_dim\n", + "\n", + " def build_transform(self):\n", + " \"\"\"\n", + " Creates a basic transformation that was used to train the models\n", + " \"\"\"\n", + " cfg = self.cfg\n", + "\n", + " # we are loading images with OpenCV, so we don't need to convert them\n", + " # to BGR, they are already! So all we need to do is to normalize\n", + " # by 255 if we want to convert to BGR255 format, or flip the channels\n", + " # if we want it to be in RGB in [0-1] range.\n", + " if cfg.INPUT.TO_BGR255:\n", + " to_bgr_transform = T.Lambda(lambda x: x * 255)\n", + " else:\n", + " to_bgr_transform = T.Lambda(lambda x: x[[2, 1, 0]])\n", + "\n", + " normalize_transform = T.Normalize(\n", + " mean=cfg.INPUT.PIXEL_MEAN, std=cfg.INPUT.PIXEL_STD\n", + " )\n", + " min_size = cfg.INPUT.MIN_SIZE_TEST\n", + " max_size = cfg.INPUT.MAX_SIZE_TEST\n", + " transform = T.Compose(\n", + " [\n", + " T.ToPILImage(),\n", + " Resize(min_size, max_size),\n", + " T.ToTensor(),\n", + " to_bgr_transform,\n", + " normalize_transform,\n", + " ]\n", + " )\n", + " return transform\n", + "\n", + " def run_on_opencv_image(self, image, panoptic=False, objDet=False, semantic=False):\n", + " \"\"\"\n", + " Arguments:\n", + " image (np.ndarray): an image as returned by OpenCV\n", + " Returns:\n", + " prediction (BoxList): the detected objects. Additional information\n", + " of the detection properties can be found in the fields of\n", + " the BoxList via `prediction.fields()`\n", + " \"\"\"\n", + " predictions = self.compute_prediction(image)\n", + " top_predictions = self.select_top_predictions(predictions)\n", + " \n", + " \n", + " result = image.copy()\n", + " \n", + " if self.show_mask_heatmaps:\n", + " return self.create_mask_montage(result, top_predictions)\n", + " result = self.overlay_boxes(result, top_predictions)\n", + " if self.cfg.MODEL.MASK_ON:\n", + " result = self.overlay_mask(result, top_predictions)\n", + " if self.cfg.MODEL.KEYPOINT_ON:\n", + " result = self.overlay_keypoints(result, top_predictions)\n", + " result = self.overlay_class_names(result, top_predictions)\n", + "\n", + " return result\n", + "\n", + " def compute_prediction(self, original_image):\n", + " \"\"\"\n", + " Arguments:\n", + " original_image (np.ndarray): an image as returned by OpenCV\n", + " Returns:\n", + " prediction (BoxList): the detected objects. 
Additional information\n", + " of the detection properties can be found in the fields of\n", + " the BoxList via `prediction.fields()`\n", + " \"\"\"\n", + " # apply pre-processing to image\n", + " image = self.transforms(original_image)\n", + " # convert to an ImageList, padded so that it is divisible by\n", + " # cfg.DATALOADER.SIZE_DIVISIBILITY\n", + " image_list = to_image_list(image, self.cfg.DATALOADER.SIZE_DIVISIBILITY)\n", + " image_list = image_list.to(self.device)\n", + " # compute predictions\n", + " with torch.no_grad():\n", + " predictions = self.model(image_list)\n", + " predictions = [o.to(self.cpu_device) for o in predictions]\n", + "\n", + " # always single image is passed at a time\n", + " prediction = predictions[0]\n", + "\n", + " # reshape prediction (a BoxList) into the original image size\n", + " height, width = original_image.shape[:-1]\n", + " prediction = prediction.resize((width, height))\n", + "\n", + " if prediction.has_field(\"mask\"):\n", + " # if we have masks, paste the masks in the right position\n", + " # in the image, as defined by the bounding boxes\n", + " masks = prediction.get_field(\"mask\")\n", + " # always single image is passed at a time\n", + " masks = self.masker([masks], [prediction])[0]\n", + " prediction.add_field(\"mask\", masks)\n", + " return prediction\n", + "\n", + " def select_top_predictions(self, predictions):\n", + " \"\"\"\n", + " Select only predictions which have a `score` > self.confidence_threshold,\n", + " and returns the predictions in descending order of score\n", + " Arguments:\n", + " predictions (BoxList): the result of the computation by the model.\n", + " It should contain the field `scores`.\n", + " Returns:\n", + " prediction (BoxList): the detected objects. Additional information\n", + " of the detection properties can be found in the fields of\n", + " the BoxList via `prediction.fields()`\n", + " \"\"\"\n", + " scores = predictions.get_field(\"scores\")\n", + " keep = torch.nonzero(scores > self.confidence_threshold).squeeze(1)\n", + " predictions = predictions[keep]\n", + " scores = predictions.get_field(\"scores\")\n", + " _, idx = scores.sort(0, descending=True)\n", + " return predictions[idx]\n", + "\n", + " def compute_colors_for_labels(self, labels):\n", + " \"\"\"\n", + " Simple function that adds fixed colors depending on the class\n", + " \"\"\"\n", + " colors = labels[:, None] * self.palette\n", + " colors = (colors % 255).numpy().astype(\"uint8\")\n", + " return colors\n", + "\n", + " def overlay_boxes(self, image, predictions):\n", + " \"\"\"\n", + " Adds the predicted boxes on top of the image\n", + " Arguments:\n", + " image (np.ndarray): an image as returned by OpenCV\n", + " predictions (BoxList): the result of the computation by the model.\n", + " It should contain the field `labels`.\n", + " \"\"\"\n", + " labels = predictions.get_field(\"labels\")\n", + " boxes = predictions.bbox\n", + "\n", + " colors = self.compute_colors_for_labels(labels).tolist()\n", + "\n", + " for box, color in zip(boxes, colors):\n", + " box = box.to(torch.int64)\n", + " top_left, bottom_right = box[:2].tolist(), box[2:].tolist()\n", + " image = cv2.rectangle(\n", + " image, tuple(top_left), tuple(bottom_right), tuple(color), 1\n", + " )\n", + "\n", + " return image\n", + "\n", + " def overlay_mask(self, image, predictions):\n", + " \"\"\"\n", + " Adds the instances contours for each predicted object.\n", + " Each label has a different color.\n", + " Arguments:\n", + " image (np.ndarray): an image as returned by OpenCV\n", 
+ " predictions (BoxList): the result of the computation by the model.\n", + " It should contain the field `mask` and `labels`.\n", + " \"\"\"\n", + " masks = predictions.get_field(\"mask\").numpy()\n", + " labels = predictions.get_field(\"labels\")\n", + "\n", + " colors = self.compute_colors_for_labels(labels).tolist()\n", + "\n", + " for mask, color in zip(masks, colors):\n", + " thresh = mask[0, :, :, None]\n", + " contours, hierarchy = cv2_util.findContours(\n", + " thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE\n", + " )\n", + " image = cv2.drawContours(image, contours, -1, color, 3)\n", + "\n", + " composite = image\n", + "\n", + " return composite\n", + "\n", + " def overlay_keypoints(self, image, predictions):\n", + " keypoints = predictions.get_field(\"keypoints\")\n", + " kps = keypoints.keypoints\n", + " scores = keypoints.get_field(\"logits\")\n", + " kps = torch.cat((kps[:, :, 0:2], scores[:, :, None]), dim=2).numpy()\n", + " for region in kps:\n", + " image = vis_keypoints(image, region.transpose((1, 0)))\n", + " return image\n", + "\n", + " def create_mask_montage(self, image, predictions):\n", + " \"\"\"\n", + " Create a montage showing the probability heatmaps for each one one of the\n", + " detected objects\n", + " Arguments:\n", + " image (np.ndarray): an image as returned by OpenCV\n", + " predictions (BoxList): the result of the computation by the model.\n", + " It should contain the field `mask`.\n", + " \"\"\"\n", + " masks = predictions.get_field(\"mask\")\n", + " masks_per_dim = self.masks_per_dim\n", + " masks = L.interpolate(\n", + " masks.float(), scale_factor=1 / masks_per_dim\n", + " ).byte()\n", + " height, width = masks.shape[-2:]\n", + " max_masks = masks_per_dim ** 2\n", + " masks = masks[:max_masks]\n", + " # handle case where we have less detections than max_masks\n", + " if len(masks) < max_masks:\n", + " masks_padded = torch.zeros(max_masks, 1, height, width, dtype=torch.uint8)\n", + " masks_padded[: len(masks)] = masks\n", + " masks = masks_padded\n", + " masks = masks.reshape(masks_per_dim, masks_per_dim, height, width)\n", + " result = torch.zeros(\n", + " (masks_per_dim * height, masks_per_dim * width), dtype=torch.uint8\n", + " )\n", + " for y in range(masks_per_dim):\n", + " start_y = y * height\n", + " end_y = (y + 1) * height\n", + " for x in range(masks_per_dim):\n", + " start_x = x * width\n", + " end_x = (x + 1) * width\n", + " result[start_y:end_y, start_x:end_x] = masks[y, x]\n", + " return cv2.applyColorMap(result.numpy(), cv2.COLORMAP_JET)\n", + "\n", + " def overlay_class_names(self, image, predictions):\n", + " \"\"\"\n", + " Adds detected class names and scores in the positions defined by the\n", + " top-left corner of the predicted bounding box\n", + " Arguments:\n", + " image (np.ndarray): an image as returned by OpenCV\n", + " predictions (BoxList): the result of the computation by the model.\n", + " It should contain the field `scores` and `labels`.\n", + " \"\"\"\n", + " scores = predictions.get_field(\"scores\").tolist()\n", + " labels = predictions.get_field(\"labels\").tolist()\n", + " labels = [self.CATEGORIES[i] for i in labels]\n", + " boxes = predictions.bbox\n", + "\n", + " template = \"{}: {:.2f}\"\n", + " for box, score, label in zip(boxes, scores, labels):\n", + " x, y = box[:2]\n", + " s = template.format(label, score)\n", + " cv2.putText(\n", + " image, s, (x, y), cv2.FONT_HERSHEY_SIMPLEX, .5, (255, 255, 255), 1\n", + " )\n", + "\n", + " return image\n", + "\n", + "import numpy as np\n", + "import matplotlib.pyplot 
as plt\n", + "from maskrcnn_benchmark.structures.keypoint import PersonKeypoints\n", + "\n", + "def vis_keypoints(img, kps, kp_thresh=2, alpha=0.7):\n", + " \"\"\"Visualizes keypoints (adapted from vis_one_image).\n", + " kps has shape (4, #keypoints) where 4 rows are (x, y, logit, prob).\n", + " \"\"\"\n", + " dataset_keypoints = PersonKeypoints.NAMES\n", + " kp_lines = PersonKeypoints.CONNECTIONS\n", + "\n", + " # Convert from plt 0-1 RGBA colors to 0-255 BGR colors for opencv.\n", + " cmap = plt.get_cmap('rainbow')\n", + " colors = [cmap(i) for i in np.linspace(0, 1, len(kp_lines) + 2)]\n", + " colors = [(c[2] * 255, c[1] * 255, c[0] * 255) for c in colors]\n", + "\n", + " # Perform the drawing on a copy of the image, to allow for blending.\n", + " kp_mask = np.copy(img)\n", + "\n", + " # Draw mid shoulder / mid hip first for better visualization.\n", + " mid_shoulder = (\n", + " kps[:2, dataset_keypoints.index('right_shoulder')] +\n", + " kps[:2, dataset_keypoints.index('left_shoulder')]) / 2.0\n", + " sc_mid_shoulder = np.minimum(\n", + " kps[2, dataset_keypoints.index('right_shoulder')],\n", + " kps[2, dataset_keypoints.index('left_shoulder')])\n", + " mid_hip = (\n", + " kps[:2, dataset_keypoints.index('right_hip')] +\n", + " kps[:2, dataset_keypoints.index('left_hip')]) / 2.0\n", + " sc_mid_hip = np.minimum(\n", + " kps[2, dataset_keypoints.index('right_hip')],\n", + " kps[2, dataset_keypoints.index('left_hip')])\n", + " nose_idx = dataset_keypoints.index('nose')\n", + " if sc_mid_shoulder > kp_thresh and kps[2, nose_idx] > kp_thresh:\n", + " cv2.line(\n", + " kp_mask, tuple(mid_shoulder), tuple(kps[:2, nose_idx]),\n", + " color=colors[len(kp_lines)], thickness=2, lineType=cv2.LINE_AA)\n", + " if sc_mid_shoulder > kp_thresh and sc_mid_hip > kp_thresh:\n", + " cv2.line(\n", + " kp_mask, tuple(mid_shoulder), tuple(mid_hip),\n", + " color=colors[len(kp_lines) + 1], thickness=2, lineType=cv2.LINE_AA)\n", + "\n", + " # Draw the keypoints.\n", + " for l in range(len(kp_lines)):\n", + " i1 = kp_lines[l][0]\n", + " i2 = kp_lines[l][1]\n", + " p1 = kps[0, i1], kps[1, i1]\n", + " p2 = kps[0, i2], kps[1, i2]\n", + " if kps[2, i1] > kp_thresh and kps[2, i2] > kp_thresh:\n", + " cv2.line(\n", + " kp_mask, p1, p2,\n", + " color=colors[l], thickness=2, lineType=cv2.LINE_AA)\n", + " if kps[2, i1] > kp_thresh:\n", + " cv2.circle(\n", + " kp_mask, p1,\n", + " radius=3, color=colors[l], thickness=-1, lineType=cv2.LINE_AA)\n", + " if kps[2, i2] > kp_thresh:\n", + " cv2.circle(\n", + " kp_mask, p2,\n", + " radius=3, color=colors[l], thickness=-1, lineType=cv2.LINE_AA)\n", + "\n", + " # Blend the keypoints.\n", + " return cv2.addWeighted(img, 1.0 - alpha, kp_mask, alpha, 0)" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "If8z4OZfDHmC", + "colab_type": "text" + }, + "source": [ + "### Base Model Config\n", + "\n", + "This is the base model that we will finetune from. First we need to replace the bounding box heads and mask heads to make it compatible with our Shapes Dataset." 
+ ] + }, + { + "cell_type": "code", + "metadata": { + "id": "wM0coO44ClbV", + "colab_type": "code", + "outputId": "f1085b00-8428-40e1-c040-e9fa451afaa2", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 54 + } + }, + "source": [ + "%%writefile base_config.yaml\n", + "MODEL:\n", + " META_ARCHITECTURE: \"GeneralizedRCNN\"\n", + " WEIGHT: \"catalog://Caffe2Detectron/COCO/35858933/e2e_mask_rcnn_R-50-FPN_1x\"\n", + " BACKBONE:\n", + " CONV_BODY: \"R-50-FPN\"\n", + " RESNETS:\n", + " BACKBONE_OUT_CHANNELS: 256\n", + " RPN:\n", + " USE_FPN: True\n", + " ANCHOR_STRIDE: (4, 8, 16, 32, 64)\n", + " PRE_NMS_TOP_N_TRAIN: 2000\n", + " PRE_NMS_TOP_N_TEST: 1000\n", + " POST_NMS_TOP_N_TEST: 1000\n", + " FPN_POST_NMS_TOP_N_TEST: 1000\n", + " ROI_HEADS:\n", + " USE_FPN: True\n", + " ROI_BOX_HEAD:\n", + " POOLER_RESOLUTION: 7\n", + " POOLER_SCALES: (0.25, 0.125, 0.0625, 0.03125)\n", + " POOLER_SAMPLING_RATIO: 2\n", + " FEATURE_EXTRACTOR: \"FPN2MLPFeatureExtractor\"\n", + " PREDICTOR: \"FPNPredictor\"\n", + " ROI_MASK_HEAD:\n", + " POOLER_SCALES: (0.25, 0.125, 0.0625, 0.03125)\n", + " FEATURE_EXTRACTOR: \"MaskRCNNFPNFeatureExtractor\"\n", + " PREDICTOR: \"MaskRCNNC4Predictor\"\n", + " POOLER_RESOLUTION: 14\n", + " POOLER_SAMPLING_RATIO: 2\n", + " RESOLUTION: 28\n", + " SHARE_BOX_FEATURE_EXTRACTOR: False\n", + " MASK_ON: True\n", + "DATALOADER:\n", + " SIZE_DIVISIBILITY: 32" + ], + "execution_count": 5, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Overwriting base_config.yaml\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "mOo-0LGFEAmc", + "colab_type": "text" + }, + "source": [ + "### Pretrained weight removal\n", + "\n", + "Here, the pretrained weights of bbox, mask and class predictions are removed. This is done so that we can make the model shapes dataset compatible i.e predict 3 classes instead of Coco's 81 classes." 
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "ISFsxBxBDZcQ",
+ "colab_type": "code",
+ "colab": {}
+ },
+ "source": [
+ "def removekey(d, listofkeys):\n",
+ " r = dict(d)\n",
+ " for key in listofkeys:\n",
+ " print('key: {} is removed'.format(key))\n",
+ " r.pop(key)\n",
+ " return r\n",
+ " \n",
+ "\n",
+ "config_file = \"base_config.yaml\"\n",
+ "\n",
+ "# update the config options with the config file\n",
+ "cfg.merge_from_file(config_file)\n",
+ "\n",
+ "\n",
+ "# Add these for printing class names over your predictions.\n",
+ "COCODemo.CATEGORIES = [\n",
+ " \"__background\",\n",
+ " \"square\",\n",
+ " \"circle\",\n",
+ " \"triangle\"\n",
+ "]\n",
+ "\n",
+ "demo = COCODemo(\n",
+ " cfg, \n",
+ " min_image_size=800,\n",
+ " confidence_threshold=0.7,\n",
+ " convert_model=True)\n",
+ "\n",
+ "base_model = demo.model\n",
+ "\n",
+ "# Removes pretrained weights from the state dict\n",
+ "new_state_dict = removekey(base_model.state_dict(), [ \n",
+ " \"roi_heads.box.predictor.cls_score.weight\", \"roi_heads.box.predictor.cls_score.bias\", \n",
+ " \"roi_heads.box.predictor.bbox_pred.weight\", \"roi_heads.box.predictor.bbox_pred.bias\",\n",
+ " \"roi_heads.mask.predictor.mask_fcn_logits.weight\", \"roi_heads.mask.predictor.mask_fcn_logits.bias\"\n",
+ " ])\n",
+ "\n",
+ "# Save the new state dict; we will use this as the starting weights for our fine-tuned model\n",
+ "torch.save(new_state_dict, \"base_model.pth\")"
+ ],
+ "execution_count": 0,
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "bbCBInqHFUg7",
+ "colab_type": "text"
+ },
+ "source": [
+ "### Fine Tuned Model Config\n",
+ "\n",
+ "Here we define our Shapes dataset config. The important fields are:\n",
+ "\n",
+ "1. WEIGHT: points to the base_model.pth saved in the previous step.\n",
+ "2. NUM_CLASSES: defines how many classes we will predict. Note that the number includes the background, hence our Shapes dataset has 4 classes. \n",
+ "3. PANOPTIC.CHANNEL_SIZE: sets the channel size of the segmentation head of the FPN.\n",
+ "4. PANOPTIC.NUM_CLASSES: the number of classes of the semantic segmentation head (see the sketch after this list).",
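+ "\n",
+ "The config in the next cell sets only the detection fields; the PANOPTIC options are custom additions for the semantic segmentation head and are not part of the stock maskrcnn_benchmark config, so they have to be registered on the cfg before they can be set or merged. The sketch below is illustrative only: where the keys live (here under MODEL) and the values shown are our assumptions, not something the repo prescribes.\n",
+ "\n",
+ "```python\n",
+ "from yacs.config import CfgNode as CN\n",
+ "from maskrcnn_benchmark.config import cfg\n",
+ "\n",
+ "# register the custom panoptic options before setting or merging them (illustrative values)\n",
+ "cfg.MODEL.PANOPTIC = CN()\n",
+ "cfg.MODEL.PANOPTIC.CHANNEL_SIZE = 128  # channel size of the FPN segmentation head\n",
+ "cfg.MODEL.PANOPTIC.NUM_CLASSES = 4     # 3 shapes + background\n",
+ "```"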
+ ] + }, + { + "cell_type": "code", + "metadata": { + "id": "5AhIiTgmFXyi", + "colab_type": "code", + "outputId": "6e38f4a7-2d5c-4162-a824-0261d357d3d1", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 54 + } + }, + "source": [ + "%%writefile shapes_config.yaml\n", + "MODEL:\n", + " META_ARCHITECTURE: \"GeneralizedRCNN\"\n", + " WEIGHT: \"base_model.pth\"\n", + " BACKBONE:\n", + " CONV_BODY: \"R-50-FPN\"\n", + " RESNETS:\n", + " BACKBONE_OUT_CHANNELS: 256\n", + " RPN:\n", + " USE_FPN: True\n", + " ANCHOR_STRIDE: (4, 8, 16, 32, 64)\n", + " PRE_NMS_TOP_N_TRAIN: 2000\n", + " PRE_NMS_TOP_N_TEST: 1000\n", + " POST_NMS_TOP_N_TEST: 1000\n", + " FPN_POST_NMS_TOP_N_TEST: 1000\n", + " ROI_HEADS:\n", + " USE_FPN: True\n", + " ROI_BOX_HEAD:\n", + " POOLER_RESOLUTION: 7\n", + " POOLER_SCALES: (0.25, 0.125, 0.0625, 0.03125)\n", + " POOLER_SAMPLING_RATIO: 2\n", + " FEATURE_EXTRACTOR: \"FPN2MLPFeatureExtractor\"\n", + " PREDICTOR: \"FPNPredictor\"\n", + " NUM_CLASSES: 4 # background + num_classes : IMPORTANT dont forget to add this\n", + " ROI_MASK_HEAD:\n", + " POOLER_SCALES: (0.25, 0.125, 0.0625, 0.03125)\n", + " FEATURE_EXTRACTOR: \"MaskRCNNFPNFeatureExtractor\"\n", + " PREDICTOR: \"MaskRCNNC4Predictor\"\n", + " POOLER_RESOLUTION: 14\n", + " POOLER_SAMPLING_RATIO: 2\n", + " RESOLUTION: 28\n", + " SHARE_BOX_FEATURE_EXTRACTOR: False\n", + " MASK_ON: True\n", + "DATALOADER:\n", + " SIZE_DIVISIBILITY: 32" + ], + "execution_count": 8, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Writing shapes_config.yaml\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "tAn3omCjTFGI", + "colab_type": "text" + }, + "source": [ + "### Data Loader\n", + "\n", + "This function creates a data loader with our shapes dataset. This data loader is used internally in the repo to train the model." + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "oODu2UpVTHXz", + "colab_type": "code", + "colab": {} + }, + "source": [ + "def build_data_loader(cfg, dataset, is_train=True, is_distributed=False, start_iter=0):\n", + " num_gpus = get_world_size()\n", + " if is_train:\n", + " images_per_batch = cfg.SOLVER.IMS_PER_BATCH\n", + " assert (\n", + " images_per_batch % num_gpus == 0\n", + " ), \"SOLVER.IMS_PER_BATCH ({}) must be divisible by the number of GPUs ({}) used.\".format(\n", + " images_per_batch, num_gpus)\n", + " images_per_gpu = images_per_batch // num_gpus\n", + " shuffle = True\n", + " num_iters = cfg.SOLVER.MAX_ITER\n", + " else:\n", + " images_per_batch = cfg.TEST.IMS_PER_BATCH\n", + " assert (\n", + " images_per_batch % num_gpus == 0\n", + " ), \"TEST.IMS_PER_BATCH ({}) must be divisible by the number of GPUs ({}) used.\".format(\n", + " images_per_batch, num_gpus)\n", + " images_per_gpu = images_per_batch // num_gpus\n", + " shuffle = False if not is_distributed else True\n", + " num_iters = None\n", + " start_iter = 0\n", + "\n", + " if images_per_gpu > 1:\n", + " logger = logging.getLogger(__name__)\n", + " logger.warning(\n", + " \"When using more than one image per GPU you may encounter \"\n", + " \"an out-of-memory (OOM) error if your GPU does not have \"\n", + " \"sufficient memory. If this happens, you can reduce \"\n", + " \"SOLVER.IMS_PER_BATCH (for training) or \"\n", + " \"TEST.IMS_PER_BATCH (for inference). For training, you must \"\n", + " \"also adjust the learning rate and schedule length according \"\n", + " \"to the linear scaling rule. 
See for example: \"\n", + " \"https://github.com/facebookresearch/Detectron/blob/master/configs/getting_started/tutorial_1gpu_e2e_faster_rcnn_R-50-FPN.yaml#L14\"\n", + " )\n", + "\n", + " # group images which have similar aspect ratio. In this case, we only\n", + " # group in two cases: those with width / height > 1, and the other way around,\n", + " # but the code supports more general grouping strategy\n", + " aspect_grouping = [1] if cfg.DATALOADER.ASPECT_RATIO_GROUPING else []\n", + "\n", + " paths_catalog = import_file(\n", + " \"maskrcnn_benchmark.config.paths_catalog\", cfg.PATHS_CATALOG, True\n", + " )\n", + " DatasetCatalog = paths_catalog.DatasetCatalog\n", + " dataset_list = cfg.DATASETS.TRAIN if is_train else cfg.DATASETS.TEST\n", + "\n", + " # If bbox aug is enabled in testing, simply set transforms to None and we will apply transforms later\n", + " transforms = None if not is_train and cfg.TEST.BBOX_AUG.ENABLED else build_transforms(cfg, is_train)\n", + " \n", + " dataset.transforms = transforms\n", + " datasets = [ dataset ]\n", + " \n", + " data_loaders = []\n", + " for dataset in datasets:\n", + " sampler = make_data_sampler(dataset, shuffle, is_distributed)\n", + " batch_sampler = make_batch_data_sampler(\n", + " dataset, sampler, aspect_grouping, images_per_gpu, num_iters, start_iter\n", + " )\n", + " collator = BBoxAugCollator() if not is_train and cfg.TEST.BBOX_AUG.ENABLED else \\\n", + " BatchCollator(cfg.DATALOADER.SIZE_DIVISIBILITY)\n", + " num_workers = cfg.DATALOADER.NUM_WORKERS\n", + " data_loader = torch.utils.data.DataLoader(\n", + " dataset,\n", + " num_workers=num_workers,\n", + " batch_sampler=batch_sampler,\n", + " collate_fn=collator,\n", + " )\n", + " data_loaders.append(data_loader)\n", + " if is_train:\n", + " # during training, a single (possibly concatenated) data_loader is returned\n", + " assert len(data_loaders) == 1\n", + " return data_loaders[0]\n", + " return data_loaders" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "zBrlwqT7RsdJ", + "colab_type": "text" + }, + "source": [ + "### Detection Model\n", + "\n", + "The model is a Mask R-CNN, following [this](https://arxiv.org/abs/1901.02446) paper, augmented with pruning functions." + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "dJMk5lxwRvTh", + "colab_type": "code", + "colab": {} + }, + "source": [ + "class DetectionModel(nn.Module):\n", + " \"\"\"\n", + " Main class for Panoptic R-CNN. 
Currently supports boxes and masks.\n", + " It consists of three main parts:\n", + " - backbone\n", + " - rpn\n", + " - panoptic: ouputs semantic segmentation mask\n", + " - heads: takes the features + the proposals from the RPN and computes\n", + " detections / masks from it.\n", + " \"\"\"\n", + " def __init__(self, cfg, to_prune=False):\n", + " super(DetectionModel, self).__init__()\n", + "\n", + " self.backbone = build_backbone(cfg)\n", + " self.training = True\n", + " self.evaluate = False\n", + " self.rpn = build_rpn(cfg, self.backbone.out_channels)\n", + " self.roi_heads = build_roi_heads(cfg, self.backbone.out_channels)\n", + " \n", + " # Pruning Masks\n", + " self.binary_masks = None\n", + " self.to_prune = to_prune \n", + " \n", + " \n", + " ###################################### \n", + " ########Pruning Functions#############\n", + " \n", + " def prune(self, sparsity_rate):\n", + " \n", + " \n", + " self.sparsity = sparsity_rate \n", + " self.binary_masks = {}\n", + "\n", + "\n", + " for k, m in enumerate(self.modules()):\n", + "\n", + " if isinstance(m, nn.Conv2d):\n", + "\n", + " weight = m.weight.data.view(-1).clone().abs()\n", + " y, i = torch.sort(weight)\n", + "\n", + " spars_index = int(weight.shape[0]*self.sparsity/100)\n", + " threshold = y[spars_index]\n", + "\n", + " mask = weight.gt(threshold).float().cuda()\n", + " mask = mask.view(m.weight.data.shape)\n", + "\n", + " self.binary_masks[k] = mask\n", + "\n", + " m.weight.data.mul_(mask)\n", + " \n", + " def applyMasks(self):\n", + " \n", + " if not self.binary_masks:\n", + " return\n", + " \n", + " for k, m in enumerate(self.modules()):\n", + " \n", + " if isinstance(m, nn.Conv2d):\n", + " \n", + " mask = self.binary_masks[k]\n", + " m.weight.data.mul_(mask)\n", + " \n", + " \n", + " def pruningStats(self):\n", + " \n", + " total = 0\n", + " total_zero_weights = 0\n", + " for k, m in enumerate(self.modules()):\n", + "\n", + " if isinstance(m, nn.Conv2d):\n", + "\n", + " weight = m.weight.data.view(-1).clone().abs()\n", + " non_zero_weights = weight.gt(0).float().cuda().sum()\n", + " total_weights = weight.shape[0]\n", + " zero_weights = 100 - int(non_zero_weights*100/total_weights)\n", + " total += 1\n", + " total_zero_weights += zero_weights\n", + "\n", + " return total_zero_weights/total\n", + " \n", + " \n", + " ######################################\n", + " ######################################\n", + " \n", + "\n", + " def forward(self, images, targets=None):\n", + " \"\"\"\n", + " Arguments:\n", + " images (list[Tensor] or ImageList): images to be processed\n", + " targets (list[BoxList]): ground-truth boxes present in the image (optional)\n", + " Returns:\n", + " result (list[BoxList] or dict[Tensor]): the output from the model.\n", + " During training, it returns a dict[Tensor] which contains the losses.\n", + " During testing, it returns list[BoxList] contains additional fields\n", + " like `scores`, `labels` and `mask` (for Mask R-CNN models).\n", + " \"\"\"\n", + " \n", + " if self.to_prune:\n", + " self.applyMasks()\n", + "\n", + " images = to_image_list(images)\n", + " features = self.backbone(images.tensors) \n", + " proposals, proposal_losses = self.rpn(images, features, targets)\n", + " \n", + " \n", + " if self.roi_heads:\n", + " x, result, detector_losses = self.roi_heads(features, proposals, targets)\n", + " else:\n", + " # RPN-only models don't have roi_heads\n", + " x = features\n", + " result = proposals\n", + " detector_losses = {}\n", + "\n", + " if self.training: \n", + " losses = {}\n", + " 
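# merge the ROI-head losses (classifier, box, mask) with the RPN losses;\n", + " # do_train sums this dict and backpropagates the total\n", + " 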
losses.update(detector_losses)\n", + " losses.update(proposal_losses)\n", + " \n", + " return losses\n", + " \n", + " return result\n", + " " + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "NVjPYFN1Pz6D", + "colab_type": "text" + }, + "source": [ + "### Build Detection Network" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "WE6K5qZ7Pt5T", + "colab_type": "code", + "colab": {} + }, + "source": [ + "def build_detection_network(cfg, to_prune=False):\n", + " return DetectionModel(cfg, to_prune)" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "DkbBHhFtciGa", + "colab_type": "text" + }, + "source": [ + "### Compute Target Sparsity\n", + "\n", + "The target sparsity for the current step is computed with the gradual (cubic) pruning schedule from the \"To Prune or not to Prune\" [paper](https://arxiv.org/abs/1710.01878)." + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "WF5oDgUDcqAc", + "colab_type": "code", + "colab": {} + }, + "source": [ + "def compute_target_sparsity(starting_step, current_step, ending_step, final_sparsity, initial_sparsity, span=100):\n", + " return final_sparsity + (initial_sparsity - final_sparsity) * ( (1 - ( (current_step - starting_step - span)/(ending_step - starting_step) ) )**3 )\n", + " " + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "5cQ6oh2-Fa6G", + "colab_type": "text" + }, + "source": [ + "### Prune Model" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "APsQaRD-FdKR", + "colab_type": "code", + "colab": {} + }, + "source": [ + "def prune(model, meta):\n", + " \n", + " starting_step = meta['starting_step']\n", + " current_step = meta['current_step']\n", + " ending_step = meta['ending_step']\n", + " final_sparsity = meta['final_sparsity']\n", + " initial_sparsity = meta['initial_sparsity']\n", + " span = meta['span']\n", + " \n", + " sparsity = compute_target_sparsity(starting_step, current_step, ending_step, final_sparsity, initial_sparsity, span=span)\n", + " model.prune(sparsity)\n" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "kkLKDmRC0-CE", + "colab_type": "text" + }, + "source": [ + "### Train Pruned Model\n", + "\n", + "The train function is the entry point into the training process. It creates the data loader, optimiser and LR scheduler, and loads the starting weights from a checkpoint. " + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "4e2-533F1Qmu", + "colab_type": "code", + "colab": {} + }, + "source": [ + "# See if we can use apex.DistributedDataParallel instead of the torch default,\n", + "# and enable mixed-precision via apex.amp\n", + "try:\n", + " from apex import amp\n", + "except ImportError:\n", + " raise ImportError('Use APEX for multi-precision via apex.amp')\n", + " \n", + "def reduce_loss_dict(loss_dict):\n", + " \"\"\"\n", + " Reduce the loss dictionary from all processes so that process with rank\n", + " 0 has the averaged results. 
Returns a dict with the same fields as\n", + " loss_dict, after reduction.\n", + " \"\"\"\n", + " world_size = get_world_size()\n", + " if world_size < 2:\n", + " return loss_dict\n", + " with torch.no_grad():\n", + " loss_names = []\n", + " all_losses = []\n", + " for k in sorted(loss_dict.keys()):\n", + " loss_names.append(k)\n", + " all_losses.append(loss_dict[k])\n", + " all_losses = torch.stack(all_losses, dim=0)\n", + " dist.reduce(all_losses, dst=0)\n", + " if dist.get_rank() == 0:\n", + " # only main process gets accumulated, so only divide by\n", + " # world_size in this case\n", + " all_losses /= world_size\n", + " reduced_losses = {k: v for k, v in zip(loss_names, all_losses)}\n", + " return reduced_losses\n", + "\n", + "\n", + "def do_train(\n", + " model,\n", + " data_loader,\n", + " optimizer,\n", + " scheduler,\n", + " checkpointer,\n", + " device,\n", + " checkpoint_period,\n", + " arguments,\n", + " to_prune\n", + "):\n", + " logger = logging.getLogger(\"maskrcnn_benchmark.trainer\")\n", + " logger.error(\"Start training\")\n", + " meters = MetricLogger(delimiter=\" \")\n", + " max_iter = len(data_loader)\n", + " start_iter = arguments[\"iteration\"]\n", + " model.train()\n", + " start_training_time = time.time()\n", + " end = time.time()\n", + " \n", + " \n", + " for iteration, (images, targets, _) in enumerate(data_loader, start_iter):\n", + " \n", + " if any(len(target) < 1 for target in targets):\n", + " logger.error(f\"Iteration={iteration + 1} || Image Ids used for training {_} || targets Length={[len(target) for target in targets]}\" )\n", + " continue\n", + " \n", + " data_time = time.time() - end\n", + " iteration = iteration + 1\n", + " arguments[\"iteration\"] = iteration\n", + " \n", + " if to_prune:\n", + " prune_meta['current_step'] = iteration\n", + "\n", + " scheduler.step()\n", + "\n", + " images = images.to(device)\n", + " targets = [target.to(device) for target in targets]\n", + " \n", + " loss_dict = model(images, targets)\n", + " \n", + " losses = sum(loss for loss in loss_dict.values())\n", + " \n", + " # reduce losses over all GPUs for logging purposes\n", + " loss_dict_reduced = reduce_loss_dict(loss_dict)\n", + " losses_reduced = sum(loss for loss in loss_dict_reduced.values())\n", + " meters.update(loss=losses_reduced, **loss_dict_reduced)\n", + "\n", + " optimizer.zero_grad()\n", + " # Note: If mixed precision is not used, this ends up doing nothing\n", + " # Otherwise apply loss scaling for mixed-precision recipe\n", + " with amp.scale_loss(losses, optimizer) as scaled_losses:\n", + " scaled_losses.backward()\n", + " optimizer.step()\n", + "\n", + " batch_time = time.time() - end\n", + " end = time.time()\n", + " meters.update(time=batch_time, data=data_time)\n", + "\n", + " eta_seconds = meters.time.global_avg * (max_iter - iteration)\n", + " eta_string = str(datetime.timedelta(seconds=int(eta_seconds)))\n", + " \n", + " \n", + " # Pruning code\n", + " if to_prune:\n", + " if (prune_meta['current_step'] - prune_meta['starting_step']) % prune_meta['span'] == 0 and prune_meta['current_step'] > prune_meta['starting_step'] and prune_meta['ending_step'] > prune_meta['current_step']:\n", + " prune(model, prune_meta)\n", + "\n", + " if iteration % 20 == 0 or iteration == max_iter:\n", + " model.applyMasks()\n", + " logger.info(\n", + " meters.delimiter.join(\n", + " [\n", + " \"eta: {eta}\",\n", + " \"iter: {iter}\",\n", + " \"{meters}\",\n", + " \"sparsity: {sparsity}\",\n", + " \"lr: {lr:.6f}\",\n", + " \"max mem: {memory:.0f}\",\n", + " \n", + " 
]\n", + " ).format(\n", + " eta=eta_string,\n", + " iter=iteration,\n", + " meters=str(meters),\n", + " sparsity=model.pruningStats(),\n", + " lr=optimizer.param_groups[0][\"lr\"],\n", + " memory=torch.cuda.max_memory_allocated() / 1024.0 / 1024.0,\n", + " )\n", + " )\n", + " if iteration % checkpoint_period == 0:\n", + " checkpointer.save(\"model_{:07d}\".format(iteration), **arguments)\n", + " if iteration == max_iter:\n", + " checkpointer.save(\"model_final\", **arguments)\n", + "\n", + " total_training_time = time.time() - start_training_time\n", + " total_time_str = str(datetime.timedelta(seconds=total_training_time))\n", + " logger.info(\n", + " \"Total training time: {} ({:.4f} s / it)\".format(\n", + " total_time_str, total_training_time / (max_iter)\n", + " ))\n", + "\n", + "def train(cfg, local_rank, distributed, dataset, to_prune):\n", + " model = build_detection_network(cfg, to_prune)\n", + "\n", + " device = torch.device('cuda')\n", + " model.to(device)\n", + " \n", + " optimizer = make_optimizer(cfg, model)\n", + " scheduler = make_lr_scheduler(cfg, optimizer) \n", + "\n", + " # Initialize mixed-precision training\n", + " use_mixed_precision = cfg.DTYPE == \"float16\"\n", + " amp_opt_level = 'O1' if use_mixed_precision else 'O0'\n", + " model, optimizer = amp.initialize(model, optimizer, opt_level=amp_opt_level)\n", + "\n", + " if distributed:\n", + " model = torch.nn.parallel.DistributedDataParallel(\n", + " model, device_ids=[local_rank], output_device=local_rank,\n", + " # this should be removed if we update BatchNorm stats\n", + " broadcast_buffers=False,\n", + " )\n", + "\n", + " arguments = {}\n", + " arguments[\"iteration\"] = 0\n", + "\n", + " output_dir = cfg.OUTPUT_DIR\n", + " save_to_disk = get_rank() == 0\n", + " checkpointer = DetectronCheckpointer(\n", + " cfg, model, optimizer, scheduler, output_dir, save_to_disk\n", + " )\n", + " extra_checkpoint_data = checkpointer.load(cfg.MODEL.WEIGHT)\n", + " arguments.update(extra_checkpoint_data)\n", + "\n", + "\n", + " data_loader = build_data_loader(cfg, dataset)\n", + "\n", + " checkpoint_period = cfg.SOLVER.CHECKPOINT_PERIOD\n", + "\n", + " do_train(\n", + " model,\n", + " data_loader,\n", + " optimizer,\n", + " scheduler,\n", + " checkpointer,\n", + " device,\n", + " checkpoint_period,\n", + " arguments,\n", + " to_prune\n", + " )\n", + "\n", + " return model" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "pVeJNhzy2DZs", + "colab_type": "text" + }, + "source": [ + "## Unpruned Model Driver\n", + "\n", + "Here we fire off training of a regular unpruned model by calling the above function. before that we set some important config for our training. We make our dataset and update our config. Then we fire off training !" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "XtgfPl7F2CEP", + "colab_type": "code", + "colab": {} + }, + "source": [ + "config_file = \"shapes_config.yaml\"\n", + "\n", + "# update the config options with the config file\n", + "cfg.merge_from_file(config_file)\n", + "\n", + "cfg.merge_from_list(['OUTPUT_DIR', 'segDirNotPruned']) # The output folder where all our model checkpoints will be saved during training.\n", + "cfg.merge_from_list(['SOLVER.IMS_PER_BATCH', 25]) # Number of images to take insiade a single batch. This number depends on the size of your GPU\n", + "cfg.merge_from_list(['SOLVER.BASE_LR', 0.0001]) # The Learning Rate when training starts. 
Please check Detectron scaling rules to determine your learning for your GPU setup. \n", + "cfg.merge_from_list(['SOLVER.MAX_ITER', 1000]) # The number of training iterations that will be executed during training. One iteration is given as one forward and backward pass of a mini batch of the network\n", + "cfg.merge_from_list(['SOLVER.STEPS', \"(700, 800)\"]) # These two numberes represent after how many iterations is the learning rate divided by 10. \n", + "cfg.merge_from_list(['TEST.IMS_PER_BATCH', 1]) # Batch size during testing/evaluation\n", + "cfg.merge_from_list(['MODEL.RPN.FPN_POST_NMS_TOP_N_TRAIN', 2000]) # This determines how many region proposals to take in for processing into the stage after the RPN. The rule is 1000*batch_size = 4*1000 \n", + "cfg.merge_from_list(['SOLVER.CHECKPOINT_PERIOD', 100]) # After how many iterations does one want to save the model.\n", + "cfg.merge_from_list(['INPUT.MIN_SIZE_TRAIN', \"(192, )\"])\n", + "cfg.merge_from_list(['INPUT.MAX_SIZE_TRAIN', 192])\n", + "# Make the Output dir if one doesnt exist.\n", + "output_dir = cfg.OUTPUT_DIR\n", + "if output_dir:\n", + " mkdir(output_dir)\n", + "\n", + "# Start training.\n", + "model = train(cfg, local_rank=1, distributed=False, dataset=ShapeDataset(2000), to_prune=False)" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "uKfqWq3jKBcb", + "colab_type": "text" + }, + "source": [ + "# Prune Model Driver\n", + "\n", + "Here we fire off training of a pruned model by calling the above function." + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "3MMMJlQHaXbV", + "colab_type": "code", + "colab": {} + }, + "source": [ + "!rm -rf segDirPruned" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "KI4Xye__asS0", + "colab_type": "code", + "colab": {} + }, + "source": [ + "config_file = \"shapes_config.yaml\"\n", + "\n", + "# update the config options with the config file\n", + "cfg.merge_from_file(config_file)\n", + "\n", + "cfg.merge_from_list(['OUTPUT_DIR', 'segDirPruned']) # The output folder where all our model checkpoints will be saved during training.\n", + "cfg.merge_from_list(['SOLVER.IMS_PER_BATCH', 25]) # Number of images to take insiade a single batch. This number depends on the size of your GPU\n", + "cfg.merge_from_list(['SOLVER.BASE_LR', 0.0001]) # The Learning Rate when training starts. Please check Detectron scaling rules to determine your learning for your GPU setup. \n", + "cfg.merge_from_list(['SOLVER.MAX_ITER', 1000]) # The number of training iterations that will be executed during training. One iteration is given as one forward and backward pass of a mini batch of the network\n", + "cfg.merge_from_list(['SOLVER.STEPS', \"(800, 900)\"]) # These two numberes represent after how many iterations is the learning rate divided by 10. \n", + "cfg.merge_from_list(['TEST.IMS_PER_BATCH', 1]) # Batch size during testing/evaluation\n", + "cfg.merge_from_list(['MODEL.RPN.FPN_POST_NMS_TOP_N_TRAIN', 2000]) # This determines how many region proposals to take in for processing into the stage after the RPN. 
The rule is 1000*batch_size = 4*1000 \n", + "cfg.merge_from_list(['SOLVER.CHECKPOINT_PERIOD', 100]) # After how many iterations does one want to save the model.\n", + "cfg.merge_from_list(['INPUT.MIN_SIZE_TRAIN', \"(192, )\"])\n", + "cfg.merge_from_list(['INPUT.MAX_SIZE_TRAIN', 192])\n", + "# Make the Output dir if one doesnt exist.\n", + "output_dir = cfg.OUTPUT_DIR\n", + "if output_dir:\n", + " mkdir(output_dir)\n", + " \n", + "# Prune Config\n", + "# 70 percent sparsity \n", + "prune_meta = {\n", + " \"starting_step\" : 0,\n", + " \"current_step\": 0,\n", + " \"ending_step\": 700, # final 200 steps train with frozen masks \n", + " \"final_sparsity\": 70,\n", + " \"initial_sparsity\": 0,\n", + " \"span\": 40\n", + "}\n", + "\n", + "# Start training.\n", + "pruned_model = train(cfg, local_rank=1, distributed=False, dataset=ShapeDataset(2000), to_prune=True)" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "Zp8nWrizaPBN", + "colab_type": "text" + }, + "source": [ + "# Evaluate" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "9cyRGdaNaRQI", + "colab_type": "code", + "colab": {} + }, + "source": [ + "def do_inference(\n", + " model,\n", + " data_loader,\n", + " dataset_name,\n", + " iou_types=(\"bbox\",),\n", + " box_only=False,\n", + " device=\"cuda\",\n", + " expected_results=(),\n", + " expected_results_sigma_tol=4,\n", + " output_folder=None,):\n", + " \n", + " # convert to a torch.device for efficiency\n", + " device = torch.device(device)\n", + " num_devices = get_world_size()\n", + " logger = logging.getLogger(\"maskrcnn_benchmark.inference\")\n", + " dataset = data_loader.dataset\n", + " logger.info(\"Start evaluation on {} dataset({} images).\".format(dataset_name, len(dataset)))\n", + " total_timer = Timer()\n", + " inference_timer = Timer()\n", + " total_timer.tic()\n", + " predictions = compute_on_dataset(model, data_loader, device, inference_timer)\n", + " \n", + " # wait for all processes to complete before measuring the time\n", + " synchronize()\n", + " total_time = total_timer.toc()\n", + " total_time_str = get_time_str(total_time)\n", + " logger.info(\n", + " \"Total run time: {} ({} s / img per device, on {} devices)\".format(\n", + " total_time_str, total_time * num_devices / len(dataset), num_devices\n", + " )\n", + " )\n", + " \n", + " total_infer_time = get_time_str(inference_timer.total_time)\n", + " logger.info(\n", + " \"Model inference time: {} ({} s / img per device, on {} devices)\".format(\n", + " total_infer_time,\n", + " inference_timer.total_time * num_devices / len(dataset),\n", + " num_devices,\n", + " )\n", + " )\n", + " \n", + " predictions = _accumulate_predictions_from_multiple_gpus(predictions)\n", + " if not is_main_process():\n", + " return\n", + "\n", + " if output_folder:\n", + " torch.save(predictions, os.path.join(output_folder, \"predictions.pth\"))\n", + "\n", + " extra_args = dict(\n", + " box_only=box_only,\n", + " iou_types=iou_types,\n", + " expected_results=expected_results,\n", + " expected_results_sigma_tol=expected_results_sigma_tol,\n", + " )\n", + "\n", + " return coco_evaluation(dataset=dataset,\n", + " predictions=predictions,\n", + " output_folder=output_folder,\n", + " **extra_args)\n", + "\n", + "def run_test(cfg, model, distributed, dataset):\n", + " if distributed:\n", + " model = model.module\n", + " torch.cuda.empty_cache() # TODO check if it helps\n", + " iou_types = (\"bbox\",)\n", + " \n", + " data_loaders_val = build_data_loader(cfg, dataset, 
is_train=False)\n", + " mkdir(\"shapeVal\")\n", + " model.evaluate = True\n", + " \n", + " for data_loader in data_loaders_val:\n", + " do_inference(\n", + " model,\n", + " data_loader, # For test we need this as zero\n", + " dataset_name=\"shape-val\",\n", + " iou_types=iou_types,\n", + " box_only=False if cfg.MODEL.RETINANET_ON else cfg.MODEL.RPN_ONLY,\n", + " device=cfg.MODEL.DEVICE,\n", + " expected_results=cfg.TEST.EXPECTED_RESULTS,\n", + " expected_results_sigma_tol=cfg.TEST.EXPECTED_RESULTS_SIGMA_TOL,\n", + " output_folder=\"shapeVal\",\n", + " )\n", + " synchronize()\n" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "P4dHs-N1W9Cg", + "colab_type": "text" + }, + "source": [ + "## Evaluate Pruned and Unpruned model\n", + "\n", + "Here we check the differences between a pruned and unpruned model. By checking the sparsity rate and COCO mAP evaluation." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "JHGkrOKhWn4V", + "colab_type": "text" + }, + "source": [ + "### Verify sparsity \n", + "\n", + "Here we check the rate of sparsity of our convolutional kernels. " + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "DtAXrFoxWdAh", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 52 + }, + "outputId": "0369b24b-46ef-4204-a3b6-50b9477556c9" + }, + "source": [ + "print(\"Sparsity of convolution weights of Pruned Model is \", pruned_model.pruningStats())\n", + "\n", + "print(\"Sparsity of convolution weights of Unpruned Model is \", model.pruningStats())" + ], + "execution_count": 39, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Sparsity of convolution weights of Pruned Model is 70.02898550724638\n", + "Sparsity of convolution weights of Unpruned Model is 0.0\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "Y_OwJDO2W1Bg", + "colab_type": "text" + }, + "source": [ + "### Evaluate Pruned Model" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "awvrvnEiHfNs", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 674 + }, + "outputId": "b3689112-a9f9-4741-a275-15dd3e0bb706" + }, + "source": [ + "run_test(cfg, model=pruned_model, distributed=False, dataset=ShapeDataset(50))" + ], + "execution_count": 40, + "outputs": [ + { + "output_type": "stream", + "text": [ + "loading annotations into memory...\n", + "Done (t=0.00s)\n", + "creating index...\n", + "index created!\n", + "2019-07-12 05:21:57,947 maskrcnn_benchmark.inference INFO: Start evaluation on shape-val dataset(50 images).\n" + ], + "name": "stdout" + }, + { + "output_type": "stream", + "text": [ + "100%|██████████| 50/50 [00:04<00:00, 10.73it/s]" + ], + "name": "stderr" + }, + { + "output_type": "stream", + "text": [ + "2019-07-12 05:22:03,019 maskrcnn_benchmark.inference INFO: Total run time: 0:00:05.069178 (0.10138356208801269 s / img per device, on 1 devices)\n", + "2019-07-12 05:22:03,021 maskrcnn_benchmark.inference INFO: Model inference time: 0:00:04.547834 (0.09095668315887451 s / img per device, on 1 devices)\n", + "2019-07-12 05:22:03,040 maskrcnn_benchmark.inference INFO: Preparing results for COCO format\n", + "2019-07-12 05:22:03,045 maskrcnn_benchmark.inference INFO: Preparing bbox results\n", + "2019-07-12 05:22:03,060 maskrcnn_benchmark.inference INFO: Evaluating predictions\n" + ], + "name": "stdout" + }, + { + "output_type": "stream", + "text": [ + "\n" + ], + "name": "stderr" + }, + { + 
"output_type": "stream", + "text": [ + "Loading and preparing results...\n", + "DONE (t=0.01s)\n", + "creating index...\n", + "index created!\n", + "Running per image evaluation...\n", + "Evaluate annotation type *bbox*\n", + "DONE (t=0.16s).\n", + "Accumulating evaluation results...\n", + "DONE (t=0.03s).\n", + " Average Precision (AP) @[ IoU=0.50:0.95 | area= all | maxDets=100 ] = 0.639\n", + " Average Precision (AP) @[ IoU=0.50 | area= all | maxDets=100 ] = 0.880\n", + " Average Precision (AP) @[ IoU=0.75 | area= all | maxDets=100 ] = 0.778\n", + " Average Precision (AP) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n", + " Average Precision (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.644\n", + " Average Precision (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = -1.000\n", + " Average Recall (AR) @[ IoU=0.50:0.95 | area= all | maxDets= 1 ] = 0.562\n", + " Average Recall (AR) @[ IoU=0.50:0.95 | area= all | maxDets= 10 ] = 0.699\n", + " Average Recall (AR) @[ IoU=0.50:0.95 | area= all | maxDets=100 ] = 0.702\n", + " Average Recall (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n", + " Average Recall (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.702\n", + " Average Recall (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = -1.000\n", + "2019-07-12 05:22:03,322 maskrcnn_benchmark.inference INFO: \n", + "Task: bbox\n", + "AP, AP50, AP75, APs, APm, APl\n", + "0.6387, 0.8799, 0.7780, -1.0000, 0.6443, -1.0000\n", + "\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "T0zqqGPJXi5n", + "colab_type": "text" + }, + "source": [ + "### Evaluate Unpruned Model" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "gIuhGoZDXmtz", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 674 + }, + "outputId": "b0310be5-cfae-42b7-9586-50b98176236a" + }, + "source": [ + "run_test(cfg, model=model, distributed=False, dataset=ShapeDataset(50))" + ], + "execution_count": 41, + "outputs": [ + { + "output_type": "stream", + "text": [ + "loading annotations into memory...\n", + "Done (t=0.00s)\n", + "creating index...\n", + "index created!\n", + "2019-07-12 05:22:03,382 maskrcnn_benchmark.inference INFO: Start evaluation on shape-val dataset(50 images).\n" + ], + "name": "stdout" + }, + { + "output_type": "stream", + "text": [ + "100%|██████████| 50/50 [00:04<00:00, 11.19it/s]" + ], + "name": "stderr" + }, + { + "output_type": "stream", + "text": [ + "2019-07-12 05:22:08,099 maskrcnn_benchmark.inference INFO: Total run time: 0:00:04.715749 (0.09431497573852539 s / img per device, on 1 devices)\n", + "2019-07-12 05:22:08,101 maskrcnn_benchmark.inference INFO: Model inference time: 0:00:04.222170 (0.08444340705871582 s / img per device, on 1 devices)\n", + "2019-07-12 05:22:08,118 maskrcnn_benchmark.inference INFO: Preparing results for COCO format\n", + "2019-07-12 05:22:08,121 maskrcnn_benchmark.inference INFO: Preparing bbox results\n", + "2019-07-12 05:22:08,141 maskrcnn_benchmark.inference INFO: Evaluating predictions\n" + ], + "name": "stdout" + }, + { + "output_type": "stream", + "text": [ + "\n" + ], + "name": "stderr" + }, + { + "output_type": "stream", + "text": [ + "Loading and preparing results...\n", + "DONE (t=0.01s)\n", + "creating index...\n", + "index created!\n", + "Running per image evaluation...\n", + "Evaluate annotation type *bbox*\n", + "DONE (t=0.12s).\n", + "Accumulating evaluation results...\n", + "DONE (t=0.03s).\n", + " Average Precision (AP) @[ 
IoU=0.50:0.95 | area= all | maxDets=100 ] = 0.686\n", + " Average Precision (AP) @[ IoU=0.50 | area= all | maxDets=100 ] = 0.943\n", + " Average Precision (AP) @[ IoU=0.75 | area= all | maxDets=100 ] = 0.767\n", + " Average Precision (AP) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n", + " Average Precision (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.694\n", + " Average Precision (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = -1.000\n", + " Average Recall (AR) @[ IoU=0.50:0.95 | area= all | maxDets= 1 ] = 0.577\n", + " Average Recall (AR) @[ IoU=0.50:0.95 | area= all | maxDets= 10 ] = 0.746\n", + " Average Recall (AR) @[ IoU=0.50:0.95 | area= all | maxDets=100 ] = 0.749\n", + " Average Recall (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = -1.000\n", + " Average Recall (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.749\n", + " Average Recall (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = -1.000\n", + "2019-07-12 05:22:08,332 maskrcnn_benchmark.inference INFO: \n", + "Task: bbox\n", + "AP, AP50, AP75, APs, APm, APl\n", + "0.6858, 0.9433, 0.7668, -1.0000, 0.6935, -1.0000\n", + "\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "ccHt8YMdKq6K", + "colab_type": "text" + }, + "source": [ + "# Visualise\n", + "\n", + "An important part of validating your model is visualising the results. This is done below" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "WOXgrPWIa-ND", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 87 + }, + "outputId": "aa5ff55c-01d5-4128-efcc-013f342da713" + }, + "source": [ + "# Load Dataset\n", + "dataset = ShapeDataset(50)" + ], + "execution_count": 52, + "outputs": [ + { + "output_type": "stream", + "text": [ + "loading annotations into memory...\n", + "Done (t=0.00s)\n", + "creating index...\n", + "index created!\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "-vim-GMFa_1X", + "colab_type": "text" + }, + "source": [ + "### Load unpruned model for vis" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "kb9VchvVzRpu", + "colab_type": "code", + "colab": {} + }, + "source": [ + "# Load Trained Model\n", + "config_file = \"shapes_config.yaml\"\n", + "\n", + "cfg.merge_from_file(config_file)\n", + "# manual override some options\n", + "# cfg.merge_from_list([\"MODEL.DEVICE\", \"cpu\"])\n", + "\n", + "# manual override some options\n", + "cfg.merge_from_list(['OUTPUT_DIR', 'segDirNotPruned']) # The output folder where all our model checkpoints will be saved during training.\n", + "\n", + "# update the config options with the config file\n", + "cfg.merge_from_file(config_file)\n", + "\n", + "cfg.merge_from_list(['INPUT.MIN_SIZE_TRAIN', \"(192, )\"])\n", + "cfg.merge_from_list(['INPUT.MAX_SIZE_TRAIN', 192])\n", + "\n", + "cfg.merge_from_list(['INPUT.MIN_SIZE_TEST', 192])\n", + "cfg.merge_from_list(['INPUT.MAX_SIZE_TEST', 192])\n", + "\n", + "\n", + "unpruned_demo = COCODemo(\n", + " cfg, \n", + " min_image_size=192,\n", + " confidence_threshold=0.7)\n", + "\n", + "# Add these for printing class names over your predictions.\n", + "COCODemo.CATEGORIES = [\n", + " \"__background\",\n", + " \"square\",\n", + " \"circle\",\n", + " \"triangle\"\n", + "]\n", + "\n" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "-uCq7_NPbFT9", + "colab_type": "text" + }, + "source": [ + "### Load pruned model for vis" + ] + }, + { + 
"cell_type": "code", + "metadata": { + "id": "goyZr5o4bFpC", + "colab_type": "code", + "colab": {} + }, + "source": [ + "# Load Trained Model\n", + "config_file = \"shapes_config.yaml\"\n", + "\n", + "cfg.merge_from_file(config_file)\n", + "# manual override some options\n", + "# cfg.merge_from_list([\"MODEL.DEVICE\", \"cpu\"])\n", + "\n", + "# manual override some options\n", + "cfg.merge_from_list(['OUTPUT_DIR', 'segDirPruned']) # The output folder where all our model checkpoints will be saved during training.\n", + "\n", + "# update the config options with the config file\n", + "cfg.merge_from_file(config_file)\n", + "\n", + "cfg.merge_from_list(['INPUT.MIN_SIZE_TRAIN', \"(192, )\"])\n", + "cfg.merge_from_list(['INPUT.MAX_SIZE_TRAIN', 192])\n", + "\n", + "cfg.merge_from_list(['INPUT.MIN_SIZE_TEST', 192])\n", + "cfg.merge_from_list(['INPUT.MAX_SIZE_TEST', 192])\n", + "\n", + "\n", + "pruned_demo = COCODemo(\n", + " cfg, \n", + " min_image_size=192,\n", + " confidence_threshold=0.7)\n", + "\n", + "# Add these for printing class names over your predictions.\n", + "COCODemo.CATEGORIES = [\n", + " \"__background\",\n", + " \"square\",\n", + " \"circle\",\n", + " \"triangle\"\n", + "]\n" + ], + "execution_count": 0, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "c8b6wHAXjyE5", + "colab_type": "text" + }, + "source": [ + "## Visualise" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "RSPq97dtWFrA", + "colab_type": "text" + }, + "source": [ + "### Input Image" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "Ir-cYCvKSbNI", + "colab_type": "code", + "outputId": "83911692-1805-464a-a163-48d575e2340e", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 485 + } + }, + "source": [ + "# Visualise Input Image\n", + "rows = 2\n", + "cols = 2\n", + "fig = plt.figure(figsize=(8, 8))\n", + "for i in range(1, rows*cols+1):\n", + " img = dataset.load_image(i+5)\n", + " \n", + " fig.add_subplot(rows, cols, i)\n", + " plt.imshow(img)\n", + "plt.show()" + ], + "execution_count": 56, + "outputs": [ + { + "output_type": "display_data", + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAecAAAHVCAYAAADLvzPyAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzt3W2wXWV99/Hvj8TwJMiDNMWEW7BS\ne2OnVSZD6WitI31Aag29ax28nRqV20w7arW2Y7G+wM7UGa2tVjstThRK7KCIqAPTaiulWqcvoAZE\nHlUiiiQNRFGEUR4M/O8XZ6XdxoTk7LX3Xtc+5/uZOXPWvvbaZ//POuef37muvfZKqgpJktSOg4Yu\nQJIk/SjDWZKkxhjOkiQ1xnCWJKkxhrMkSY0xnCVJaszUwjnJmUm+kmRrkvOm9TySpstelmYv03if\nc5IVwFeBXwW2AV8AXlZVt078ySRNjb0sDWNaM+fTgK1VdUdVPQJcCqyf0nNJmh57WRrAyil93TXA\nXSO3twG/sK+dc8yRxQk/MaVSpDl149e+XVXHDVzFonoZYNVBB9ehKw6falHSPHnw0e/zyGMPZzGP\nmVY471eSjcBGANYcx8p/ftdQpUhN2vWU/3Pn0DUcqNF+PuSgwzj96F8duCKpHdd896pFP2Zay9rb\ngRNGbq/txv5bVW2qqnVVtY5jj5xSGZJ62m8vw4/286qDDp5ZcdJSNa1w/gJwcpKTkqwCzgGunNJz\nSZoee1kawFSWtatqV5LXAf8CrAAuqqpbpvFckqbHXpaGMbXXnKvqU8CnpvX1Jc2GvSzNnlcIkySp\nMYazJEmNMZwlSWqM4SxJUmMMZ0mSGmM4S5LUGMNZkqTGGM6SJDXGcJYkqTGGsyRJjTGcJUlqjOEs\nSVJjDGdJkhpjOEuS1BjDWZKkxhjOkiQ1xnCWJKkxhrMkLcLhF71w6BK0DBjOkiQ1ZuXQBUhSCxYz\nIz7Qfb//6k+PW46WOcNZ0rI17SXq0a9vUGsxXNaWJKkxY8+ck5wAfAhYDRSwqarem+QY4KPAicA3\ngJdW1Xf7lyppWpZTPw91QpezaC1Gn2XtXcAfVdX1SY4ArktyFfBK4OqqekeS84DzgD/pX6qkKVrS\n/dzaGdYGtfZn7GXtqtpRVdd32w8AtwFrgPXA5m63zcDZfYuUNF32s9SWibzmnORE4NnAtcDqqtrR\n3XU3C8tkkubEUuvn1mbNe2q9Pg2j99naSZ4IfBx4Y1Xdn+S/76uqSlL7eNxGYCMAa47rW4akCZhE\nPx9y0GGzKHW/5in0dtfqErd26zVzTvIEFhr5kqr6RDd8T5Lju/uPB3bu7bFVtamq1lXVOo49sk8Z\nkiZgUv286qCDZ1OwtISNHc5Z+JP6QuC2qnr3yF1XAhu67Q3AFeOXJ2kWllI/H37RC+dq1jxqXuvW\n5PVZ1n4O8LvATUlu6Mb+FHgHcFmSc4E7gZf2K1HSDNjPUkPGDueq+g8g+7j7jHG/rqTZWyr9vBRm\nnr7+LPDynZKWgKUQynsypJc3L98pSVJjDGdJkhozd8va/3vdfw5dwpJw25bThi5BmoiluKQ96vCL\nXujS9jLkzFmSpMYYzpIkNWbulrUlCZb+cvYoz9xefpw5S5LUGMNZkqTGGM6S5s5yWtIetVy/7+XI\ncJYkqTGGsyRJjTGcJUlqjOEsSVJjDGdJkhpjOEuS1BjDWZKkxhjOkiQ1xnCWJKkx/scXkuaGV8jy\nP8FYLgxnSXNjdyAt55A2lJcHl7UlSWpM73BOsiLJF5P8Y3f7pCTXJtma5KNJVvUvU9Is2M9SGyYx\nc34DcNvI7XcC76mqpwPfBc6dwHNImg37WWpAr3BOshb4DeCD3e0ALwAu73bZDJzd5zkkzYb9LLWj\n78z5r4E3A491t48F7quqXd3tbcCavT0wycYkW5Js4d77e5YhaQIm0s+PPPbw9CuVlrixwznJi4Cd\nVXXdOI+vqk1Vta6q1nHskeOWIWkCJtnPqw46eMLVSctPn7dSPQd4cZKzgEOAI4H3AkclWdn9tb0W\n2N6/TElTZj9LDRl75lxVb6mqtVV1InAO8G9V9XLgs8BLut02AFf0rlLSVNnPUlum8T7nPwHelGQr\nC69ZXTiF55A0G/azNICJXCGsqj4HfK7bvgM4bRJfV9LszUM/f//Vn16WVwnz6mDLh1cIkySpMYaz\nJEmN8T++kDSXltN/guFy9vLjzFmSpMYYzpIkNcZwljTXlvqS71L//rR3hrMkSY0xnCVJaoxna0ua\ne0vxzG2Xs5c3Z86SloylEGjff/Wnl8T3oX4MZ0mSGmM4S1pS5nnmOa91a/J8zVnSkjRPr0MbytqT\nM2dJkhpjOEta0lqflbZen4ZhOEuS1Bhfc5a05I3OTlt4DdrZsvbHcJa0rAwV1AayFsNlbUmSGuPM\nWdKyNe1ZtLNljctwliQOPEgPv+iFhq6mzmVtSZIa0yuckxyV5PIkX05yW5JfTHJMkquS3N59PnpS\nxUqaHvv5wDhr1iz0nTm/F/jnqvoZ4OeB24DzgKur6mTg6u62pPbZz1Ijxg7nJE8CngdcCFBVj1TV\nfcB6YHO322bg7L5FSpou+1lqS5+Z80nAt4C/T/LFJB9Mcjiwuqp2dPvcDaze24OTbEyyJckW7r2/\nRxmSJmBi/fzIYw/PqGRp6eoTziuBU4ELqurZwPfZY8mrqgqovT24qjZV1bqqWsexR/YoQ9IETKyf\nVx108NSLXc7O/5tbOP9vbhm6DE1Zn3DeBmyrqmu725ez0Nz3JDkeoPu8s1+JkmbAfpYaMnY4V9Xd\nwF1JntENnQHcClwJbOjGNgBX9KpQ0tTZz/NhdMbs7Hlp63sRktcDlyRZBdwBvIqFwL8sybnAncBL\nez6HpNmwn6VG9ArnqroBWLeXu87o83UlzZ79LLXDy3dKUuP2tYS9e/zPXv/MWZajGfDynZIkNcZw\nliSpMYazJDXsQM7K9sztpcdwliSpMZ4QJkkNcja8vDlz1tT8znvvHroEadnwsp5Li+EsSVJjXNbW\nxI3OmHdvf+wNPzlUOdJc6Tv7Pf9vbvF9z0uAM2dJkhpjOEuS1BjDWRPzO++9e58ngXlymLR/kzqh\ny5PD5p+vOUvSwAxS7cmZsyRJjTGcNREHsmz9eMvekibP5e355bK2ejFspX4MT+2NM2dJkhpjOGvm\nnG1Ls+XsfP4YzpIkNcbXnDW2PjNgL+up5W7Ws9ndz+elPeeDM2dJkhpjOEuS1Jhe4ZzkD5PckuTm\nJB9JckiSk5Jcm2Rrko8mWTWpYtWGSb5f2ZPD2mE/z86QJ2h5cth8GPs15yRrgD8ATqmqB5NcBpwD\nnAW8p6ouTfJ+4FzggolUC9y25bRJfSmNwTBdmobq5+XGYNSB6rusvRI4NMlK4DBgB/AC4PLu/s3A\n2T2fQ9Js2M9SI8YO56raDvwl8E0Wmvh7wHXAfVW1q9tt
G7Bmb49PsjHJliRbuPf+ccvQEuBlPYc3\nyX5+5LGHZ1GyevCynu3rs6x9NLAeOAm4D/gYcOaBPr6qNgGbAPLzT69x69BsGJ5L2yT7+UlPOMZ+\n3gvDUIvRZ1n7V4CvV9W3quqHwCeA5wBHdctiAGuB7T1rlDR99rPUkD7h/E3g9CSHJQlwBnAr8Fng\nJd0+G4Ar+pWo5cLZ+aDs52XI2Xy7xl7Wrqprk1wOXA/sAr7IwrLWPwGXJvnzbuzCSRSq4cwyNL1y\n2DDs5+kxADWOXpfvrKrzgfP3GL4D8P1O0pyxn6V2eIUwSZqSeZg1e+Z2mwxn7dOQb3Hy7VXSbBnQ\nbTGcJUlqjP9lpPbKWas0Pmeh6suZsyRJjTGc1TRn8NLseHJYO1zW1o9oMQx977PmhcGmSXHmLElS\nYwxnSZqApTRrXkrfy7wynAXMx/uKW69PkibFcJYkqTGeECZnpFIPS3UJePf39Wevf+bAlSxPzpw1\nV+Zh+V1aSnx71TAMZ0mSGuOy9jLmDFTqxxmlpsWZs+aSf1hoSMtxqXe5fb9DM5wlSWqM4bxMLYWZ\npyeHSVqqfM15mTHMpH6W8/Kub6+aHWfOkiQ1xnDW3HM1QNJSYzhLktSY/YZzkouS7Exy88jYMUmu\nSnJ79/nobjxJ3pdka5Ibk5w6zeK1OM4wZT+Pbzm+fWpfPA7TdyAz54uBM/cYOw+4uqpOBq7ubgO8\nEDi5+9gIXDCZMqXH55nbB+xi7GdNgH+sTNd+w7mqPg98Z4/h9cDmbnszcPbI+IdqwTXAUUmOn1Sx\nkvqxn6X5MO5bqVZX1Y5u+25gdbe9BrhrZL9t3dgO9pBkIwt/jcOa48YsQwfCGaX2Y6L9fMhBh02v\n0oE4Q9Ss9T4hrKoKqDEet6mq1lXVOo49sm8Z2oflFszL7fudtEn086qDDp5CZcNw6Xb/PD7TMW44\n37N7eav7vLMb3w6cMLLf2m5MUrvsZ6kx44bzlcCGbnsDcMXI+Cu6szxPB743slwmzYQnhy2a/Sw1\nZr+vOSf5CPB84MlJtgHnA+8ALktyLnAn8NJu908BZwFbgR8Ar5pCzToAhpP2xn4+MC7VLo6X9Zy8\n/YZzVb1sH3edsZd9C3ht36IkTYf9LM0HrxCmJcvlbUnzynBeggwkaXwuaY/PYzc5hrMkSY3x/3Ne\nQpwx793vvPduPvaGnxy6DM0JT2pSC5w5S5LUGMNZkqTGuKy9BLicvX+7j5HL25LmgTNnSZIaYzhr\nWXGVQdI8MJznnGEjSUuP4SxJUmM8IWxOOWMenyeHSWqdM2dJkhpjOEuS1BjDeQ65pD0ZHkdJrTKc\ntaz530pKapHhLElSYzxbe444w5Ok5cGZs4R/+Ehqi+EsSVJjDOc54cxOkpYPw1mSpMbsN5yTXJRk\nZ5KbR8beleTLSW5M8skkR43c95YkW5N8JcmvT6vw5cK3+szOcjjW9rM0Hw5k5nwxcOYeY1cBP1tV\nPwd8FXgLQJJTgHOAZ3aP+bskKyZWraS+LsZ+lpq333Cuqs8D39lj7DNVtau7eQ2wttteD1xaVQ9X\n1deBrcBpE6xXUg/2szQfJvGa86uBT3fba4C7Ru7b1o39mCQbk2xJsoV7759AGUvPUl9ibdVyWN5+\nHL37+ZHHHp5yidLS1yuck7wV2AVcstjHVtWmqlpXVes49sg+ZUhTsdwCelL9vOqggydfnLTMjH2F\nsCSvBF4EnFFV1Q1vB04Y2W1tNyapYfaz1JaxwjnJmcCbgV+uqh+M3HUl8OEk7waeApwM/GfvKpeZ\n5TZj07DsZ6k9+w3nJB8Bng88Ock24HwWzuY8GLgqCcA1VfV7VXVLksuAW1lYHnttVT06reKXIoO5\nLbt/Hh97w08OXMlk2M/SfNhvOFfVy/YyfOHj7P924O19ipI0HfazNB+8QpgkSY0xnBuxzN++0zx/\nNpJmyXCWJKkxhrMkSY0Z+33OmgyXS+fHUjtzW1K7nDlLktSY/M/FgAYsIvkW8H3g20PXsg9Pps3a\nWq0L2q2t1brgx2t7alUdN1Qx40ryAPCVoevYh3n6+bei1bpgfmpbdC83Ec4ASbZU1bqh69ibVmtr\ntS5ot7ZW64K2a1uMlr8Pa1u8VuuCpV2by9qSJDXGcJYkqTEthfOmoQt4HK3W1mpd0G5trdYFbde2\nGC1/H9a2eK3WBUu4tmZec5YkSQtamjlLkiQaCOckZyb5SpKtSc4buJYTknw2ya1Jbknyhm78bUm2\nJ7mh+zhroPq+keSmroYt3dgxSa5Kcnv3+egZ1/SMkeNyQ5L7k7xxqGOW5KIkO5PcPDK212OUBe/r\nfvduTHLqALW9K8mXu+f/ZJKjuvETkzw4cvzeP83aJqWVfraXx67Lfh6/rsn2clUN9gGsAL4GPA1Y\nBXwJOGXAeo4HTu22jwC+CpwCvA344yGPVVfTN4An7zH2F8B53fZ5wDsH/nneDTx1qGMGPA84Fbh5\nf8cIOAv4NBDgdODaAWr7NWBlt/3OkdpOHN1vHj5a6md7eWI/T/v5wOuaaC8PPXM+DdhaVXdU1SPA\npcD6oYqpqh1VdX23/QBwG7BmqHoO0Hpgc7e9GTh7wFrOAL5WVXcOVUBVfR74zh7D+zpG64EP1YJr\ngKOSHD/L2qrqM1W1q7t5DbB2Ws8/A830s708EfbzIuqadC8PHc5rgLtGbm+jkQZKciLwbODabuh1\n3XLFRUMsN3UK+EyS65Js7MZWV9WObvtuYPUwpQFwDvCRkdstHDPY9zFq7ffv1Sz85b/bSUm+mOTf\nk/zSUEUtQmvHE7CXe7Cfx9e7l4cO5yYleSLwceCNVXU/cAHwU8CzgB3AXw1U2nOr6lTghcBrkzxv\n9M5aWEMZ5PT7JKuAFwMf64ZaOWY/Yshj9HiSvBXYBVzSDe0A/ldVPRt4E/DhJEcOVd+8spfHYz+P\nb1K9PHQ4bwdOGLm9thsbTJInsNDMl1TVJwCq6p6qerSqHgM+wMLy3cxV1fbu807gk10d9+xeuuk+\n7xyiNhb+kbm+qu7pamzimHX2dYya+P1L8krgRcDLu39sqKqHq+rebvs6Fl7L/elZ17ZITRzP3ezl\nXuznMUyyl4cO5y8AJyc5qftL7RzgyqGKSRLgQuC2qnr3yPjo6xa/Bdy852NnUNvhSY7Yvc3CyQc3\ns3C8NnS7bQCumHVtnZcxsgTWwjEbsa9jdCXwiu4sz9OB740sl81EkjOBNwMvrqofjIwfl2RFt/00\n4GTgjlnWNoZm+tle7s1+XqSJ9/K0zmY70A8WzrD7Kgt/Tbx14Fqey8ISyY3ADd3HWcA/ADd141cC\nxw9Q29NYOPv1S8Atu48VcCxwNXA78K/AMQPUdjhwL/CkkbFBjhkL/6DsAH7IwmtO5+7rGLFwVuff\ndr97NwHrBqhtKwuvk+3+fXt/t+9vdz/nG4Drgd+c9c91zO+xiX62l3vVZz+PV9dEe9krhEmS1Jih\nl7UlSdIeDGd
JkhpjOEuS1BjDWZKkxhjOkiQ1xnCWJKkxhrMkSY0xnCVJaozhLElSYwxnSZIaYzhL\nktQYw1mSpMYYzpIkNcZwliSpMYazJEmNMZwlSWqM4SxJUmMMZ0mSGmM4S5LUGMNZkqTGGM6SJDVm\nauGc5MwkX0myNcl503oeSdNlL0uzl6qa/BdNVgBfBX4V2AZ8AXhZVd068SeTNDX2sjSMlVP6uqcB\nW6vqDoAklwLrgb029MpDj60nHHnClEqR5tNDO7/07ao6buAyFtXLACsPSa06IjMqT2rfIw8Uux6q\nRTXFtMJ5DXDXyO1twC+M7pBkI7AR4AlHrOXp//eqKZUizaeb//on7hy6Bg6gl2GPfn5iePpvHzKb\n6qQ5sPXjDy36MYOdEFZVm6pqXVWtW3HosUOVIWkCRvt5pbks9TatcN4OjK5Tr+3GJM0Xe1kawLTC\n+QvAyUlOSrIKOAe4ckrPJWl67GVpAFN5zbmqdiV5HfAvwArgoqq6ZRrPJWl67GVpGNM6IYyq+hTw\nqWl9fUmzYS9Ls+cVwiRJaozhLElSYwxnSZIaYzhLktQYw1mSpMYYzpIkNcZwliSpMYazJEmNMZwl\nSWqM4SxJUmMMZ0mSGmM4S5LUGMNZkqTGGM6SJDXGcJYkqTGGsyRJjTGcJUlqjOEsSVJjDGdJkhpj\nOEuS1BjDWZKkxhjOkiQ1ZuxwTnJCks8muTXJLUne0I0fk+SqJLd3n4+eXLmSpsF+ltrSZ+a8C/ij\nqjoFOB14bZJTgPOAq6vqZODq7rakttnPUkPGDueq2lFV13fbDwC3AWuA9cDmbrfNwNl9i5Q0Xfaz\n1JaJvOac5ETg2cC1wOqq2tHddTeweh+P2ZhkS5Itjz547yTKkDQBfft510MzKVNa0nqHc5InAh8H\n3lhV94/eV1UF1N4eV1WbqmpdVa1bceixfcuQNAGT6OeVh8ygUGmJW9nnwUmewEIjX1JVn+iG70ly\nfFXtSHI8sLNvkZKmbzn28wn3fWPoEppx11EnDl2CRvQ5WzvAhcBtVfXukbuuBDZ02xuAK8YvT9Is\n2M9SW/rMnJ8D/C5wU5IburE/Bd4BXJbkXOBO4KX9SpQ0A/az1JCxw7mq/gPIPu4+Y9yvK2n27Gep\nLV4hTJKkxhjOkiQ1xnCWJKkxhrMkSY0xnCVJaozhLElSYwxnSZIaYzhLktQYw1mSpMYYzpIkNcZw\nliSpMYazJEmNMZwlSWqM4SxJUmMMZ0mSGmM4S5LUGMNZkqTGGM6SJDXGcJYkqTGGsyRJjTGcJUlq\nTO9wTrIiyReT/GN3+6Qk1ybZmuSjSVb1L1PSLNjPUhsmMXN+A3DbyO13Au+pqqcD3wXOncBzSJoN\n+1lqQK9wTrIW+A3gg93tAC8ALu922Qyc3ec5JM2G/Sy1o+/M+a+BNwOPdbePBe6rql3d7W3Amp7P\nIWk27GepEWOHc5IXATur6roxH78xyZYkWx598N5xy5A0AZPs510PTbg4aRla2eOxzwFenOQs4BDg\nSOC9wFFJVnZ/ba8Ftu/twVW1CdgEcOjqZ1WPOiT1N7F+Puy4g+xnqaexZ85V9ZaqWltVJwLnAP9W\nVS8HPgu8pNttA3BF7yolTZX9LLVlGu9z/hPgTUm2svCa1YVTeA5Js2E/SwPos6z936rqc8Dnuu07\ngNMm8XUlzZ79LA3PK4RJktQYw1mSpMYYzpIkNcZwliSpMYazJEmNMZwlSWqM4SxJUmMMZ0mSGmM4\nS5LUGMNZkqTGGM6SJDXGcJYkqTGGsyRJjTGcJUlqjOEsSVJjDGdJkhpjOEuS1BjDWZKkxhjOkiQ1\nxnCWJKkxhrMkSY0xnCVJakyvcE5yVJLLk3w5yW1JfjHJMUmuSnJ79/noSRUraXrsZ6kdfWfO7wX+\nuap+Bvh54DbgPODqqjoZuLq7Lal99rPUiLHDOcmTgOcBFwJU1SNVdR+wHtjc7bYZOLtvkZKmy36W\n2tJn5nwS8C3g75N8MckHkxwOrK6qHd0+dwOr+xYpaersZ6khfcJ5JXAqcEFVPRv4PnsseVVVAbW3\nByfZmGRLki2PPnhvjzIkTcDE+nnXQ1OvVVry+oTzNmBbVV3b3b6chea+J8nxAN3nnXt7cFVtqqp1\nVbVuxaHH9ihD0gRMrJ9XHjKTeqUlbexwrqq7gbuSPKMbOgO4FbgS2NCNbQCu6FWhpKmzn6W2rOz5\n+NcDlyRZBdwBvIqFwL8sybnAncBLez6HpNmwn6VG9ArnqroBWLeXu87o83UlzZ79LLXDK4RJktQY\nw1mSpMb0fc1ZkubWXUedOHQJ0l45c5YkqTGGsyRJjTGcJUlqjOEsSVJjDGdJkhpjOEuS1BjDWcvC\nB444lw8cce7QZUjSATGcJUlqjBch0dwaZya8mMe85oELF/31JWkSDGfNjVkvS+/5fIa1pFlxWVuS\npMY4c1bTWjqJa7QWZ9GSpslwVlNaCuPHY1BLmiaXtSVJaowzZw1uXmbL++IsWtKkOXOWJKkxzpwX\nYeUDK4YuYW7tOuLRvY7P+6x5T7u/H2fQkvownDWIpRbKezKkJfXhsrYkSY3pNXNO8ofA/wMKuAl4\nFXA8cClwLHAd8LtV9UjPOrUELPXZ8t7M08li9rPUjrFnzknWAH8ArKuqnwVWAOcA7wTeU1VPB74L\nLL9/kfVjlmMw76nlY2A/S23pu6y9Ejg0yUrgMGAH8ALg8u7+zcDZPZ9D0mzYz1Ijxg7nqtoO/CXw\nTRaa+HssLHvdV1W7ut22AWv29vgkG5NsSbLl0QfvHbcMSRMwyX7e9dAsKpaWtrFfc05yNLAeOAm4\nD/gYcOaBPr6qNgGbAA5d/awatw617YKnvHLoEprS6lnck+znw447yH6WeuqzrP0rwNer6ltV9UPg\nE8BzgKO6ZTGAtcD2njVKmj77WWpIn3D+JnB6ksOSBDgDuBX4LPCSbp8NwBX9StS8cta8bw2eHGY/\nSw0Ze1m7qq5NcjlwPbAL+CILy1r/BFya5M+7sbbW7zR1hvKBaWmJ236W2tLrfc5VdT5w/h7DdwCn\n9fm6kmbPfpba4RXCJElqjNfW1sS4nD2eDxxxbhNL25La4cxZkqTGGM6SJDXGcJYkqTGGsybC15v7\n+cAR57b43mdJAzGcJUlqjOEsSVJjfCuVenE5e7J8W5UkcOYsSVJzDGdJkhpjOEuS1Bhfc9bYfL15\nOna/peoXB65D0nCcOUuS1BjDWZKkxhjOkiQ1xnCWJKkxhrMkSY0xnCVJaozhLElSYwxnSZIaYzhL\nktSY/YZzkouS7Exy88jYMUmuSnJ79/nobjxJ3pdka5Ibk5w6zeIlLY79LM2HA5k5XwycucfYecDV\nVXUycHV3G+CFwMndx0bggsmUqZZc8JRXeunO+XUx9rPUvP2Gc1V9HvjOHsPrgc3d9mbg7JHxD9WC\na4Cjkhw/qWIl9WM/S/Nh3NecV1fVjm77bmB1t70GuGtkv23d2I9JsjHJ
liRbHn3w3jHL0BB+/78u\n5vf/6+Khy9DkTLSfdz00vUKl5aL3CWFVVUCN8bhNVbWuqtatOPTYvmVImoBJ9PPKQ6ZQmLTMjBvO\n9+xe3uo+7+zGtwMnjOy3thuT1C77WWrMuOF8JbCh294AXDEy/oruLM/Tge+NLJdJapP9LDVm5f52\nSPIR4PnAk5NsA84H3gFcluRc4E7gpd3unwLOArYCPwBeNYWaJY3Jfpbmw37Duapeto+7ztjLvgW8\ntm9RkqbDfpbmg1cIkySpMYazJEmNMZwlSWqM4SxJUmMMZ0mSGmM4a2xewnM6XvPAhbzmgQuHLkPS\ngAxnSZIaYzhLktQYw1mSpMYYzurF/z5ysnytWRIYzpIkNcdwliSpMYazJsKl7X58+5SkUYazJEmN\nMZwlSWqM4ayJ8czt8bicLWlPhrMkSY0xnCVJaszKoQvQ0rN7afuCp7xy0Dpa53K2pH1x5ixJUmMM\nZ02NJ4ftm7NmSY/HcJYkqTH7DeckFyXZmeTmkbF3JflykhuTfDLJUSP3vSXJ1iRfSfLr0ypc88G3\nV/2ooa8EZj9L8+FAZs4XA2fuMXYV8LNV9XPAV4G3ACQ5BTgHeGb3mL9LsmJi1Urq62LsZ6l5+w3n\nqvo88J09xj5TVbu6m9cAa7vt9cClVfVwVX0d2AqcNsF6JfVgP0vzYRKvOb8a+HS3vQa4a+S+bd3Y\nj0myMcmWJFseffDeCZShlnlp2u3LAAAFqElEQVQC1Nwcg979vOuhKVcoLQO93uec5K3ALuCSxT62\nqjYBmwAOXf2s6lOH5sNoOH3giHMHrGR25iSQgcn182HHHWQ/Sz2NHc5JXgm8CDijqnY343bghJHd\n1nZjkhpmP0ttGSuck5wJvBn45ar6wchdVwIfTvJu4CnAycB/9q5SS87uGeVSnUHP2YzZfpYas99w\nTvIR4PnAk5NsA85n4WzOg4GrkgBcU1W/V1W3JLkMuJWF5bHXVtWj0ype8+81D1y4pAK69VC2n6X5\nsN9wrqqX7WV4n/8CVdXbgbf3KUrSdNjP0nzwP77Q4Ob9RLHWZ8uS5o/hrKbsGXSthrWBLGmavLa2\nJEmNceasprW05O1sWdKsGM6LsOsIT1Qd0qyXvA1jSUNxWVuSpMbkfy4GNGARybeA7wPfHrqWfXgy\nbdbWal3Qbm2t1gU/XttTq+q4oYoZV5IHgK8MXcc+zNPPvxWt1gXzU9uie7mJcAZIsqWq1g1dx960\nWlurdUG7tbVaF7Rd22K0/H1Y2+K1Whcs7dpc1pYkqTGGsyRJjWkpnDcNXcDjaLW2VuuCdmtrtS5o\nu7bFaPn7sLbFa7UuWMK1NfOasyRJWtDSzFmSJGE4S5LUnMHDOcmZSb6SZGuS8wau5YQkn01ya5Jb\nkryhG39bku1Jbug+zhqovm8kuamrYUs3dkySq5Lc3n0+esY1PWPkuNyQ5P4kbxzqmCW5KMnOJDeP\njO31GGXB+7rfvRuTnDpAbe9K8uXu+T+Z5Khu/MQkD44cv/dPs7ZJaaWf7eWx67Kfx69rsr1cVYN9\nACuArwFPA1YBXwJOGbCe44FTu+0jgK8CpwBvA/54yGPV1fQN4Ml7jP0FcF63fR7wzoF/nncDTx3q\nmAHPA04Fbt7fMQLOAj4NBDgduHaA2n4NWNltv3OkthNH95uHj5b62V6e2M/Tfj7wuibay0PPnE8D\ntlbVHVX1CHApsH6oYqpqR1Vd320/ANwGrBmqngO0HtjcbW8Gzh6wljOAr1XVnUMVUFWfB76zx/C+\njtF64EO14BrgqCTHz7K2qvpMVe3qbl4DrJ3W889AM/1sL0+E/byIuibdy0OH8xrgrpHb22ikgZKc\nCDwbuLYbel23XHHREMtNnQI+k+S6JBu7sdVVtaPbvhtYPUxpAJwDfGTkdgvHDPZ9jFr7/Xs1C3/5\n73ZSki8m+fckvzRUUYvQ2vEE7OUe7Ofx9e7locO5SUmeCHwceGNV3Q9cAPwU8CxgB/BXA5X23Ko6\nFXgh8Nokzxu9sxbWUAZ5b1ySVcCLgY91Q60csx8x5DF6PEneCuwCLumGdgD/q6qeDbwJ+HCSI4eq\nb17Zy+Oxn8c3qV4eOpy3AyeM3F7bjQ0myRNYaOZLquoTAFV1T1U9WlWPAR9gYflu5qpqe/d5J/DJ\nro57di/ddJ93DlEbC//IXF9V93Q1NnHMOvs6Rk38/iV5JfAi4OXdPzZU1cNVdW+3fR0Lr+X+9Kxr\nW6Qmjudu9nIv9vMYJtnLQ4fzF4CTk5zU/aV2DnDlUMUkCXAhcFtVvXtkfPR1i98Cbt7zsTOo7fAk\nR+zeZuHkg5tZOF4but02AFfMurbOyxhZAmvhmI3Y1zG6EnhFd5bn6cD3RpbLZiLJmcCbgRdX1Q9G\nxo9LsqLbfhpwMnDHLGsbQzP9bC/3Zj8v0sR7eVpnsx3oBwtn2H2Vhb8m3jpwLc9lYYnkRuCG7uMs\n4B+Am7rxK4HjB6jtaSyc/fol4Jbdxwo4FrgauB34V+CYAWo7HLgXeNLI2CDHjIV/UHYAP2ThNadz\n93WMWDir82+7372bgHUD1LaVhdfJdv++vb/b97e7n/MNwPXAb8765zrm99hEP9vLveqzn8era6K9\n7OU7JUlqzNDL2pIkaQ+GsyRJjTGcJUlqjOEsSVJjDGdJkhpjOEuS1BjDWZKkxvx/fqpoAT8CXPcA\nAAAASUVORK5CYII=\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "tags": [] + } + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "zWKmRev3WKK4", + "colab_type": "text" + }, + "source": [ + "### Visualise Unpruned Results" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "vwwTWZ6xba0q", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 485 + }, + "outputId": "c9ffbb8a-9951-4af8-c63e-b2d215107f1d" + }, + "source": [ + "# Visualise Results\n", + "rows = 2\n", + "cols = 2\n", + "fig = plt.figure(figsize=(8, 8))\n", + "for i in range(1, rows*cols+1):\n", + " img = dataset.load_image(i+5)\n", + " image = np.array(img)[:, :, [2, 1, 0]]\n", + " result = unpruned_demo.run_on_opencv_image(image, objDet=\"True\")\n", + " \n", + " fig.add_subplot(rows, cols, i)\n", + " plt.imshow(result)\n", + "plt.show()" + ], + "execution_count": 57, + "outputs": [ + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAecAAAHVCAYAAADLvzPyAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzt3W3wZGV54P/vNTAwoIw8OA44wzq4\nkli4icpOEf5l1qTCuhJ0BYMaTCqiIU5MGVfj7gquLxhfpCLrrkrcXQmKK24ZH5GCikYlrNHaF7AC\nITz6MCEQh51hiKBYQZCB6//idxp7mn4+p/vc3f39VP3q1336PFx9Zu7f1dd97nN3ZCaSJKkc69oO\nQJIkHcjkLElSYUzOkiQVxuQsSVJhTM6SJBXG5CxJUmFmlpwj4vSI+E5E7IqIC2Z1HEmzZVuW5i9m\ncZ9zRBwEfBd4GbAb+Bbw+sy8o/GDSZoZ27LUjllVzqcAuzLzrsz8KfAZ4MwZHUvS7NiWpRYcPKP9\nbgG+3/V8N/BLg1beeHTks46fUSTSgvq7W/jHzNzUchgTtWWAQ9cdkk876LCZBiUtkn96/Cc8+sRP\nY5JtZpWcR4qIHcAOgE1b4P1faS0UqUi/8ez997Qdw7i62/Ph6zbwsqNObTkiqRzXPHjdxNvMqlv7\nXqC7Ft5aLXtSZl6amdszc/vGY2YUhaS6RrZlOLA9H7rukLkFJy2rWSXnbwEnRsQJEXEIcA5w9YyO\nJWl2bMtSC2bSl5yZ+yPiD4GvAgcBH8/M22dxLEmzY1uW2jGzC72Z+WXgy7Pav6T5WLa2/Nl9XwXg\nN5/18pYjkQZzFJaklfTZfV+de4JexA8GTcfc2d8go44zbPtFOq+jOH2nJEmFsXKWtFKWqbqapVEV\n7rT7HHT+Rx1vVAU/i3jbZOUsSTpAJ4nO84NM53jTJtnOtsuSpE3OkiQVxm5tSQtvkkFCg7pH+y3v\n3u8k3al1Ks5J9jerAWZN769OnIs4iK4JJmdJC23Qdcw63ZvjJoTeY9ftUu33Xrq7aocl6VVLXsvO\nbm1Jkgpj5SxpIU1b3Y6z33Hvte1dr/N80q7YYesPGyRltby8TM6SltK0iauJZD/ph4F+j7Xa7NaW\nJKkwVs6S1DK7p9XLylmSpMKYnCVJc1H39rZVuiZvcpa01Jr+gz7ONJGTJJI6+ystYQ0bVT7OQLt+\ng+3GnUa0jSlHZ8nkLElSYRwQJmlhfOG+R558fFD+CgCP7/vG0G0Oyl+B+4bva5zl/QyqFDux9Tvu\nsGNMsr/XbN5wwHalVIzT3rM9Su895MsuMrPtGHjeCyPf/xU/JyyjVx/32NDXr9yzvvF9j9rnqJjG\n3c+s/caz99+YmdtbDWIKR69/Rr7sqFNnsu9OUutOTE3udxZmFWvT+9XsXPPgdTzw2I9ikm3s1pYk\nqTCWq5qZVx/32MDqc9zqtc6+R1W+o7YfdgwtvllWy4OOY7WrcZmc1YpO0ps0AY6beOvojk3LYV6J\neBQTtcZlt7YkSYWZunKOiOOBTwKbgQQuzcyLI+Jo4LPANuBu4HWZ+WD9UIebdnDQsG37bT9u5TZO\nRThJzL3769522HF6j1G3K3gc86hum4hj0L9RKfHPU2ntuUmlVM29HNilYep0a+8H/n1m3hQRRwA3\nRsQ1wBuBazPzfRFxAXABcH79UPsb5w/wqNcHJeBZ6RfTJMecJPH3HqPu+VoG3d3W/c77Mr/3IYpo\nz02aNClf9qVjGjv2ea/4wdjr2tWtfqbu1s7MPZl5U/X4x8CdwBbgTODyarXLgbPqBilptmzPUlka\nGRAWEduAFwPXA5szc0/10l7WusmKM6xCvHLP+plVz4Oq3nEGIdXtUh/2vhalWpx2INmo/XVbhd6D\nYRaxPdfVZNXcu79Jq2irZ0EDyTking5cAbwjMx+K+Nl91pmZEdF3lpOI2AHsANi0pW4U/ZPalXvW\nj3X9dh6a+IM/yTXsVRhpPOqDxqDXmxw3sGyaaM+Hr2s3uYzbnd10Qh7nOOMkaq9FC2qO1o6I9aw1\n5E9l5herxfdFxHHV68cB+/ptm5mXZub2zNy+cT5tRNIQTbXnQ9cdMp+ApSVWZ7R2AJcBd2bmB7pe\nuho4F3hf9fuqWhGOMO5I5VWqgFbhvY7z7z5tr8kq3udcSnuua1TVPK9qeZzjj6qiraBXW51u7ZcA\nvwPcGhE3V8v+E2uN+HMRcR5wD/C6eiFKmgPbs1SQqZNzZv4fYNBE3qdNu99pnf1nT71wfcXv/2wA\nVPfrT+zsv363J3b23++g5f227z3mNNuNWj4qtit+/96h28zSKl6zXVSltedJlV4x99OJaZIBY1od\nCz1D2Nl/tmXgvaqd1/tZt3MbT+y8myd23t14TIP2u27ntiePOyv9jtF7Dgadr2HncVKDBuL1O96w\n+63HGbneb/mo4zf5XqW6Rn1wKHUSFc3WQidnSZKW0UJ+8UX/bt+7p9rXpNut27lt6HajXp/mmNMY\nFd8Vv99/uybv8W3ifvGmv9mqO6ZZfte
[base64-encoded PNG data for this image output omitted]
" + ] + }, + "metadata": { + "tags": [] + } + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "8qYiiUqGWR1r", + "colab_type": "text" + }, + "source": [ + "### Visualise Pruned Model Results" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "PWwT_DbRT2ID", + "colab_type": "code", + "outputId": "c64bf2b1-f885-4378-f7e8-58fac64275e8", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 485 + } + }, + "source": [ + "# Visualise Results\n", + "rows = 2\n", + "cols = 2\n", + "fig = plt.figure(figsize=(8, 8))\n", + "for i in range(1, rows*cols+1):\n", + " img = dataset.load_image(i+5)\n", + " image = np.array(img)[:, :, [2, 1, 0]]\n", + " result = pruned_demo.run_on_opencv_image(image, objDet=\"True\")\n", + " \n", + " fig.add_subplot(rows, cols, i)\n", + " plt.imshow(result)\n", + "plt.show()" + ], + "execution_count": 58, + "outputs": [ + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAecAAAHVCAYAAADLvzPyAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzt3WvQZXV14P/vohtoUBDBtul0MwFH\nRgcTL1SX45QZTYXJSMARDF4wqYjK2NEyjsaZEYwvwKp/ShlnMMSZkbTiiFNGUZCCik6QYbzUvIDY\nGOSOtgRid3XTBC9YUZCG9X9x9oHTT5/73ufs3znP91PV1efZZ1/W2d2/Zz1r7d/eT2QmkiSpHAe1\nHYAkSdqfyVmSpMKYnCVJKozJWZKkwpicJUkqjMlZkqTCzCw5R8SpEXFPROyIiPNndRxJs+VYluYv\nZnGfc0SsAb4H/DawE/g28KbMvLPxg0maGcey1I5ZVc4vBXZk5r2Z+UvgC8AZMzqWpNlxLEstWDuj\n/W4Cftjz9U7gXwxa+cijI5993IwikRbUD27lHzJzfcthTDSWAQ496JB82prDZhqUtEj+8fFf8OgT\nv4xJtplVch4pIrYCWwHWb4KP/nVroUhF+t1f2Xd/2zGMq3c8H37QOn77mS9rOSKpHNf/+MaJt5lV\nW3sX0FsLb66WPSkzt2XmlszccuQxM4pCUl0jxzLsP54PPeiQuQUnLatZJedvAydGxAkRcQhwNnDt\njI4laXYcy1ILZtJLzsx9EfFHwHXAGuDTmXnHLI4laXYcy1I7ZnahNzO/Cnx1VvuXNB/LNpav2Hsd\nAG989qtajkQazFlYklalK/ZeN/cEvSg/GHTjHKRO/IP2PWqfo2IaZx+LxMd3SpJUGCtnSavKMlVX\nTRtV2Y9TvY7a/6h9j/r3WS3/fiZnSdJYuolx0ksCi9LOL4ltbUmSCmPlLGnhDWu3rqzWBlVx/Zb3\n7neSVm/TE6bqtoLHPW4Jle2oOKadULZoTM6SFtqgb+Z1ro+Om/RWHnsW12Sv2HvdWNeCly05DbJa\nzoFtbUmSCmPlLGkhTVPdjlPZjlOBDTr2yuOMW8kNW/+Nz37Vwrdyp51I1m8fwyzTxDOTs6SlNKsk\nME6CmaTN3ru8blu8rlk9fGSczzjqB6hxYmv7/DXJtrYkSYWxcpa0apRaXZXShp1mlnQT+x826W3W\nD0YplZWzJEmFMTlLkoD9K9iVul2HWVSqTd2nvUxsa0taaONMMJp0stHKr3uTxzizpyeZNTyq1d77\n/qQPSBlH777HTXDT3ms87J70pmbILwsrZ0mSCmPlrFpeu/Gxoe9fvfvgmex30n139zdtPCrDlQ88\nMva63UpqTb4SHhh/X1fsva6zTa8+24885pDtJv0co/bX1NOxmqhC+1W4vfuddN8rY5rkUa2LzOSs\nqb1242NDk904CXaUQft/7cbHxk64TcShsjwe3xx79u7rNqx76ou9T30Dfx3r+ibJAxJzn2MPWufx\n+ObQbXuP3RvvqFZw72foxjzLRDTtzOhx2/iT7nfYdsuUkHvZ1pYkqTBWzpqZq3cfPLN2cu++B+k9\nttXz6jGqkhrUUh6nYh613qh99CrpyWCzNs4EuUkm0S3TYzoHWZjkPOib67jf9CfZflS7dpJ1Jjnm\nyvd6tx/W3h1n/911m0iS87x+OyjmcT6L15dVmn4/GCxzghk3iU4yU3yZW9m9bGtLklSYqSvniDgO\n+CywAUhgW2ZeEhFHA1cAxwP3AW/IzB/XCXJYlTSqihtVkc7KypgnOeY4lemwz1XnfJWiG1/vxK9+\n76sZ8xzP8zbJzOg2XPnAI/tPWmtJ6RXpamhl96rT1t4H/IfM/E5EHAHcHBHXA28BbsjMj0TE+cD5\nwHn1Q53coCQ1y2uQg5Jfb7IZtf20LfVRx1jUhDZtW1sTKX48T+qA25pq6u5nnNna4xxzv/090Hnd\nL0nXeXDHOMa9fWrY8cZ9/vU08a62pNw1dVs7M3dn5neq1z8D7gI2AWcAl1erXQ6cWTdISbPleJbK\n0siEsIg4HngJcBOwITN3V2/todMmq21Qa3PcyVWz1tQxx71veNlnH0/b2ld98xjP0+htT6/JV/L4\n3uH3FDdVMfcz6H7maY/55P72DtjnkIePQDNVZRO/sWtldV33ASKTzmhfpuq6dnKOiKcDVwHvzcyH\nI+LJ9zIzIyIHbLcV2AqwftPwY4wzU3k1fYNe5s86bsL19qjZaGI8H37Q7K+fvm7DOt7Igd+IR11f\nHpQ8J0mq0ybgSY/dbXH3PrAEZvsQkmn3Oc5s7Fnsd5nVmq0dEQfTGcify8wvV4sfiIiN1fsb6fuz\nIGTmtszckplbjjymThSSmtDUeD70oEPmE7C0xKZOztH5kfoy4K7MvLjnrWuBc6rX5wDXTB/ecMtc\nQU7irL/YxFl/MaL9MANWrsujhPFcx5UPPFL8rOxJLeNn0vjqtLVfDvwBcFtE3FIt+xPgI8AXI+Jc\n4H7gDfVClDQHjmepIFMn58z8f0AMe
[base64-encoded PNG data for the pruned-model visualization output omitted]
" + ] + }, + "metadata": { + "tags": [] + } + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "MTct6_8Hbfpz", + "colab_type": "text" + }, + "source": [ + "### Fin" + ] + } + ] +} \ No newline at end of file