diff --git a/.dockerignore b/.dockerignore index 3c6b6ab02e03..af51ccc3d8df 100644 --- a/.dockerignore +++ b/.dockerignore @@ -8,17 +8,23 @@ coco storage.googleapis.com data/samples/* -**/results*.txt +**/results*.csv *.jpg # Neural Network weights ----------------------------------------------------------------------------------------------- -**/*.weights **/*.pt **/*.pth **/*.onnx +**/*.engine **/*.mlmodel **/*.torchscript - +**/*.torchscript.pt +**/*.tflite +**/*.h5 +**/*.pb +*_saved_model/ +*_web_model/ +*_openvino_model/ # Below Copied From .gitignore ----------------------------------------------------------------------------------------- # Below Copied From .gitignore ----------------------------------------------------------------------------------------- diff --git a/.github/ISSUE_TEMPLATE/bug-report.md b/.github/ISSUE_TEMPLATE/bug-report.md deleted file mode 100644 index 362059b288d5..000000000000 --- a/.github/ISSUE_TEMPLATE/bug-report.md +++ /dev/null @@ -1,55 +0,0 @@ ---- -name: "πŸ› Bug report" -about: Create a report to help us improve -title: '' -labels: bug -assignees: '' - ---- - -Before submitting a bug report, please be aware that your issue **must be reproducible** with all of the following, otherwise it is non-actionable, and we can not help you: - - **Current repo**: run `git fetch && git status -uno` to check and `git pull` to update repo - - **Common dataset**: coco.yaml or coco128.yaml - - **Common environment**: Colab, Google Cloud, or Docker image. See https://github.com/ultralytics/yolov5#environments - -If this is a custom dataset/training question you **must include** your `train*.jpg`, `test*.jpg` and `results.png` figures, or we can not help you. You can generate these with `utils.plot_results()`. - - -## πŸ› Bug -A clear and concise description of what the bug is. - - -## To Reproduce (REQUIRED) - -Input: -``` -import torch - -a = torch.tensor([5]) -c = a / 0 -``` - -Output: -``` -Traceback (most recent call last): - File "/Users/glennjocher/opt/anaconda3/envs/env1/lib/python3.7/site-packages/IPython/core/interactiveshell.py", line 3331, in run_code - exec(code_obj, self.user_global_ns, self.user_ns) - File "", line 5, in - c = a / 0 -RuntimeError: ZeroDivisionError -``` - - -## Expected behavior -A clear and concise description of what you expected to happen. - - -## Environment -If applicable, add screenshots to help explain your problem. - - - OS: [e.g. Ubuntu] - - GPU [e.g. 2080 Ti] - - -## Additional context -Add any other context about the problem here. diff --git a/.github/ISSUE_TEMPLATE/bug-report.yml b/.github/ISSUE_TEMPLATE/bug-report.yml new file mode 100644 index 000000000000..fcb64138b088 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug-report.yml @@ -0,0 +1,85 @@ +name: πŸ› Bug Report +# title: " " +description: Problems with YOLOv5 +labels: [bug, triage] +body: + - type: markdown + attributes: + value: | + Thank you for submitting a YOLOv5 πŸ› Bug Report! + + - type: checkboxes + attributes: + label: Search before asking + description: > + Please search the [issues](https://github.com/ultralytics/yolov5/issues) to see if a similar bug report already exists. + options: + - label: > + I have searched the YOLOv5 [issues](https://github.com/ultralytics/yolov5/issues) and found no similar bug report. + required: true + + - type: dropdown + attributes: + label: YOLOv5 Component + description: | + Please select the part of YOLOv5 where you found the bug. 
+ multiple: true + options: + - "Training" + - "Validation" + - "Detection" + - "Export" + - "PyTorch Hub" + - "Multi-GPU" + - "Evolution" + - "Integrations" + - "Other" + validations: + required: false + + - type: textarea + attributes: + label: Bug + description: Provide console output with error messages and/or screenshots of the bug. + placeholder: | + πŸ’‘ ProTip! Include as much information as possible (screenshots, logs, tracebacks etc.) to receive the most helpful response. + validations: + required: true + + - type: textarea + attributes: + label: Environment + description: Please specify the software and hardware you used to produce the bug. + placeholder: | + - YOLO: YOLOv5 πŸš€ v6.0-67-g60e42e1 torch 1.9.0+cu111 CUDA:0 (A100-SXM4-40GB, 40536MiB) + - OS: Ubuntu 20.04 + - Python: 3.9.0 + validations: + required: false + + - type: textarea + attributes: + label: Minimal Reproducible Example + description: > + When asking a question, people will be better able to provide help if you provide code that they can easily understand and use to **reproduce** the problem. + This is referred to by community members as creating a [minimal reproducible example](https://stackoverflow.com/help/minimal-reproducible-example). + placeholder: | + ``` + # Code to reproduce your issue here + ``` + validations: + required: false + + - type: textarea + attributes: + label: Additional + description: Anything else you would like to share? + + - type: checkboxes + attributes: + label: Are you willing to submit a PR? + description: > + (Optional) We encourage you to submit a [Pull Request](https://github.com/ultralytics/yolov5/pulls) (PR) to help improve YOLOv5 for everyone, especially if you have a good understanding of how to implement a fix or feature. + See the YOLOv5 [Contributing Guide](https://github.com/ultralytics/yolov5/blob/master/CONTRIBUTING.md) to get started. + options: + - label: Yes I'd like to help by submitting a PR! diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 000000000000..f388d7bacf66 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,8 @@ +blank_issues_enabled: true +contact_links: + - name: Slack + url: https://join.slack.com/t/ultralytics/shared_invite/zt-w29ei8bp-jczz7QYUmDtgo6r6KcMIAg + about: Ask on Ultralytics Slack Forum + - name: Stack Overflow + url: https://stackoverflow.com/search?q=YOLOv5 + about: Ask on Stack Overflow with 'YOLOv5' tag diff --git a/.github/ISSUE_TEMPLATE/feature-request.md b/.github/ISSUE_TEMPLATE/feature-request.md deleted file mode 100644 index 87db3eacbf02..000000000000 --- a/.github/ISSUE_TEMPLATE/feature-request.md +++ /dev/null @@ -1,27 +0,0 @@ ---- -name: "πŸš€ Feature request" -about: Suggest an idea for this project -title: '' -labels: enhancement -assignees: '' - ---- - -## πŸš€ Feature - - -## Motivation - - - -## Pitch - - - -## Alternatives - - - -## Additional context - - diff --git a/.github/ISSUE_TEMPLATE/feature-request.yml b/.github/ISSUE_TEMPLATE/feature-request.yml new file mode 100644 index 000000000000..68ef985186ef --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature-request.yml @@ -0,0 +1,50 @@ +name: πŸš€ Feature Request +description: Suggest a YOLOv5 idea +# title: " " +labels: [enhancement] +body: + - type: markdown + attributes: + value: | + Thank you for submitting a YOLOv5 πŸš€ Feature Request! 
+ + - type: checkboxes + attributes: + label: Search before asking + description: > + Please search the [issues](https://github.com/ultralytics/yolov5/issues) to see if a similar feature request already exists. + options: + - label: > + I have searched the YOLOv5 [issues](https://github.com/ultralytics/yolov5/issues) and found no similar feature requests. + required: true + + - type: textarea + attributes: + label: Description + description: A short description of your feature. + placeholder: | + What new feature would you like to see in YOLOv5? + validations: + required: true + + - type: textarea + attributes: + label: Use case + description: | + Describe the use case of your feature request. It will help us understand and prioritize the feature request. + placeholder: | + How would this feature be used, and who would use it? + + - type: textarea + attributes: + label: Additional + description: Anything else you would like to share? + + - type: checkboxes + attributes: + label: Are you willing to submit a PR? + description: > + (Optional) We encourage you to submit a [Pull Request](https://github.com/ultralytics/yolov5/pulls) (PR) to help improve YOLOv5 for everyone, especially if you have a good understanding of how to implement a fix or feature. + See the YOLOv5 [Contributing Guide](https://github.com/ultralytics/yolov5/blob/master/CONTRIBUTING.md) to get started. + options: + - label: Yes I'd like to help by submitting a PR! diff --git a/.github/ISSUE_TEMPLATE/question.md b/.github/ISSUE_TEMPLATE/question.md deleted file mode 100644 index 2c22aea70a7b..000000000000 --- a/.github/ISSUE_TEMPLATE/question.md +++ /dev/null @@ -1,13 +0,0 @@ ---- -name: "❓Question" -about: Ask a general question -title: '' -labels: question -assignees: '' - ---- - -## ❔Question - - -## Additional context diff --git a/.github/ISSUE_TEMPLATE/question.yml b/.github/ISSUE_TEMPLATE/question.yml new file mode 100644 index 000000000000..8e0993c68bab --- /dev/null +++ b/.github/ISSUE_TEMPLATE/question.yml @@ -0,0 +1,33 @@ +name: ❓ Question +description: Ask a YOLOv5 question +# title: " " +labels: [question] +body: + - type: markdown + attributes: + value: | + Thank you for asking a YOLOv5 ❓ Question! + + - type: checkboxes + attributes: + label: Search before asking + description: > + Please search the [issues](https://github.com/ultralytics/yolov5/issues) and [discussions](https://github.com/ultralytics/yolov5/discussions) to see if a similar question already exists. + options: + - label: > + I have searched the YOLOv5 [issues](https://github.com/ultralytics/yolov5/issues) and [discussions](https://github.com/ultralytics/yolov5/discussions) and found no similar questions. + required: true + + - type: textarea + attributes: + label: Question + description: What is your question? + placeholder: | + πŸ’‘ ProTip! Include as much information as possible (screenshots, logs, tracebacks etc.) to receive the most helpful response. + validations: + required: true + + - type: textarea + attributes: + label: Additional + description: Anything else you would like to share? 
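All three forms above use GitHub's issue-forms schema: a `body` list of typed blocks (`markdown`, `checkboxes`, `dropdown`, `textarea`), each carrying `attributes` and optional `validations`. As a hedged illustration of how the schema composes, a hypothetical extra block for capturing the install method could look like the following (this field is not part of this PR):

```yaml
# Hypothetical example block, for illustration only -- not included in this PR
- type: dropdown
  attributes:
    label: Install Method
    description: How was YOLOv5 installed?
    options:
      - "git clone"
      - "Docker"
      - "PyTorch Hub"
  validations:
    required: false
```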
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 000000000000..f25b017ace8b --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,9 @@ + diff --git a/.github/SECURITY.md b/.github/SECURITY.md new file mode 100644 index 000000000000..aa3e8409da6b --- /dev/null +++ b/.github/SECURITY.md @@ -0,0 +1,7 @@ +# Security Policy + +We aim to make YOLOv5 πŸš€ as secure as possible! If you find potential vulnerabilities or have any concerns please let us know so we can investigate and take corrective action if needed. + +### Reporting a Vulnerability + +To report vulnerabilities please email us at hello@ultralytics.com or visit https://ultralytics.com/contact. Thank you! diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 9910689197f5..c1b3d5d514c3 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -1,12 +1,23 @@ version: 2 updates: -- package-ecosystem: pip - directory: "/" - schedule: - interval: weekly - time: "04:00" - open-pull-requests-limit: 10 - reviewers: - - glenn-jocher - labels: - - dependencies + - package-ecosystem: pip + directory: "/" + schedule: + interval: weekly + time: "04:00" + open-pull-requests-limit: 10 + reviewers: + - glenn-jocher + labels: + - dependencies + + - package-ecosystem: github-actions + directory: "/" + schedule: + interval: weekly + time: "04:00" + open-pull-requests-limit: 5 + reviewers: + - glenn-jocher + labels: + - dependencies diff --git a/.github/workflows/ci-testing.yml b/.github/workflows/ci-testing.yml index df508474a955..f2096ce17a17 100644 --- a/.github/workflows/ci-testing.yml +++ b/.github/workflows/ci-testing.yml @@ -1,6 +1,8 @@ +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license + name: CI CPU testing -on: # https://help.github.com/en/actions/reference/events-that-trigger-workflows +on: # https://help.github.com/en/actions/reference/events-that-trigger-workflows push: branches: [ master ] pull_request: @@ -16,16 +18,16 @@ jobs: strategy: fail-fast: false matrix: - os: [ubuntu-latest, macos-latest, windows-latest] - python-version: [3.8] - model: ['yolov5s'] # models to test + os: [ ubuntu-latest, macos-latest, windows-latest ] + python-version: [ 3.9 ] + model: [ 'yolov5n' ] # models to test # Timeout: https://stackoverflow.com/a/59076067/4521646 - timeout-minutes: 50 + timeout-minutes: 60 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: ${{ matrix.python-version }} @@ -37,44 +39,55 @@ jobs: python -c "from pip._internal.locations import USER_CACHE_DIR; print('::set-output name=dir::' + USER_CACHE_DIR)" - name: Cache pip - uses: actions/cache@v1 + uses: actions/cache@v2.1.7 with: path: ${{ steps.pip-cache.outputs.dir }} key: ${{ runner.os }}-${{ matrix.python-version }}-pip-${{ hashFiles('requirements.txt') }} restore-keys: | ${{ runner.os }}-${{ matrix.python-version }}-pip- + # Known Keras 2.7.0 issue: https://github.com/ultralytics/yolov5/pull/5486 - name: Install dependencies run: | python -m pip install --upgrade pip pip install -qr requirements.txt -f https://download.pytorch.org/whl/cpu/torch_stable.html - pip install -q onnx + pip install -q onnx tensorflow-cpu keras==2.6.0 # wandb # extras python --version pip --version pip list shell: bash - - name: Download data - run: | - # curl -L -o tmp.zip https://github.com/ultralytics/yolov5/releases/download/v1.0/coco128.zip - # unzip -q 
tmp.zip -d ../ - # rm tmp.zip + # - name: W&B login + # run: wandb login 345011b3fb26dc8337fd9b20e53857c1d403f2aa + + # - name: Download data + # run: | + # curl -L -o tmp.zip https://github.com/ultralytics/yolov5/releases/download/v1.0/coco128.zip + # unzip -q tmp.zip -d ../datasets - name: Tests workflow run: | # export PYTHONPATH="$PWD" # to run '$ python *.py' files in subdirectories - di=cpu # inference devices # define device - - # train - python train.py --img 128 --batch 16 --weights weights/${{ matrix.model }}.pt --cfg models/${{ matrix.model }}.yaml --epochs 1 --device $di - # detect - python detect.py --weights weights/${{ matrix.model }}.pt --device $di - python detect.py --weights runs/train/exp/weights/last.pt --device $di - # test - python test.py --img 128 --batch 16 --weights weights/${{ matrix.model }}.pt --device $di - python test.py --img 128 --batch 16 --weights runs/train/exp/weights/last.pt --device $di + d=cpu # device + weights=runs/train/exp/weights/best.pt + # Train + python train.py --img 64 --batch 32 --weights ${{ matrix.model }}.pt --cfg ${{ matrix.model }}.yaml --epochs 1 --device $d + # Val + python val.py --img 64 --batch 32 --weights ${{ matrix.model }}.pt --device $d + python val.py --img 64 --batch 32 --weights $weights --device $d + # Detect + python detect.py --weights ${{ matrix.model }}.pt --device $d + python detect.py --weights $weights --device $d python hubconf.py # hub - python models/yolo.py --cfg models/${{ matrix.model }}.yaml # inspect - python models/export.py --img 128 --batch 1 --weights weights/${{ matrix.model }}.pt # export + # Export + python models/yolo.py --cfg ${{ matrix.model }}.yaml # build PyTorch model + python models/tf.py --weights ${{ matrix.model }}.pt # build TensorFlow model + python export.py --weights ${{ matrix.model }}.pt --img 64 --include torchscript onnx # export + # Python + python - <=1.7`. To install run: + [**Python>=3.7.0**](https://www.python.org/) with all [requirements.txt](https://github.com/ultralytics/yolov5/blob/master/requirements.txt) installed including [**PyTorch>=1.7**](https://pytorch.org/get-started/locally/). To get started: ```bash - $ pip install -r requirements.txt + git clone https://github.com/ultralytics/yolov5 # clone + cd yolov5 + pip install -r requirements.txt # install ``` ## Environments - + YOLOv5 may be run in any of the following up-to-date verified environments (with all dependencies including [CUDA](https://developer.nvidia.com/cuda)/[CUDNN](https://developer.nvidia.com/cudnn), [Python](https://www.python.org/) and [PyTorch](https://pytorch.org/) preinstalled): - + - **Google Colab and Kaggle** notebooks with free GPU: Open In Colab Open In Kaggle - **Google Cloud** Deep Learning VM. See [GCP Quickstart Guide](https://github.com/ultralytics/yolov5/wiki/GCP-Quickstart) - **Amazon** Deep Learning AMI. See [AWS Quickstart Guide](https://github.com/ultralytics/yolov5/wiki/AWS-Quickstart) - **Docker Image**. See [Docker Quickstart Guide](https://github.com/ultralytics/yolov5/wiki/Docker-Quickstart) Docker Pulls - - + + ## Status - - ![CI CPU testing](https://github.com/ultralytics/yolov5/workflows/CI%20CPU%20testing/badge.svg) - - If this badge is green, all [YOLOv5 GitHub Actions](https://github.com/ultralytics/yolov5/actions) Continuous Integration (CI) tests are currently passing. 
CI tests verify correct operation of YOLOv5 training ([train.py](https://github.com/ultralytics/yolov5/blob/master/train.py)), testing ([test.py](https://github.com/ultralytics/yolov5/blob/master/test.py)), inference ([detect.py](https://github.com/ultralytics/yolov5/blob/master/detect.py)) and export ([export.py](https://github.com/ultralytics/yolov5/blob/master/models/export.py)) on MacOS, Windows, and Ubuntu every 24 hours and on every commit. - + + CI CPU testing + + If this badge is green, all [YOLOv5 GitHub Actions](https://github.com/ultralytics/yolov5/actions) Continuous Integration (CI) tests are currently passing. CI tests verify correct operation of YOLOv5 training ([train.py](https://github.com/ultralytics/yolov5/blob/master/train.py)), validation ([val.py](https://github.com/ultralytics/yolov5/blob/master/val.py)), inference ([detect.py](https://github.com/ultralytics/yolov5/blob/master/detect.py)) and export ([export.py](https://github.com/ultralytics/yolov5/blob/master/export.py)) on MacOS, Windows, and Ubuntu every 24 hours and on every commit. diff --git a/.github/workflows/rebase.yml b/.github/workflows/rebase.yml index e86c57744b84..75c57546166b 100644 --- a/.github/workflows/rebase.yml +++ b/.github/workflows/rebase.yml @@ -1,10 +1,9 @@ -name: Automatic Rebase # https://github.com/marketplace/actions/automatic-rebase +name: Automatic Rebase on: issue_comment: types: [created] - jobs: rebase: name: Rebase @@ -12,10 +11,11 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout the latest code - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: - fetch-depth: 0 + token: ${{ secrets.ACTIONS_TOKEN }} + fetch-depth: 0 # otherwise, you will fail to push refs to dest repo - name: Automatic Rebase - uses: cirrus-actions/rebase@1.3.1 + uses: cirrus-actions/rebase@1.5 env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GITHUB_TOKEN: ${{ secrets.ACTIONS_TOKEN }} diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index 0a094e237b34..7a83950c17b7 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -1,18 +1,38 @@ +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license + name: Close stale issues on: schedule: - - cron: "0 0 * * *" + - cron: '0 0 * * *' # Runs at 00:00 UTC every day jobs: stale: runs-on: ubuntu-latest steps: - - uses: actions/stale@v3 + - uses: actions/stale@v4 with: repo-token: ${{ secrets.GITHUB_TOKEN }} - stale-issue-message: 'This issue has been automatically marked as stale because it has not had recent activity. It will be closed if no further activity occurs. Thank you for your contributions.' - stale-pr-message: 'This issue has been automatically marked as stale because it has not had recent activity. It will be closed if no further activity occurs. Thank you for your contributions.' + stale-issue-message: | + πŸ‘‹ Hello, this issue has been automatically marked as stale because it has not had recent activity. Please note it will be closed if no further activity occurs. 
+ + Access additional [YOLOv5](https://ultralytics.com/yolov5) πŸš€ resources: + - **Wiki** – https://github.com/ultralytics/yolov5/wiki + - **Tutorials** – https://github.com/ultralytics/yolov5#tutorials + - **Docs** – https://docs.ultralytics.com + + Access additional [Ultralytics](https://ultralytics.com) ⚑ resources: + - **Ultralytics HUB** – https://ultralytics.com/hub + - **Vision API** – https://ultralytics.com/yolov5 + - **About Us** – https://ultralytics.com/about + - **Join Our Team** – https://ultralytics.com/work + - **Contact Us** – https://ultralytics.com/contact + + Feel free to inform us of any other **issues** you discover or **feature requests** that come to mind in the future. Pull Requests (PRs) are also always welcomed! + + Thank you for your contributions to YOLOv5 πŸš€ and Vision AI ⭐! + + stale-pr-message: 'This pull request has been automatically marked as stale because it has not had recent activity. It will be closed if no further activity occurs. Thank you for your contributions YOLOv5 πŸš€ and Vision AI ⭐.' days-before-stale: 30 days-before-close: 5 - exempt-issue-labels: 'documentation,tutorial' + exempt-issue-labels: 'documentation,tutorial,TODO' operations-per-run: 100 # The maximum number of operations per run, used to control rate limiting. diff --git a/.gitignore b/.gitignore index 91ce33fb931e..69a00843ea42 100755 --- a/.gitignore +++ b/.gitignore @@ -19,26 +19,23 @@ *.avi *.data *.json - *.cfg +!setup.cfg !cfg/yolov3*.cfg storage.googleapis.com runs/* data/* +data/images/* +!data/*.yaml +!data/hyps +!data/scripts +!data/images !data/images/zidane.jpg !data/images/bus.jpg -!data/coco.names -!data/coco_paper.names -!data/coco.data -!data/coco_*.data -!data/coco_*.txt -!data/trainvalno5k.shapes !data/*.sh -pycocotools/* -results*.txt -gcp_test*.sh +results*.csv # Datasets ------------------------------------------------------------------------------------------------------------- coco/ @@ -53,9 +50,16 @@ VOC/ # Neural Network weights ----------------------------------------------------------------------------------------------- *.weights *.pt +*.pb *.onnx +*.engine *.mlmodel *.torchscript +*.tflite +*.h5 +*_saved_model/ +*_web_model/ +*_openvino_model/ darknet53.conv.74 yolov3-tiny.conv.15 @@ -84,7 +88,7 @@ sdist/ var/ wheels/ *.egg-info/ -wandb/ +/wandb/ .installed.cfg *.egg diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 000000000000..526a5609fdd7 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,66 @@ +# Define hooks for code formations +# Will be applied on any updated commit files if a user has installed and linked commit hook + +default_language_version: + python: python3.8 + +# Define bot property if installed via https://github.com/marketplace/pre-commit-ci +ci: + autofix_prs: true + autoupdate_commit_msg: '[pre-commit.ci] pre-commit suggestions' + autoupdate_schedule: quarterly + # submodules: true + +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.1.0 + hooks: + - id: end-of-file-fixer + - id: trailing-whitespace + - id: check-case-conflict + - id: check-yaml + - id: check-toml + - id: pretty-format-json + - id: check-docstring-first + + - repo: https://github.com/asottile/pyupgrade + rev: v2.31.0 + hooks: + - id: pyupgrade + args: [--py36-plus] + name: Upgrade code + + - repo: https://github.com/PyCQA/isort + rev: 5.10.1 + hooks: + - id: isort + name: Sort imports + + # TODO + #- repo: https://github.com/pre-commit/mirrors-yapf + # rev: v0.31.0 + 
# hooks: + # - id: yapf + # name: formatting + + # TODO + #- repo: https://github.com/executablebooks/mdformat + # rev: 0.7.7 + # hooks: + # - id: mdformat + # additional_dependencies: + # - mdformat-gfm + # - mdformat-black + # - mdformat_frontmatter + + # TODO + #- repo: https://github.com/asottile/yesqa + # rev: v1.2.3 + # hooks: + # - id: yesqa + + - repo: https://github.com/PyCQA/flake8 + rev: 4.0.1 + hooks: + - id: flake8 + name: PEP8 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 000000000000..ebde03a562a0 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,94 @@ +## Contributing to YOLOv5 πŸš€ + +We love your input! We want to make contributing to YOLOv5 as easy and transparent as possible, whether it's: + +- Reporting a bug +- Discussing the current state of the code +- Submitting a fix +- Proposing a new feature +- Becoming a maintainer + +YOLOv5 works so well due to our combined community effort, and for every small improvement you contribute you will be +helping push the frontiers of what's possible in AI πŸ˜ƒ! + +## Submitting a Pull Request (PR) πŸ› οΈ + +Submitting a PR is easy! This example shows how to submit a PR for updating `requirements.txt` in 4 steps: + +### 1. Select File to Update + +Select `requirements.txt` to update by clicking on it in GitHub. +

+[PR_step1 screenshot]
+
+### 2. Click 'Edit this file'
+
+The button is in the top-right corner.
+

+[PR_step2 screenshot]
+ +### 3. Make Changes + +Change `matplotlib` version from `3.2.2` to `3.3`. +

+[PR_step3 screenshot]
+ +### 4. Preview Changes and Submit PR + +Click on the **Preview changes** tab to verify your updates. At the bottom of the screen select 'Create a **new branch** +for this commit', assign your branch a descriptive name such as `fix/matplotlib_version` and click the green **Propose +changes** button. All done, your PR is now submitted to YOLOv5 for review and approval πŸ˜ƒ! +

+[PR_step4 screenshot]
+ +### PR recommendations + +To allow your work to be integrated as seamlessly as possible, we advise you to: + +- βœ… Verify your PR is **up-to-date with upstream/master.** If your PR is behind upstream/master an + automatic [GitHub Actions](https://github.com/ultralytics/yolov5/blob/master/.github/workflows/rebase.yml) merge may + be attempted by writing /rebase in a new comment, or by running the following code, replacing 'feature' with the name + of your local branch: + +```bash +git remote add upstream https://github.com/ultralytics/yolov5.git +git fetch upstream +# git checkout feature # <--- replace 'feature' with local branch name +git merge upstream/master +git push -u origin -f +``` + +- βœ… Verify all Continuous Integration (CI) **checks are passing**. +- βœ… Reduce changes to the absolute **minimum** required for your bug fix or feature addition. _"It is not daily increase + but daily decrease, hack away the unessential. The closer to the source, the less wastage there is."_ β€” Bruce Lee + +## Submitting a Bug Report πŸ› + +If you spot a problem with YOLOv5 please submit a Bug Report! + +For us to start investigating a possible problem we need to be able to reproduce it ourselves first. We've created a few +short guidelines below to help users provide what we need in order to get started. + +When asking a question, people will be better able to provide help if you provide **code** that they can easily +understand and use to **reproduce** the problem. This is referred to by community members as creating +a [minimum reproducible example](https://stackoverflow.com/help/minimal-reproducible-example). Your code that reproduces +the problem should be: + +* βœ… **Minimal** – Use as little code as possible that still produces the same problem +* βœ… **Complete** – Provide **all** parts someone else needs to reproduce your problem in the question itself +* βœ… **Reproducible** – Test the code you're about to provide to make sure it reproduces the problem + +In addition to the above requirements, for [Ultralytics](https://ultralytics.com/) to provide assistance your code +should be: + +* βœ… **Current** – Verify that your code is up-to-date with current + GitHub [master](https://github.com/ultralytics/yolov5/tree/master), and if necessary `git pull` or `git clone` a new + copy to ensure your problem has not already been resolved by previous commits. +* βœ… **Unmodified** – Your problem must be reproducible without any modifications to the codebase in this + repository. [Ultralytics](https://ultralytics.com/) does not provide support for custom code ⚠️. + +If you believe your problem meets all of the above criteria, please close this issue and raise a new one using the πŸ› ** +Bug Report** [template](https://github.com/ultralytics/yolov5/issues/new/choose) and providing +a [minimum reproducible example](https://stackoverflow.com/help/minimal-reproducible-example) to help us better +understand and diagnose your problem. 
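A report that satisfies the Minimal/Complete/Reproducible checklist above usually fits in a few lines. A sketch of what such a repro might look like, reusing the PyTorch Hub call from this repo's README (the model name and image URL are illustrative):

```python
import torch

# Environment details help maintainers reproduce the setup
print(torch.__version__)

# Minimal: one official model, one public image, no custom code
model = torch.hub.load('ultralytics/yolov5', 'yolov5s')
results = model('https://ultralytics.com/images/zidane.jpg')

# Complete + Reproducible: the failing call and its output together
results.print()
```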
+ +## License + +By contributing, you agree that your contributions will be licensed under +the [GPL-3.0 license](https://choosealicense.com/licenses/gpl-3.0/) diff --git a/Dockerfile b/Dockerfile index b47e5bbff194..304e8b2801a9 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,5 +1,7 @@ +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license + # Start FROM Nvidia PyTorch image https://ngc.nvidia.com/catalog/containers/nvidia:pytorch -FROM nvcr.io/nvidia/pytorch:21.03-py3 +FROM nvcr.io/nvidia/pytorch:21.10-py3 # Install linux packages RUN apt update && apt install -y zip htop screen libgl1-mesa-glx @@ -7,31 +9,36 @@ RUN apt update && apt install -y zip htop screen libgl1-mesa-glx # Install python dependencies COPY requirements.txt . RUN python -m pip install --upgrade pip -RUN pip uninstall -y nvidia-tensorboard nvidia-tensorboard-plugin-dlprof -RUN pip install --no-cache -r requirements.txt coremltools onnx gsutil notebook +RUN pip uninstall -y torch torchvision torchtext +RUN pip install --no-cache -r requirements.txt albumentations wandb gsutil notebook \ + torch==1.11.0+cu113 torchvision==0.12.0+cu113 -f https://download.pytorch.org/whl/cu113/torch_stable.html +# RUN pip install --no-cache -U torch torchvision # Create working directory RUN mkdir -p /usr/src/app WORKDIR /usr/src/app # Copy contents -COPY . /usr/src/app +RUN git clone https://github.com/ultralytics/yolov5 /usr/src/app +# COPY . /usr/src/app + +# Downloads to user config dir +ADD https://ultralytics.com/assets/Arial.ttf /root/.config/Ultralytics/ # Set environment variables -ENV HOME=/usr/src/app +# ENV HOME=/usr/src/app -# --------------------------------------------------- Extras Below --------------------------------------------------- +# Usage Examples ------------------------------------------------------------------------------------------------------- # Build and Push # t=ultralytics/yolov5:latest && sudo docker build -t $t . && sudo docker push $t -# for v in {300..303}; do t=ultralytics/coco:v$v && sudo docker build -t $t . && sudo docker push $t; done # Pull and Run # t=ultralytics/yolov5:latest && sudo docker pull $t && sudo docker run -it --ipc=host --gpus all $t # Pull and Run with local directory access -# t=ultralytics/yolov5:latest && sudo docker pull $t && sudo docker run -it --ipc=host --gpus all -v "$(pwd)"/coco:/usr/src/coco $t +# t=ultralytics/yolov5:latest && sudo docker pull $t && sudo docker run -it --ipc=host --gpus all -v "$(pwd)"/datasets:/usr/src/datasets $t # Kill all # sudo docker kill $(sudo docker ps -q) @@ -45,8 +52,14 @@ ENV HOME=/usr/src/app # Bash into stopped container # id=$(sudo docker ps -qa) && sudo docker start $id && sudo docker exec -it $id bash -# Send weights to GCP -# python -c "from utils.general import *; strip_optimizer('runs/train/exp0_*/weights/best.pt', 'tmp.pt')" && gsutil cp tmp.pt gs://*.pt - # Clean up # docker system prune -a --volumes + +# Update Ubuntu drivers +# https://www.maketecheasier.com/install-nvidia-drivers-ubuntu/ + +# DDP test +# python -m torch.distributed.run --nproc_per_node 2 --master_port 1 train.py --epochs 3 + +# GCP VM from Image +# docker.io/ultralytics/yolov5:latest diff --git a/LICENSE b/LICENSE index 9e419e042146..92b370f0e0e1 100644 --- a/LICENSE +++ b/LICENSE @@ -671,4 +671,4 @@ into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. 
But first, please read
-<https://www.gnu.org/licenses/why-not-lgpl.html>.
\ No newline at end of file
+<https://www.gnu.org/licenses/why-not-lgpl.html>.
diff --git a/README.md b/README.md
old mode 100755
new mode 100644
index b25c6fca983c..3ebc085b6c33
--- a/README.md
+++ b/README.md
@@ -1,78 +1,162 @@
-[YOLOv5 splash image]
-&nbsp;
+[YOLOv5 logo]
+
+[Badges: CI CPU testing Β· YOLOv5 Citation Β· Docker Pulls]
+[Open In Colab Β· Open In Kaggle Β· Join Forum]
+YOLOv5 πŸš€ is a family of object detection architectures and models pretrained on the COCO dataset, and represents Ultralytics + open-source research into future vision AI methods, incorporating lessons learned and best practices evolved over thousands of hours of research and development. +

+
+## Documentation
+
+See the [YOLOv5 Docs](https://docs.ultralytics.com) for full documentation on training, testing and deployment.
+
+## Quick Start Examples
+<details open>
+<summary>Install</summary>
+
+Clone repo and install [requirements.txt](https://github.com/ultralytics/yolov5/blob/master/requirements.txt) in a
+[**Python>=3.7.0**](https://www.python.org/) environment, including
+[**PyTorch>=1.7**](https://pytorch.org/get-started/locally/).

-[CI CPU testing badge]
-
-This repository represents Ultralytics open-source research into future object detection methods, and incorporates lessons learned and best practices evolved over thousands of hours of training and evolution on anonymized client datasets. **All code and models are under active development, and are subject to modification or deletion without notice.** Use at your own risk.

+```bash
+git clone https://github.com/ultralytics/yolov5  # clone
+cd yolov5
+pip install -r requirements.txt  # install
+```
+</details>
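A quick post-install smoke test, reusing the example-images command from the legacy Inference section retained below (weights auto-download on first use; the confidence threshold is illustrative):

```bash
python detect.py --source data/images --weights yolov5s.pt --conf 0.25
```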

-<details>
-  <summary>YOLOv5-P5 640 Figure (click to expand)</summary>
-
-  [YOLOv5-P5 640 figure]
-
-</details>
-<details>
-  <summary>Figure Notes (click to expand)</summary>
-
-  * GPU Speed measures end-to-end time per image averaged over 5000 COCO val2017 images using a V100 GPU with batch size 32, and includes image preprocessing, PyTorch FP16 inference, postprocessing and NMS.
-  * EfficientDet data from [google/automl](https://github.com/google/automl) at batch size 8.
-  * **Reproduce** by `python test.py --task study --data coco.yaml --iou 0.7 --weights yolov5s6.pt yolov5m6.pt yolov5l6.pt yolov5x6.pt`
-</details>
-- **April 11, 2021**: [v5.0 release](https://github.com/ultralytics/yolov5/releases/tag/v5.0): YOLOv5-P6 1280 models, [AWS](https://github.com/ultralytics/yolov5/wiki/AWS-Quickstart), [Supervise.ly](https://github.com/ultralytics/yolov5/issues/2518) and [YouTube](https://github.com/ultralytics/yolov5/pull/2752) integrations. -- **January 5, 2021**: [v4.0 release](https://github.com/ultralytics/yolov5/releases/tag/v4.0): nn.SiLU() activations, [Weights & Biases](https://wandb.ai/site?utm_campaign=repo_yolo_readme) logging, [PyTorch Hub](https://pytorch.org/hub/ultralytics_yolov5/) integration. -- **August 13, 2020**: [v3.0 release](https://github.com/ultralytics/yolov5/releases/tag/v3.0): nn.Hardswish() activations, data autodownload, native AMP. -- **July 23, 2020**: [v2.0 release](https://github.com/ultralytics/yolov5/releases/tag/v2.0): improved model definition, training and mAP. +
+<details open>
+<summary>Inference</summary>
+
+Inference with YOLOv5 and [PyTorch Hub](https://github.com/ultralytics/yolov5/issues/36).
+[Models](https://github.com/ultralytics/yolov5/tree/master/models) download automatically from the latest
+YOLOv5 [release](https://github.com/ultralytics/yolov5/releases).

-## Pretrained Checkpoints

+```python
+import torch
+
+# Model
+model = torch.hub.load('ultralytics/yolov5', 'yolov5s')  # or yolov5m, yolov5l, yolov5x, custom
+
+# Images
+img = 'https://ultralytics.com/images/zidane.jpg'  # or file, Path, PIL, OpenCV, numpy, list
+
+# Inference
+results = model(img)
+
+# Results
+results.print()  # or .show(), .save(), .crop(), .pandas(), etc.
+```
+</details>
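The `results` object above also exports detections for downstream code. A short sketch using the `.pandas()` accessor named in the final comment (the confidence filter is illustrative):

```python
import torch

model = torch.hub.load('ultralytics/yolov5', 'yolov5s')
results = model('https://ultralytics.com/images/zidane.jpg')

# One DataFrame per image: xmin, ymin, xmax, ymax, confidence, class, name
detections = results.pandas().xyxy[0]
print(detections[detections['confidence'] > 0.5])  # keep confident detections
```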
-
-[assets]: https://github.com/ultralytics/yolov5/releases
-
-Model |size<br><sup>(pixels) |mAP<sup>val<br>0.5:0.95 |mAP<sup>test<br>0.5:0.95 |mAP<sup>val<br>0.5 |Speed<br><sup>V100 (ms) | |params<br><sup>(M) |FLOPS<br><sup>640 (B)
---- |--- |--- |--- |--- |--- |---|--- |---
-[YOLOv5s][assets] |640 |36.7 |36.7 |55.4 |**2.0** | |7.3 |17.0
-[YOLOv5m][assets] |640 |44.5 |44.5 |63.1 |2.7 | |21.4 |51.3
-[YOLOv5l][assets] |640 |48.2 |48.2 |66.9 |3.8 | |47.0 |115.4
-[YOLOv5x][assets] |640 |**50.4** |**50.4** |**68.8** |6.1 | |87.7 |218.8
-| | | | | | || |
-[YOLOv5s6][assets] |1280 |43.3 |43.3 |61.9 |**4.3** | |12.7 |17.4
-[YOLOv5m6][assets] |1280 |50.5 |50.5 |68.7 |8.4 | |35.9 |52.4
-[YOLOv5l6][assets] |1280 |53.4 |53.4 |71.1 |12.3 | |77.2 |117.7
-[YOLOv5x6][assets] |1280 |**54.4** |**54.4** |**72.0** |22.4 | |141.8 |222.9
-| | | | | | || |
-[YOLOv5x6][assets] TTA |1280 |**55.0** |**55.0** |**72.0** |70.8 | |- |-
-<details>
-  <summary>Table Notes (click to expand)</summary>
-
-  * APtest denotes COCO [test-dev2017](http://cocodataset.org/#upload) server results, all other AP results denote val2017 accuracy.
-  * AP values are for single-model single-scale unless otherwise noted. **Reproduce mAP** by `python test.py --data coco.yaml --img 640 --conf 0.001 --iou 0.65`
-  * SpeedGPU averaged over 5000 COCO val2017 images using a GCP [n1-standard-16](https://cloud.google.com/compute/docs/machine-types#n1_standard_machine_types) V100 instance, and includes FP16 inference, postprocessing and NMS. **Reproduce speed** by `python test.py --data coco.yaml --img 640 --conf 0.25 --iou 0.45`
-  * All checkpoints are trained to 300 epochs with default settings and hyperparameters (no autoaugmentation).
-  * Test Time Augmentation ([TTA](https://github.com/ultralytics/yolov5/issues/303)) includes reflection and scale augmentation. **Reproduce TTA** by `python test.py --data coco.yaml --img 1536 --iou 0.7 --augment`
-</details>

+<details>
+<summary>Inference with detect.py</summary>
+
+`detect.py` runs inference on a variety of sources, downloading [models](https://github.com/ultralytics/yolov5/tree/master/models) automatically from
+the latest YOLOv5 [release](https://github.com/ultralytics/yolov5/releases) and saving results to `runs/detect`.
+
+```bash
+python detect.py --source 0  # webcam
+                          img.jpg  # image
+                          vid.mp4  # video
+                          path/  # directory
+                          path/*.jpg  # glob
+                          'https://youtu.be/Zgi9g1ksQHc'  # YouTube
+                          'rtsp://example.com/media.mp4'  # RTSP, RTMP, HTTP stream
+```
+</details>

+<details open>
+<summary>Training</summary>
+
-## Requirements
-
+The commands below reproduce YOLOv5 [COCO](https://github.com/ultralytics/yolov5/blob/master/data/scripts/get_coco.sh)
+results. [Models](https://github.com/ultralytics/yolov5/tree/master/models)
+and [datasets](https://github.com/ultralytics/yolov5/tree/master/data) download automatically from the latest
+YOLOv5 [release](https://github.com/ultralytics/yolov5/releases). Training times for YOLOv5n/s/m/l/x are
+1/2/4/6/8 days on a V100 GPU ([Multi-GPU](https://github.com/ultralytics/yolov5/issues/475) times faster, as sketched below). Use the
+largest `--batch-size` possible, or pass `--batch-size -1` for
+YOLOv5 [AutoBatch](https://github.com/ultralytics/yolov5/pull/5092). Batch sizes shown for V100-16GB.

-Python 3.8 or later with all [requirements.txt](https://github.com/ultralytics/yolov5/blob/master/requirements.txt) dependencies installed, including `torch>=1.7`. To install run:
-
 ```bash
-$ pip install -r requirements.txt
+python train.py --data coco.yaml --cfg yolov5n.yaml --weights '' --batch-size 128
+                                       yolov5s                                64
+                                       yolov5m                                40
+                                       yolov5l                                24
+                                       yolov5x                                16
 ```
+</details>
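For the Multi-GPU speedups referenced above, the DDP launch form noted in this PR's Dockerfile comments applies. A sketch for two GPUs (the GPU count and batch size are illustrative; `--batch-size` is the total across devices):

```bash
python -m torch.distributed.run --nproc_per_node 2 train.py --data coco.yaml --cfg yolov5n.yaml --weights '' --batch-size 128
```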
-## Tutorials
+<details open>
+<summary>Tutorials</summary>
+
* [Train Custom Data](https://github.com/ultralytics/yolov5/wiki/Train-Custom-Data)  πŸš€ RECOMMENDED
-* [Tips for Best Training Results](https://github.com/ultralytics/yolov5/wiki/Tips-for-Best-Training-Results)  ☘️ RECOMMENDED
+* [Tips for Best Training Results](https://github.com/ultralytics/yolov5/wiki/Tips-for-Best-Training-Results)  ☘️ RECOMMENDED
* [Weights & Biases Logging](https://github.com/ultralytics/yolov5/issues/1289)  🌟 NEW
-* [Supervisely Ecosystem](https://github.com/ultralytics/yolov5/issues/2518)  🌟 NEW
+* [Roboflow for Datasets, Labeling, and Active Learning](https://github.com/ultralytics/yolov5/issues/4975)  🌟 NEW
* [Multi-GPU Training](https://github.com/ultralytics/yolov5/issues/475)
* [PyTorch Hub](https://github.com/ultralytics/yolov5/issues/36)  ⭐ NEW
-* [TorchScript, ONNX, CoreML Export](https://github.com/ultralytics/yolov5/issues/251) πŸš€
+* [TFLite, ONNX, CoreML, TensorRT Export](https://github.com/ultralytics/yolov5/issues/251) πŸš€ (see the sketch after this list)
* [Test-Time Augmentation (TTA)](https://github.com/ultralytics/yolov5/issues/303)
* [Model Ensembling](https://github.com/ultralytics/yolov5/issues/318)
* [Model Pruning/Sparsity](https://github.com/ultralytics/yolov5/issues/304)
@@ -80,91 +164,141 @@ $ pip install -r requirements.txt
* [Transfer Learning with Frozen Layers](https://github.com/ultralytics/yolov5/issues/1314)  ⭐ NEW
* [TensorRT Deployment](https://github.com/wang-xinyu/tensorrtx)
+
+</details>
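The export tutorial above uses the same entry point exercised by this PR's CI workflow. A minimal sketch of the call (weights and image size are illustrative; formats as in the CI job):

```bash
python export.py --weights yolov5s.pt --img 640 --include torchscript onnx
```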
-## Environments - -YOLOv5 may be run in any of the following up-to-date verified environments (with all dependencies including [CUDA](https://developer.nvidia.com/cuda)/[CUDNN](https://developer.nvidia.com/cudnn), [Python](https://www.python.org/) and [PyTorch](https://pytorch.org/) preinstalled): - -- **Google Colab and Kaggle** notebooks with free GPU: Open In Colab Open In Kaggle -- **Google Cloud** Deep Learning VM. See [GCP Quickstart Guide](https://github.com/ultralytics/yolov5/wiki/GCP-Quickstart) -- **Amazon** Deep Learning AMI. See [AWS Quickstart Guide](https://github.com/ultralytics/yolov5/wiki/AWS-Quickstart) -- **Docker Image**. See [Docker Quickstart Guide](https://github.com/ultralytics/yolov5/wiki/Docker-Quickstart) Docker Pulls - - -## Inference - -`detect.py` runs inference on a variety of sources, downloading models automatically from the [latest YOLOv5 release](https://github.com/ultralytics/yolov5/releases) and saving results to `runs/detect`. -```bash -$ python detect.py --source 0 # webcam - file.jpg # image - file.mp4 # video - path/ # directory - path/*.jpg # glob - 'https://youtu.be/NUsoVlDFqZg' # YouTube video - 'rtsp://example.com/media.mp4' # RTSP, RTMP, HTTP stream -``` - -To run inference on example images in `data/images`: -```bash -$ python detect.py --source data/images --weights yolov5s.pt --conf 0.25 +##
Environments
-Namespace(agnostic_nms=False, augment=False, classes=None, conf_thres=0.25, device='', exist_ok=False, img_size=640, iou_thres=0.45, name='exp', project='runs/detect', save_conf=False, save_txt=False, source='data/images/', update=False, view_img=False, weights=['yolov5s.pt'])
-YOLOv5 v4.0-96-g83dc1b4 torch 1.7.0+cu101 CUDA:0 (Tesla V100-SXM2-16GB, 16160.5MB)
+Get started in seconds with our verified environments. Click each icon below for details.
+[Environment icons: Colab/Kaggle, GCP, AWS, Docker]
-Fusing layers...
-Model Summary: 224 layers, 7266973 parameters, 0 gradients, 17.0 GFLOPS
-image 1/2 /content/yolov5/data/images/bus.jpg: 640x480 4 persons, 1 bus, Done. (0.010s)
-image 2/2 /content/yolov5/data/images/zidane.jpg: 384x640 2 persons, 1 tie, Done. (0.011s)
-Results saved to runs/detect/exp2
-Done. (0.103s)
-```
-
-### PyTorch Hub
+
+## Integrations
-Inference with YOLOv5 and [PyTorch Hub](https://github.com/ultralytics/yolov5/issues/36): -```python -import torch + -# Model -model = torch.hub.load('ultralytics/yolov5', 'yolov5s') +|Weights and Biases|Roboflow ⭐ NEW| +|:-:|:-:| +|Automatically track and visualize all your YOLOv5 training runs in the cloud with [Weights & Biases](https://wandb.ai/site?utm_campaign=repo_yolo_readme)|Label and export your custom datasets directly to YOLOv5 for training with [Roboflow](https://roboflow.com/?ref=ultralytics) | -# Image -img = 'https://ultralytics.com/images/zidane.jpg' -# Inference -results = model(img) -results.print() # or .show(), .save() -``` + -Run commands below to reproduce results on [COCO](https://github.com/ultralytics/yolov5/blob/master/data/scripts/get_coco.sh) dataset (dataset auto-downloads on first use). Training times for YOLOv5s/m/l/x are 2/4/6/8 days on a single V100 (multi-GPU times faster). Use the largest `--batch-size` your GPU allows (batch sizes shown for 16 GB devices). -```bash -$ python train.py --data coco.yaml --cfg yolov5s.yaml --weights '' --batch-size 64 - yolov5m 40 - yolov5l 24 - yolov5x 16 -``` - +##
Why YOLOv5
+<p align="center">[YOLOv5 performance comparison figure]</p>
+<details>
+  <summary>YOLOv5-P5 640 Figure (click to expand)</summary>
+
+  [YOLOv5-P5 640 figure]
+
+</details>
+<details>
+  <summary>Figure Notes (click to expand)</summary>

-## Citation
-
-[![DOI](https://zenodo.org/badge/264818686.svg)](https://zenodo.org/badge/latestdoi/264818686)

+* **COCO AP val** denotes mAP@0.5:0.95 metric measured on the 5000-image [COCO val2017](http://cocodataset.org) dataset over various inference sizes from 256 to 1536.
+* **GPU Speed** measures average inference time per image on [COCO val2017](http://cocodataset.org) dataset using an [AWS p3.2xlarge](https://aws.amazon.com/ec2/instance-types/p3/) V100 instance at batch-size 32.
+* **EfficientDet** data from [google/automl](https://github.com/google/automl) at batch size 8.
+* **Reproduce** by `python val.py --task study --data coco.yaml --iou 0.7 --weights yolov5n6.pt yolov5s6.pt yolov5m6.pt yolov5l6.pt yolov5x6.pt`
+</details>

+### Pretrained Checkpoints

-## About Us

+[assets]: https://github.com/ultralytics/yolov5/releases

-Ultralytics is a U.S.-based particle physics and AI startup with over 6 years of expertise supporting government, academic and business clients. We offer a wide range of vision AI services, spanning from simple expert advice up to delivery of fully customized, end-to-end production solutions, including:
-- **Cloud-based AI** systems operating on **hundreds of HD video streams in realtime.**
-- **Edge AI** integrated into custom iOS and Android apps for realtime **30 FPS video inference.**
-- **Custom data training**, hyperparameter evolution, and model exportation to any destination.

+[TTA]: https://github.com/ultralytics/yolov5/issues/303

-For business inquiries and professional support requests please visit us at https://www.ultralytics.com.

+|Model |size<br><sup>(pixels) |mAP<sup>val<br>0.5:0.95 |mAP<sup>val<br>0.5 |Speed<br><sup>CPU b1<br>(ms) |Speed<br><sup>V100 b1<br>(ms) |Speed<br><sup>V100 b32<br>(ms) |params<br><sup>(M) |FLOPs<br><sup>@640 (B)
+|--- |--- |--- |--- |--- |--- |--- |--- |---
+|[YOLOv5n][assets] |640 |28.0 |45.7 |**45** |**6.3**|**0.6**|**1.9**|**4.5**
+|[YOLOv5s][assets] |640 |37.4 |56.8 |98 |6.4 |0.9 |7.2 |16.5
+|[YOLOv5m][assets] |640 |45.4 |64.1 |224 |8.2 |1.7 |21.2 |49.0
+|[YOLOv5l][assets] |640 |49.0 |67.3 |430 |10.1 |2.7 |46.5 |109.1
+|[YOLOv5x][assets] |640 |50.7 |68.9 |766 |12.1 |4.8 |86.7 |205.7
+| | | | | | | | |
+|[YOLOv5n6][assets] |1280 |36.0 |54.4 |153 |8.1 |2.1 |3.2 |4.6
+|[YOLOv5s6][assets] |1280 |44.8 |63.7 |385 |8.2 |3.6 |12.6 |16.8
+|[YOLOv5m6][assets] |1280 |51.3 |69.3 |887 |11.1 |6.8 |35.7 |50.0
+|[YOLOv5l6][assets] |1280 |53.7 |71.3 |1784 |15.8 |10.5 |76.8 |111.4
+|[YOLOv5x6][assets]<br>+ [TTA][TTA]|1280<br>1536 |55.0<br>**55.8** |72.7<br>**72.7** |3136<br>- |26.2<br>- |19.4<br>- |140.7<br>- |209.8<br>-
+<details>
+  <summary>Table Notes (click to expand)</summary>
+
+* All checkpoints are trained to 300 epochs with default settings. Nano and Small models use [hyp.scratch-low.yaml](https://github.com/ultralytics/yolov5/blob/master/data/hyps/hyp.scratch-low.yaml) hyps, all others use [hyp.scratch-high.yaml](https://github.com/ultralytics/yolov5/blob/master/data/hyps/hyp.scratch-high.yaml).
+* **mAPval** values are for single-model single-scale on [COCO val2017](http://cocodataset.org) dataset.<br>Reproduce by `python val.py --data coco.yaml --img 640 --conf 0.001 --iou 0.65`
+* **Speed** averaged over COCO val images using an [AWS p3.2xlarge](https://aws.amazon.com/ec2/instance-types/p3/) instance. NMS times (~1 ms/img) not included.<br>Reproduce by `python val.py --data coco.yaml --img 640 --task speed --batch 1`
+* **TTA** [Test Time Augmentation](https://github.com/ultralytics/yolov5/issues/303) includes reflection and scale augmentations.<br>Reproduce by `python val.py --data coco.yaml --img 1536 --iou 0.7 --augment`
+
+</details>
-**Issues should be raised directly in the repository.** For business inquiries or professional support requests please visit https://www.ultralytics.com or email Glenn Jocher at glenn.jocher@ultralytics.com. +##
Contribute
+ +We love your input! We want to make contributing to YOLOv5 as easy and transparent as possible. Please see our [Contributing Guide](CONTRIBUTING.md) to get started, and fill out the [YOLOv5 Survey](https://ultralytics.com/survey?utm_source=github&utm_medium=social&utm_campaign=Survey) to send us feedback on your experiences. Thank you to all our contributors! + + + +##
Contact
+ +For YOLOv5 bugs and feature requests please visit [GitHub Issues](https://github.com/ultralytics/yolov5/issues). For business inquiries or +professional support requests please visit [https://ultralytics.com/contact](https://ultralytics.com/contact). + +
+ + diff --git a/data/Argoverse.yaml b/data/Argoverse.yaml new file mode 100644 index 000000000000..312791b33a2d --- /dev/null +++ b/data/Argoverse.yaml @@ -0,0 +1,67 @@ +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license +# Argoverse-HD dataset (ring-front-center camera) http://www.cs.cmu.edu/~mengtial/proj/streaming/ by Argo AI +# Example usage: python train.py --data Argoverse.yaml +# parent +# β”œβ”€β”€ yolov5 +# └── datasets +# └── Argoverse ← downloads here + + +# Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..] +path: ../datasets/Argoverse # dataset root dir +train: Argoverse-1.1/images/train/ # train images (relative to 'path') 39384 images +val: Argoverse-1.1/images/val/ # val images (relative to 'path') 15062 images +test: Argoverse-1.1/images/test/ # test images (optional) https://eval.ai/web/challenges/challenge-page/800/overview + +# Classes +nc: 8 # number of classes +names: ['person', 'bicycle', 'car', 'motorcycle', 'bus', 'truck', 'traffic_light', 'stop_sign'] # class names + + +# Download script/URL (optional) --------------------------------------------------------------------------------------- +download: | + import json + + from tqdm import tqdm + from utils.general import download, Path + + + def argoverse2yolo(set): + labels = {} + a = json.load(open(set, "rb")) + for annot in tqdm(a['annotations'], desc=f"Converting {set} to YOLOv5 format..."): + img_id = annot['image_id'] + img_name = a['images'][img_id]['name'] + img_label_name = img_name[:-3] + "txt" + + cls = annot['category_id'] # instance class id + x_center, y_center, width, height = annot['bbox'] + x_center = (x_center + width / 2) / 1920.0 # offset and scale + y_center = (y_center + height / 2) / 1200.0 # offset and scale + width /= 1920.0 # scale + height /= 1200.0 # scale + + img_dir = set.parents[2] / 'Argoverse-1.1' / 'labels' / a['seq_dirs'][a['images'][annot['image_id']]['sid']] + if not img_dir.exists(): + img_dir.mkdir(parents=True, exist_ok=True) + + k = str(img_dir / img_label_name) + if k not in labels: + labels[k] = [] + labels[k].append(f"{cls} {x_center} {y_center} {width} {height}\n") + + for k in labels: + with open(k, "w") as f: + f.writelines(labels[k]) + + + # Download + dir = Path('../datasets/Argoverse') # dataset root dir + urls = ['https://argoverse-hd.s3.us-east-2.amazonaws.com/Argoverse-HD-Full.zip'] + download(urls, dir=dir, delete=False) + + # Convert + annotations_dir = 'Argoverse-HD/annotations/' + (dir / 'Argoverse-1.1' / 'tracking').rename(dir / 'Argoverse-1.1' / 'images') # rename 'tracking' to 'images' + for d in "train.json", "val.json": + argoverse2yolo(dir / annotations_dir / d) # convert VisDrone annotations to YOLO labels diff --git a/data/GlobalWheat2020.yaml b/data/GlobalWheat2020.yaml index f45182b43e25..c1ba289f2833 100644 --- a/data/GlobalWheat2020.yaml +++ b/data/GlobalWheat2020.yaml @@ -1,43 +1,42 @@ -# Global Wheat 2020 dataset http://www.global-wheat.com/ -# Train command: python train.py --data GlobalWheat2020.yaml -# Default dataset location is next to YOLOv5: -# /parent_folder -# /datasets/GlobalWheat2020 -# /yolov5 - - -# train and val data as 1) directory: path/images/, 2) file: path/images.txt, or 3) list: [path1/images/, path2/images/] -train: # 3422 images - - ../datasets/GlobalWheat2020/images/arvalis_1 - - ../datasets/GlobalWheat2020/images/arvalis_2 - - ../datasets/GlobalWheat2020/images/arvalis_3 - - ../datasets/GlobalWheat2020/images/ethz_1 - - ../datasets/GlobalWheat2020/images/rres_1 
- - ../datasets/GlobalWheat2020/images/inrae_1 - - ../datasets/GlobalWheat2020/images/usask_1 - -val: # 748 images (WARNING: train set contains ethz_1) - - ../datasets/GlobalWheat2020/images/ethz_1 - -test: # 1276 images - - ../datasets/GlobalWheat2020/images/utokyo_1 - - ../datasets/GlobalWheat2020/images/utokyo_2 - - ../datasets/GlobalWheat2020/images/nau_1 - - ../datasets/GlobalWheat2020/images/uq_1 - -# number of classes -nc: 1 - -# class names -names: [ 'wheat_head' ] - - -# download command/URL (optional) -------------------------------------------------------------------------------------- +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license +# Global Wheat 2020 dataset http://www.global-wheat.com/ by University of Saskatchewan +# Example usage: python train.py --data GlobalWheat2020.yaml +# parent +# β”œβ”€β”€ yolov5 +# └── datasets +# └── GlobalWheat2020 ← downloads here + + +# Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..] +path: ../datasets/GlobalWheat2020 # dataset root dir +train: # train images (relative to 'path') 3422 images + - images/arvalis_1 + - images/arvalis_2 + - images/arvalis_3 + - images/ethz_1 + - images/rres_1 + - images/inrae_1 + - images/usask_1 +val: # val images (relative to 'path') 748 images (WARNING: train set contains ethz_1) + - images/ethz_1 +test: # test images (optional) 1276 images + - images/utokyo_1 + - images/utokyo_2 + - images/nau_1 + - images/uq_1 + +# Classes +nc: 1 # number of classes +names: ['wheat_head'] # class names + + +# Download script/URL (optional) --------------------------------------------------------------------------------------- download: | from utils.general import download, Path + # Download - dir = Path('../datasets/GlobalWheat2020') # dataset directory + dir = Path(yaml['path']) # dataset root dir urls = ['https://zenodo.org/record/4298502/files/global-wheat-codalab-official.zip', 'https://github.com/ultralytics/yolov5/releases/download/v1.0/GlobalWheat2020_labels.zip'] download(urls, dir=dir) diff --git a/data/Objects365.yaml b/data/Objects365.yaml new file mode 100644 index 000000000000..bd6e5d6e1144 --- /dev/null +++ b/data/Objects365.yaml @@ -0,0 +1,113 @@ +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license +# Objects365 dataset https://www.objects365.org/ by Megvii +# Example usage: python train.py --data Objects365.yaml +# parent +# β”œβ”€β”€ yolov5 +# └── datasets +# └── Objects365 ← downloads here + + +# Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..] 
+path: ../datasets/Objects365 # dataset root dir +train: images/train # train images (relative to 'path') 1742289 images +val: images/val # val images (relative to 'path') 80000 images +test: # test images (optional) + +# Classes +nc: 365 # number of classes +names: ['Person', 'Sneakers', 'Chair', 'Other Shoes', 'Hat', 'Car', 'Lamp', 'Glasses', 'Bottle', 'Desk', 'Cup', + 'Street Lights', 'Cabinet/shelf', 'Handbag/Satchel', 'Bracelet', 'Plate', 'Picture/Frame', 'Helmet', 'Book', + 'Gloves', 'Storage box', 'Boat', 'Leather Shoes', 'Flower', 'Bench', 'Potted Plant', 'Bowl/Basin', 'Flag', + 'Pillow', 'Boots', 'Vase', 'Microphone', 'Necklace', 'Ring', 'SUV', 'Wine Glass', 'Belt', 'Monitor/TV', + 'Backpack', 'Umbrella', 'Traffic Light', 'Speaker', 'Watch', 'Tie', 'Trash bin Can', 'Slippers', 'Bicycle', + 'Stool', 'Barrel/bucket', 'Van', 'Couch', 'Sandals', 'Basket', 'Drum', 'Pen/Pencil', 'Bus', 'Wild Bird', + 'High Heels', 'Motorcycle', 'Guitar', 'Carpet', 'Cell Phone', 'Bread', 'Camera', 'Canned', 'Truck', + 'Traffic cone', 'Cymbal', 'Lifesaver', 'Towel', 'Stuffed Toy', 'Candle', 'Sailboat', 'Laptop', 'Awning', + 'Bed', 'Faucet', 'Tent', 'Horse', 'Mirror', 'Power outlet', 'Sink', 'Apple', 'Air Conditioner', 'Knife', + 'Hockey Stick', 'Paddle', 'Pickup Truck', 'Fork', 'Traffic Sign', 'Balloon', 'Tripod', 'Dog', 'Spoon', 'Clock', + 'Pot', 'Cow', 'Cake', 'Dinning Table', 'Sheep', 'Hanger', 'Blackboard/Whiteboard', 'Napkin', 'Other Fish', + 'Orange/Tangerine', 'Toiletry', 'Keyboard', 'Tomato', 'Lantern', 'Machinery Vehicle', 'Fan', + 'Green Vegetables', 'Banana', 'Baseball Glove', 'Airplane', 'Mouse', 'Train', 'Pumpkin', 'Soccer', 'Skiboard', + 'Luggage', 'Nightstand', 'Tea pot', 'Telephone', 'Trolley', 'Head Phone', 'Sports Car', 'Stop Sign', + 'Dessert', 'Scooter', 'Stroller', 'Crane', 'Remote', 'Refrigerator', 'Oven', 'Lemon', 'Duck', 'Baseball Bat', + 'Surveillance Camera', 'Cat', 'Jug', 'Broccoli', 'Piano', 'Pizza', 'Elephant', 'Skateboard', 'Surfboard', + 'Gun', 'Skating and Skiing shoes', 'Gas stove', 'Donut', 'Bow Tie', 'Carrot', 'Toilet', 'Kite', 'Strawberry', + 'Other Balls', 'Shovel', 'Pepper', 'Computer Box', 'Toilet Paper', 'Cleaning Products', 'Chopsticks', + 'Microwave', 'Pigeon', 'Baseball', 'Cutting/chopping Board', 'Coffee Table', 'Side Table', 'Scissors', + 'Marker', 'Pie', 'Ladder', 'Snowboard', 'Cookies', 'Radiator', 'Fire Hydrant', 'Basketball', 'Zebra', 'Grape', + 'Giraffe', 'Potato', 'Sausage', 'Tricycle', 'Violin', 'Egg', 'Fire Extinguisher', 'Candy', 'Fire Truck', + 'Billiards', 'Converter', 'Bathtub', 'Wheelchair', 'Golf Club', 'Briefcase', 'Cucumber', 'Cigar/Cigarette', + 'Paint Brush', 'Pear', 'Heavy Truck', 'Hamburger', 'Extractor', 'Extension Cord', 'Tong', 'Tennis Racket', + 'Folder', 'American Football', 'earphone', 'Mask', 'Kettle', 'Tennis', 'Ship', 'Swing', 'Coffee Machine', + 'Slide', 'Carriage', 'Onion', 'Green beans', 'Projector', 'Frisbee', 'Washing Machine/Drying Machine', + 'Chicken', 'Printer', 'Watermelon', 'Saxophone', 'Tissue', 'Toothbrush', 'Ice cream', 'Hot-air balloon', + 'Cello', 'French Fries', 'Scale', 'Trophy', 'Cabbage', 'Hot dog', 'Blender', 'Peach', 'Rice', 'Wallet/Purse', + 'Volleyball', 'Deer', 'Goose', 'Tape', 'Tablet', 'Cosmetics', 'Trumpet', 'Pineapple', 'Golf Ball', + 'Ambulance', 'Parking meter', 'Mango', 'Key', 'Hurdle', 'Fishing Rod', 'Medal', 'Flute', 'Brush', 'Penguin', + 'Megaphone', 'Corn', 'Lettuce', 'Garlic', 'Swan', 'Helicopter', 'Green Onion', 'Sandwich', 'Nuts', + 'Speed Limit Sign', 'Induction Cooker', 'Broom', 'Trombone', 
'Plum', 'Rickshaw', 'Goldfish', 'Kiwi fruit', + 'Router/modem', 'Poker Card', 'Toaster', 'Shrimp', 'Sushi', 'Cheese', 'Notepaper', 'Cherry', 'Pliers', 'CD', + 'Pasta', 'Hammer', 'Cue', 'Avocado', 'Hamimelon', 'Flask', 'Mushroom', 'Screwdriver', 'Soap', 'Recorder', + 'Bear', 'Eggplant', 'Board Eraser', 'Coconut', 'Tape Measure/Ruler', 'Pig', 'Showerhead', 'Globe', 'Chips', + 'Steak', 'Crosswalk Sign', 'Stapler', 'Camel', 'Formula 1', 'Pomegranate', 'Dishwasher', 'Crab', + 'Hoverboard', 'Meat ball', 'Rice Cooker', 'Tuba', 'Calculator', 'Papaya', 'Antelope', 'Parrot', 'Seal', + 'Butterfly', 'Dumbbell', 'Donkey', 'Lion', 'Urinal', 'Dolphin', 'Electric Drill', 'Hair Dryer', 'Egg tart', + 'Jellyfish', 'Treadmill', 'Lighter', 'Grapefruit', 'Game board', 'Mop', 'Radish', 'Baozi', 'Target', 'French', + 'Spring Rolls', 'Monkey', 'Rabbit', 'Pencil Case', 'Yak', 'Red Cabbage', 'Binoculars', 'Asparagus', 'Barbell', + 'Scallop', 'Noddles', 'Comb', 'Dumpling', 'Oyster', 'Table Tennis paddle', 'Cosmetics Brush/Eyeliner Pencil', + 'Chainsaw', 'Eraser', 'Lobster', 'Durian', 'Okra', 'Lipstick', 'Cosmetics Mirror', 'Curling', 'Table Tennis'] + + +# Download script/URL (optional) --------------------------------------------------------------------------------------- +download: | + from pycocotools.coco import COCO + from tqdm import tqdm + + from utils.general import Path, download, np, xyxy2xywhn + + + # Make Directories + dir = Path(yaml['path']) # dataset root dir + for p in 'images', 'labels': + (dir / p).mkdir(parents=True, exist_ok=True) + for q in 'train', 'val': + (dir / p / q).mkdir(parents=True, exist_ok=True) + + # Train, Val Splits + for split, patches in [('train', 50 + 1), ('val', 43 + 1)]: + print(f"Processing {split} in {patches} patches ...") + images, labels = dir / 'images' / split, dir / 'labels' / split + + # Download + url = f"https://dorc.ks3-cn-beijing.ksyun.com/data-set/2020Objects365%E6%95%B0%E6%8D%AE%E9%9B%86/{split}/" + if split == 'train': + download([f'{url}zhiyuan_objv2_{split}.tar.gz'], dir=dir, delete=False) # annotations json + download([f'{url}patch{i}.tar.gz' for i in range(patches)], dir=images, curl=True, delete=False, threads=8) + elif split == 'val': + download([f'{url}zhiyuan_objv2_{split}.json'], dir=dir, delete=False) # annotations json + download([f'{url}images/v1/patch{i}.tar.gz' for i in range(15 + 1)], dir=images, curl=True, delete=False, threads=8) + download([f'{url}images/v2/patch{i}.tar.gz' for i in range(16, patches)], dir=images, curl=True, delete=False, threads=8) + + # Move + for f in tqdm(images.rglob('*.jpg'), desc=f'Moving {split} images'): + f.rename(images / f.name) # move to /images/{split} + + # Labels + coco = COCO(dir / f'zhiyuan_objv2_{split}.json') + names = [x["name"] for x in coco.loadCats(coco.getCatIds())] + for cid, cat in enumerate(names): + catIds = coco.getCatIds(catNms=[cat]) + imgIds = coco.getImgIds(catIds=catIds) + for im in tqdm(coco.loadImgs(imgIds), desc=f'Class {cid + 1}/{len(names)} {cat}'): + width, height = im["width"], im["height"] + path = Path(im["file_name"]) # image filename + try: + with open(labels / path.with_suffix('.txt').name, 'a') as file: + annIds = coco.getAnnIds(imgIds=im["id"], catIds=catIds, iscrowd=None) + for a in coco.loadAnns(annIds): + x, y, w, h = a['bbox'] # bounding box in xywh (xy top-left corner) + xyxy = np.array([x, y, x + w, y + h])[None] # pixels(1,4) + x, y, w, h = xyxy2xywhn(xyxy, w=width, h=height, clip=True)[0] # normalized and clipped + file.write(f"{cid} {x:.5f} {y:.5f} {w:.5f} {h:.5f}\n") 
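+                        # Worked example (illustration only): a COCO bbox of x=10, y=20, w=100, h=50
+                        # in a 640x480 image gives xyxy = [10, 20, 110, 70]; xyxy2xywhn then returns
+                        # xc = 60/640 = 0.09375, yc = 45/480 = 0.09375, w = 100/640 = 0.15625, h = 50/480 β‰ˆ 0.10417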
+ except Exception as e: + print(e) diff --git a/data/SKU-110K.yaml b/data/SKU-110K.yaml index a8c1f25b385a..46459eab6bb7 100644 --- a/data/SKU-110K.yaml +++ b/data/SKU-110K.yaml @@ -1,39 +1,40 @@ -# SKU-110K retail items dataset https://github.com/eg4000/SKU110K_CVPR19 -# Train command: python train.py --data SKU-110K.yaml -# Default dataset location is next to YOLOv5: -# /parent_folder -# /datasets/SKU-110K -# /yolov5 +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license +# SKU-110K retail items dataset https://github.com/eg4000/SKU110K_CVPR19 by Trax Retail +# Example usage: python train.py --data SKU-110K.yaml +# parent +# β”œβ”€β”€ yolov5 +# └── datasets +# └── SKU-110K ← downloads here -# train and val data as 1) directory: path/images/, 2) file: path/images.txt, or 3) list: [path1/images/, path2/images/] -train: ../datasets/SKU-110K/train.txt # 8219 images -val: ../datasets/SKU-110K/val.txt # 588 images -test: ../datasets/SKU-110K/test.txt # 2936 images +# Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..] +path: ../datasets/SKU-110K # dataset root dir +train: train.txt # train images (relative to 'path') 8219 images +val: val.txt # val images (relative to 'path') 588 images +test: test.txt # test images (optional) 2936 images -# number of classes -nc: 1 +# Classes +nc: 1 # number of classes +names: ['object'] # class names -# class names -names: [ 'object' ] - -# download command/URL (optional) -------------------------------------------------------------------------------------- +# Download script/URL (optional) --------------------------------------------------------------------------------------- download: | import shutil from tqdm import tqdm from utils.general import np, pd, Path, download, xyxy2xywh + # Download - datasets = Path('../datasets') # download directory + dir = Path(yaml['path']) # dataset root dir + parent = Path(dir.parent) # download dir urls = ['http://trax-geometry.s3.amazonaws.com/cvpr_challenge/SKU110K_fixed.tar.gz'] - download(urls, dir=datasets, delete=False) + download(urls, dir=parent, delete=False) # Rename directories - dir = (datasets / 'SKU-110K') if dir.exists(): shutil.rmtree(dir) - (datasets / 'SKU110K_fixed').rename(dir) # rename dir + (parent / 'SKU110K_fixed').rename(dir) # rename dir (dir / 'labels').mkdir(parents=True, exist_ok=True) # create labels dir # Convert labels diff --git a/data/VOC.yaml b/data/VOC.yaml new file mode 100644 index 000000000000..be04fb1e2ecb --- /dev/null +++ b/data/VOC.yaml @@ -0,0 +1,80 @@ +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license +# PASCAL VOC dataset http://host.robots.ox.ac.uk/pascal/VOC by University of Oxford +# Example usage: python train.py --data VOC.yaml +# parent +# β”œβ”€β”€ yolov5 +# └── datasets +# └── VOC ← downloads here + + +# Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..] 
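+# Note (illustrative): each split below is resolved relative to 'path', e.g.
+#   images/train2012  ->  ../datasets/VOC/images/train2012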
+path: ../datasets/VOC +train: # train images (relative to 'path') 16551 images + - images/train2012 + - images/train2007 + - images/val2012 + - images/val2007 +val: # val images (relative to 'path') 4952 images + - images/test2007 +test: # test images (optional) + - images/test2007 + +# Classes +nc: 20 # number of classes +names: ['aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus', 'car', 'cat', 'chair', 'cow', 'diningtable', 'dog', + 'horse', 'motorbike', 'person', 'pottedplant', 'sheep', 'sofa', 'train', 'tvmonitor'] # class names + + +# Download script/URL (optional) --------------------------------------------------------------------------------------- +download: | + import xml.etree.ElementTree as ET + + from tqdm import tqdm + from utils.general import download, Path + + + def convert_label(path, lb_path, year, image_id): + def convert_box(size, box): + dw, dh = 1. / size[0], 1. / size[1] + x, y, w, h = (box[0] + box[1]) / 2.0 - 1, (box[2] + box[3]) / 2.0 - 1, box[1] - box[0], box[3] - box[2] + return x * dw, y * dh, w * dw, h * dh + + in_file = open(path / f'VOC{year}/Annotations/{image_id}.xml') + out_file = open(lb_path, 'w') + tree = ET.parse(in_file) + root = tree.getroot() + size = root.find('size') + w = int(size.find('width').text) + h = int(size.find('height').text) + + for obj in root.iter('object'): + cls = obj.find('name').text + if cls in yaml['names'] and not int(obj.find('difficult').text) == 1: + xmlbox = obj.find('bndbox') + bb = convert_box((w, h), [float(xmlbox.find(x).text) for x in ('xmin', 'xmax', 'ymin', 'ymax')]) + cls_id = yaml['names'].index(cls) # class id + out_file.write(" ".join([str(a) for a in (cls_id, *bb)]) + '\n') + + + # Download + dir = Path(yaml['path']) # dataset root dir + url = 'https://github.com/ultralytics/yolov5/releases/download/v1.0/' + urls = [url + 'VOCtrainval_06-Nov-2007.zip', # 446MB, 5012 images + url + 'VOCtest_06-Nov-2007.zip', # 438MB, 4953 images + url + 'VOCtrainval_11-May-2012.zip'] # 1.95GB, 17126 images + download(urls, dir=dir / 'images', delete=False, threads=3) + + # Convert + path = dir / f'images/VOCdevkit' + for year, image_set in ('2012', 'train'), ('2012', 'val'), ('2007', 'train'), ('2007', 'val'), ('2007', 'test'): + imgs_path = dir / 'images' / f'{image_set}{year}' + lbs_path = dir / 'labels' / f'{image_set}{year}' + imgs_path.mkdir(exist_ok=True, parents=True) + lbs_path.mkdir(exist_ok=True, parents=True) + + image_ids = open(path / f'VOC{year}/ImageSets/Main/{image_set}.txt').read().strip().split() + for id in tqdm(image_ids, desc=f'{image_set}{year}'): + f = path / f'VOC{year}/JPEGImages/{id}.jpg' # old img path + lb_path = (lbs_path / f.name).with_suffix('.txt') # new label path + f.rename(imgs_path / f.name) # move image + convert_label(path, lb_path, year, id) # convert labels to YOLO format diff --git a/data/VisDrone.yaml b/data/VisDrone.yaml index c4603b200132..2a3b2f03e674 100644 --- a/data/VisDrone.yaml +++ b/data/VisDrone.yaml @@ -1,24 +1,24 @@ -# VisDrone2019-DET dataset https://github.com/VisDrone/VisDrone-Dataset -# Train command: python train.py --data VisDrone.yaml -# Default dataset location is next to YOLOv5: -# /parent_folder -# /VisDrone -# /yolov5 +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license +# VisDrone2019-DET dataset https://github.com/VisDrone/VisDrone-Dataset by Tianjin University +# Example usage: python train.py --data VisDrone.yaml +# parent +# β”œβ”€β”€ yolov5 +# └── datasets +# └── VisDrone ← downloads here -# train and val data as 1) directory: path/images/, 2) 
file: path/images.txt, or 3) list: [path1/images/, path2/images/] -train: ../VisDrone/VisDrone2019-DET-train/images # 6471 images -val: ../VisDrone/VisDrone2019-DET-val/images # 548 images -test: ../VisDrone/VisDrone2019-DET-test-dev/images # 1610 images +# Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..] +path: ../datasets/VisDrone # dataset root dir +train: VisDrone2019-DET-train/images # train images (relative to 'path') 6471 images +val: VisDrone2019-DET-val/images # val images (relative to 'path') 548 images +test: VisDrone2019-DET-test-dev/images # test images (optional) 1610 images -# number of classes -nc: 10 +# Classes +nc: 10 # number of classes +names: ['pedestrian', 'people', 'bicycle', 'car', 'van', 'truck', 'tricycle', 'awning-tricycle', 'bus', 'motor'] -# class names -names: [ 'pedestrian', 'people', 'bicycle', 'car', 'van', 'truck', 'tricycle', 'awning-tricycle', 'bus', 'motor' ] - -# download command/URL (optional) -------------------------------------------------------------------------------------- +# Download script/URL (optional) --------------------------------------------------------------------------------------- download: | from utils.general import download, os, Path @@ -49,12 +49,12 @@ download: | # Download - dir = Path('../VisDrone') # dataset directory + dir = Path(yaml['path']) # dataset root dir urls = ['https://github.com/ultralytics/yolov5/releases/download/v1.0/VisDrone2019-DET-train.zip', 'https://github.com/ultralytics/yolov5/releases/download/v1.0/VisDrone2019-DET-val.zip', 'https://github.com/ultralytics/yolov5/releases/download/v1.0/VisDrone2019-DET-test-dev.zip', 'https://github.com/ultralytics/yolov5/releases/download/v1.0/VisDrone2019-DET-test-challenge.zip'] - download(urls, dir=dir) + download(urls, dir=dir, threads=4) # Convert for d in 'VisDrone2019-DET-train', 'VisDrone2019-DET-val', 'VisDrone2019-DET-test-dev': diff --git a/data/argoverse_hd.yaml b/data/argoverse_hd.yaml deleted file mode 100644 index 0ba314d82ce1..000000000000 --- a/data/argoverse_hd.yaml +++ /dev/null @@ -1,21 +0,0 @@ -# Argoverse-HD dataset (ring-front-center camera) http://www.cs.cmu.edu/~mengtial/proj/streaming/ -# Train command: python train.py --data argoverse_hd.yaml -# Default dataset location is next to YOLOv5: -# /parent_folder -# /argoverse -# /yolov5 - - -# download command/URL (optional) -download: bash data/scripts/get_argoverse_hd.sh - -# train and val data as 1) directory: path/images/, 2) file: path/images.txt, or 3) list: [path1/images/, path2/images/] -train: ../argoverse/Argoverse-1.1/images/train/ # 39384 images -val: ../argoverse/Argoverse-1.1/images/val/ # 15062 iamges -test: ../argoverse/Argoverse-1.1/images/test/ # Submit to: https://eval.ai/web/challenges/challenge-page/800/overview - -# number of classes -nc: 8 - -# class names -names: [ 'person', 'bicycle', 'car', 'motorcycle', 'bus', 'truck', 'traffic_light', 'stop_sign' ] diff --git a/data/coco.yaml b/data/coco.yaml index f818a49ff0fa..7494fc2f9cd1 100644 --- a/data/coco.yaml +++ b/data/coco.yaml @@ -1,35 +1,45 @@ -# COCO 2017 dataset http://cocodataset.org -# Train command: python train.py --data coco.yaml -# Default dataset location is next to YOLOv5: -# /parent_folder -# /coco -# /yolov5 - - -# download command/URL (optional) -download: bash data/scripts/get_coco.sh - -# train and val data as 1) directory: path/images/, 2) file: path/images.txt, or 3) list: [path1/images/, path2/images/] -train: 
../coco/train2017.txt # 118287 images -val: ../coco/val2017.txt # 5000 images -test: ../coco/test-dev2017.txt # 20288 of 40670 images, submit to https://competitions.codalab.org/competitions/20794 - -# number of classes -nc: 80 - -# class names -names: [ 'person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', 'train', 'truck', 'boat', 'traffic light', - 'fire hydrant', 'stop sign', 'parking meter', 'bench', 'bird', 'cat', 'dog', 'horse', 'sheep', 'cow', - 'elephant', 'bear', 'zebra', 'giraffe', 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', - 'skis', 'snowboard', 'sports ball', 'kite', 'baseball bat', 'baseball glove', 'skateboard', 'surfboard', - 'tennis racket', 'bottle', 'wine glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', 'banana', 'apple', - 'sandwich', 'orange', 'broccoli', 'carrot', 'hot dog', 'pizza', 'donut', 'cake', 'chair', 'couch', - 'potted plant', 'bed', 'dining table', 'toilet', 'tv', 'laptop', 'mouse', 'remote', 'keyboard', 'cell phone', - 'microwave', 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', 'vase', 'scissors', 'teddy bear', - 'hair drier', 'toothbrush' ] - -# Print classes -# with open('data/coco.yaml') as f: -# d = yaml.safe_load(f) # dict -# for i, x in enumerate(d['names']): -# print(i, x) +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license +# COCO 2017 dataset http://cocodataset.org by Microsoft +# Example usage: python train.py --data coco.yaml +# parent +# β”œβ”€β”€ yolov5 +# └── datasets +# └── coco ← downloads here + + +# Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..] +path: ../datasets/coco # dataset root dir +train: train2017.txt # train images (relative to 'path') 118287 images +val: val2017.txt # val images (relative to 'path') 5000 images +test: test-dev2017.txt # 20288 of 40670 images, submit to https://competitions.codalab.org/competitions/20794 + +# Classes +nc: 80 # number of classes +names: ['person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', 'train', 'truck', 'boat', 'traffic light', + 'fire hydrant', 'stop sign', 'parking meter', 'bench', 'bird', 'cat', 'dog', 'horse', 'sheep', 'cow', + 'elephant', 'bear', 'zebra', 'giraffe', 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', + 'skis', 'snowboard', 'sports ball', 'kite', 'baseball bat', 'baseball glove', 'skateboard', 'surfboard', + 'tennis racket', 'bottle', 'wine glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', 'banana', 'apple', + 'sandwich', 'orange', 'broccoli', 'carrot', 'hot dog', 'pizza', 'donut', 'cake', 'chair', 'couch', + 'potted plant', 'bed', 'dining table', 'toilet', 'tv', 'laptop', 'mouse', 'remote', 'keyboard', 'cell phone', + 'microwave', 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', 'vase', 'scissors', 'teddy bear', + 'hair drier', 'toothbrush'] # class names + + +# Download script/URL (optional) +download: | + from utils.general import download, Path + + + # Download labels + segments = False # segment or box labels + dir = Path(yaml['path']) # dataset root dir + url = 'https://github.com/ultralytics/yolov5/releases/download/v1.0/' + urls = [url + ('coco2017labels-segments.zip' if segments else 'coco2017labels.zip')] # labels + download(urls, dir=dir.parent) + + # Download data + urls = ['http://images.cocodataset.org/zips/train2017.zip', # 19G, 118k images + 'http://images.cocodataset.org/zips/val2017.zip', # 1G, 5k images + 'http://images.cocodataset.org/zips/test2017.zip'] # 7G, 41k images (optional) + download(urls, dir=dir / 
'images', threads=3) diff --git a/data/coco128.yaml b/data/coco128.yaml index 83fbc29d3404..d07c704407a1 100644 --- a/data/coco128.yaml +++ b/data/coco128.yaml @@ -1,28 +1,30 @@ -# COCO 2017 dataset http://cocodataset.org - first 128 training images -# Train command: python train.py --data coco128.yaml -# Default dataset location is next to YOLOv5: -# /parent_folder -# /coco128 -# /yolov5 +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license +# COCO128 dataset https://www.kaggle.com/ultralytics/coco128 (first 128 images from COCO train2017) by Ultralytics +# Example usage: python train.py --data coco128.yaml +# parent +# β”œβ”€β”€ yolov5 +# └── datasets +# └── coco128 ← downloads here -# download command/URL (optional) -download: https://github.com/ultralytics/yolov5/releases/download/v1.0/coco128.zip +# Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..] +path: ../datasets/coco128 # dataset root dir +train: images/train2017 # train images (relative to 'path') 128 images +val: images/train2017 # val images (relative to 'path') 128 images +test: # test images (optional) -# train and val data as 1) directory: path/images/, 2) file: path/images.txt, or 3) list: [path1/images/, path2/images/] -train: ../coco128/images/train2017/ # 128 images -val: ../coco128/images/train2017/ # 128 images +# Classes +nc: 80 # number of classes +names: ['person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', 'train', 'truck', 'boat', 'traffic light', + 'fire hydrant', 'stop sign', 'parking meter', 'bench', 'bird', 'cat', 'dog', 'horse', 'sheep', 'cow', + 'elephant', 'bear', 'zebra', 'giraffe', 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', + 'skis', 'snowboard', 'sports ball', 'kite', 'baseball bat', 'baseball glove', 'skateboard', 'surfboard', + 'tennis racket', 'bottle', 'wine glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', 'banana', 'apple', + 'sandwich', 'orange', 'broccoli', 'carrot', 'hot dog', 'pizza', 'donut', 'cake', 'chair', 'couch', + 'potted plant', 'bed', 'dining table', 'toilet', 'tv', 'laptop', 'mouse', 'remote', 'keyboard', 'cell phone', + 'microwave', 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', 'vase', 'scissors', 'teddy bear', + 'hair drier', 'toothbrush'] # class names -# number of classes -nc: 80 -# class names -names: [ 'person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', 'train', 'truck', 'boat', 'traffic light', - 'fire hydrant', 'stop sign', 'parking meter', 'bench', 'bird', 'cat', 'dog', 'horse', 'sheep', 'cow', - 'elephant', 'bear', 'zebra', 'giraffe', 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', - 'skis', 'snowboard', 'sports ball', 'kite', 'baseball bat', 'baseball glove', 'skateboard', 'surfboard', - 'tennis racket', 'bottle', 'wine glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', 'banana', 'apple', - 'sandwich', 'orange', 'broccoli', 'carrot', 'hot dog', 'pizza', 'donut', 'cake', 'chair', 'couch', - 'potted plant', 'bed', 'dining table', 'toilet', 'tv', 'laptop', 'mouse', 'remote', 'keyboard', 'cell phone', - 'microwave', 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', 'vase', 'scissors', 'teddy bear', - 'hair drier', 'toothbrush' ] +# Download script/URL (optional) +download: https://ultralytics.com/assets/coco128.zip diff --git a/data/hyp.finetune_objects365.yaml b/data/hyps/hyp.Objects365.yaml similarity index 58% rename from data/hyp.finetune_objects365.yaml rename to data/hyps/hyp.Objects365.yaml index 2b104ef2d9bf..74971740f7c7 100644 
--- a/data/hyp.finetune_objects365.yaml +++ b/data/hyps/hyp.Objects365.yaml @@ -1,3 +1,8 @@ +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license +# Hyperparameters for Objects365 training +# python train.py --weights yolov5m.pt --data Objects365.yaml --evolve +# See Hyperparameter Evolution tutorial for details https://github.com/ultralytics/yolov5#tutorials + lr0: 0.00258 lrf: 0.17 momentum: 0.779 @@ -26,3 +31,4 @@ flipud: 0.0 fliplr: 0.5 mosaic: 1.0 mixup: 0.0 +copy_paste: 0.0 diff --git a/data/hyps/hyp.VOC.yaml b/data/hyps/hyp.VOC.yaml new file mode 100644 index 000000000000..0aa4e7d9f8f5 --- /dev/null +++ b/data/hyps/hyp.VOC.yaml @@ -0,0 +1,40 @@ +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license +# Hyperparameters for VOC training +# python train.py --batch 128 --weights yolov5m6.pt --data VOC.yaml --epochs 50 --img 512 --hyp hyp.scratch-med.yaml --evolve +# See Hyperparameter Evolution tutorial for details https://github.com/ultralytics/yolov5#tutorials + +# YOLOv5 Hyperparameter Evolution Results +# Best generation: 467 +# Last generation: 996 +# metrics/precision, metrics/recall, metrics/mAP_0.5, metrics/mAP_0.5:0.95, val/box_loss, val/obj_loss, val/cls_loss +# 0.87729, 0.85125, 0.91286, 0.72664, 0.0076739, 0.0042529, 0.0013865 + +lr0: 0.00334 +lrf: 0.15135 +momentum: 0.74832 +weight_decay: 0.00025 +warmup_epochs: 3.3835 +warmup_momentum: 0.59462 +warmup_bias_lr: 0.18657 +box: 0.02 +cls: 0.21638 +cls_pw: 0.5 +obj: 0.51728 +obj_pw: 0.67198 +iou_t: 0.2 +anchor_t: 3.3744 +fl_gamma: 0.0 +hsv_h: 0.01041 +hsv_s: 0.54703 +hsv_v: 0.27739 +degrees: 0.0 +translate: 0.04591 +scale: 0.75544 +shear: 0.0 +perspective: 0.0 +flipud: 0.0 +fliplr: 0.5 +mosaic: 0.85834 +mixup: 0.04266 +copy_paste: 0.0 +anchors: 3.412 diff --git a/data/hyp.finetune.yaml b/data/hyps/hyp.finetune.yaml similarity index 98% rename from data/hyp.finetune.yaml rename to data/hyps/hyp.finetune.yaml index 1b84cff95c2c..3aa1923f78a6 100644 --- a/data/hyp.finetune.yaml +++ b/data/hyps/hyp.finetune.yaml @@ -36,3 +36,4 @@ flipud: 0.00856 fliplr: 0.5 mosaic: 1.0 mixup: 0.243 +copy_paste: 0.0 diff --git a/data/hyps/hyp.scratch-high.yaml b/data/hyps/hyp.scratch-high.yaml new file mode 100644 index 000000000000..123cc8407413 --- /dev/null +++ b/data/hyps/hyp.scratch-high.yaml @@ -0,0 +1,34 @@ +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license +# Hyperparameters for high-augmentation COCO training from scratch +# python train.py --batch 32 --cfg yolov5m6.yaml --weights '' --data coco.yaml --img 1280 --epochs 300 +# See tutorials for hyperparameter evolution https://github.com/ultralytics/yolov5#tutorials + +lr0: 0.01 # initial learning rate (SGD=1E-2, Adam=1E-3) +lrf: 0.1 # final OneCycleLR learning rate (lr0 * lrf) +momentum: 0.937 # SGD momentum/Adam beta1 +weight_decay: 0.0005 # optimizer weight decay 5e-4 +warmup_epochs: 3.0 # warmup epochs (fractions ok) +warmup_momentum: 0.8 # warmup initial momentum +warmup_bias_lr: 0.1 # warmup initial bias lr +box: 0.05 # box loss gain +cls: 0.3 # cls loss gain +cls_pw: 1.0 # cls BCELoss positive_weight +obj: 0.7 # obj loss gain (scale with pixels) +obj_pw: 1.0 # obj BCELoss positive_weight +iou_t: 0.20 # IoU training threshold +anchor_t: 4.0 # anchor-multiple threshold +# anchors: 3 # anchors per output layer (0 to ignore) +fl_gamma: 0.0 # focal loss gamma (efficientDet default gamma=1.5) +hsv_h: 0.015 # image HSV-Hue augmentation (fraction) +hsv_s: 0.7 # image HSV-Saturation augmentation (fraction) +hsv_v: 0.4 # image HSV-Value augmentation (fraction) +degrees: 0.0 # image rotation (+/- deg) 
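+# e.g. (illustration only) translate: 0.1 allows random shifts of up to Β±10% of image size (Β±64 px at --img 640)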
+translate: 0.1 # image translation (+/- fraction) +scale: 0.9 # image scale (+/- gain) +shear: 0.0 # image shear (+/- deg) +perspective: 0.0 # image perspective (+/- fraction), range 0-0.001 +flipud: 0.0 # image flip up-down (probability) +fliplr: 0.5 # image flip left-right (probability) +mosaic: 1.0 # image mosaic (probability) +mixup: 0.1 # image mixup (probability) +copy_paste: 0.1 # segment copy-paste (probability) diff --git a/data/hyps/hyp.scratch-low.yaml b/data/hyps/hyp.scratch-low.yaml new file mode 100644 index 000000000000..b9ef1d55a3b6 --- /dev/null +++ b/data/hyps/hyp.scratch-low.yaml @@ -0,0 +1,34 @@ +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license +# Hyperparameters for low-augmentation COCO training from scratch +# python train.py --batch 64 --cfg yolov5n6.yaml --weights '' --data coco.yaml --img 640 --epochs 300 --linear +# See tutorials for hyperparameter evolution https://github.com/ultralytics/yolov5#tutorials + +lr0: 0.01 # initial learning rate (SGD=1E-2, Adam=1E-3) +lrf: 0.01 # final OneCycleLR learning rate (lr0 * lrf) +momentum: 0.937 # SGD momentum/Adam beta1 +weight_decay: 0.0005 # optimizer weight decay 5e-4 +warmup_epochs: 3.0 # warmup epochs (fractions ok) +warmup_momentum: 0.8 # warmup initial momentum +warmup_bias_lr: 0.1 # warmup initial bias lr +box: 0.05 # box loss gain +cls: 0.5 # cls loss gain +cls_pw: 1.0 # cls BCELoss positive_weight +obj: 1.0 # obj loss gain (scale with pixels) +obj_pw: 1.0 # obj BCELoss positive_weight +iou_t: 0.20 # IoU training threshold +anchor_t: 4.0 # anchor-multiple threshold +# anchors: 3 # anchors per output layer (0 to ignore) +fl_gamma: 0.0 # focal loss gamma (efficientDet default gamma=1.5) +hsv_h: 0.015 # image HSV-Hue augmentation (fraction) +hsv_s: 0.7 # image HSV-Saturation augmentation (fraction) +hsv_v: 0.4 # image HSV-Value augmentation (fraction) +degrees: 0.0 # image rotation (+/- deg) +translate: 0.1 # image translation (+/- fraction) +scale: 0.5 # image scale (+/- gain) +shear: 0.0 # image shear (+/- deg) +perspective: 0.0 # image perspective (+/- fraction), range 0-0.001 +flipud: 0.0 # image flip up-down (probability) +fliplr: 0.5 # image flip left-right (probability) +mosaic: 1.0 # image mosaic (probability) +mixup: 0.0 # image mixup (probability) +copy_paste: 0.0 # segment copy-paste (probability) diff --git a/data/hyps/hyp.scratch-med.yaml b/data/hyps/hyp.scratch-med.yaml new file mode 100644 index 000000000000..d6867d7557ba --- /dev/null +++ b/data/hyps/hyp.scratch-med.yaml @@ -0,0 +1,34 @@ +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license +# Hyperparameters for medium-augmentation COCO training from scratch +# python train.py --batch 32 --cfg yolov5m6.yaml --weights '' --data coco.yaml --img 1280 --epochs 300 +# See tutorials for hyperparameter evolution https://github.com/ultralytics/yolov5#tutorials + +lr0: 0.01 # initial learning rate (SGD=1E-2, Adam=1E-3) +lrf: 0.1 # final OneCycleLR learning rate (lr0 * lrf) +momentum: 0.937 # SGD momentum/Adam beta1 +weight_decay: 0.0005 # optimizer weight decay 5e-4 +warmup_epochs: 3.0 # warmup epochs (fractions ok) +warmup_momentum: 0.8 # warmup initial momentum +warmup_bias_lr: 0.1 # warmup initial bias lr +box: 0.05 # box loss gain +cls: 0.3 # cls loss gain +cls_pw: 1.0 # cls BCELoss positive_weight +obj: 0.7 # obj loss gain (scale with pixels) +obj_pw: 1.0 # obj BCELoss positive_weight +iou_t: 0.20 # IoU training threshold +anchor_t: 4.0 # anchor-multiple threshold +# anchors: 3 # anchors per output layer (0 to ignore) +fl_gamma: 0.0 # focal loss gamma 
(efficientDet default gamma=1.5) +hsv_h: 0.015 # image HSV-Hue augmentation (fraction) +hsv_s: 0.7 # image HSV-Saturation augmentation (fraction) +hsv_v: 0.4 # image HSV-Value augmentation (fraction) +degrees: 0.0 # image rotation (+/- deg) +translate: 0.1 # image translation (+/- fraction) +scale: 0.9 # image scale (+/- gain) +shear: 0.0 # image shear (+/- deg) +perspective: 0.0 # image perspective (+/- fraction), range 0-0.001 +flipud: 0.0 # image flip up-down (probability) +fliplr: 0.5 # image flip left-right (probability) +mosaic: 1.0 # image mosaic (probability) +mixup: 0.1 # image mixup (probability) +copy_paste: 0.0 # segment copy-paste (probability) diff --git a/data/hyp.scratch.yaml b/data/hyps/hyp.scratch.yaml similarity index 98% rename from data/hyp.scratch.yaml rename to data/hyps/hyp.scratch.yaml index 44f26b6658ae..e10b9893dd50 100644 --- a/data/hyp.scratch.yaml +++ b/data/hyps/hyp.scratch.yaml @@ -31,3 +31,4 @@ flipud: 0.0 # image flip up-down (probability) fliplr: 0.5 # image flip left-right (probability) mosaic: 1.0 # image mosaic (probability) mixup: 0.0 # image mixup (probability) +copy_paste: 0.0 diff --git a/data/objects365.yaml b/data/objects365.yaml deleted file mode 100644 index eb99995903cf..000000000000 --- a/data/objects365.yaml +++ /dev/null @@ -1,102 +0,0 @@ -# Objects365 dataset https://www.objects365.org/ -# Train command: python train.py --data objects365.yaml -# Default dataset location is next to YOLOv5: -# /parent_folder -# /datasets/objects365 -# /yolov5 - -# train and val data as 1) directory: path/images/, 2) file: path/images.txt, or 3) list: [path1/images/, path2/images/] -train: ../datasets/objects365/images/train # 1742289 images -val: ../datasets/objects365/images/val # 5570 images - -# number of classes -nc: 365 - -# class names -names: [ 'Person', 'Sneakers', 'Chair', 'Other Shoes', 'Hat', 'Car', 'Lamp', 'Glasses', 'Bottle', 'Desk', 'Cup', - 'Street Lights', 'Cabinet/shelf', 'Handbag/Satchel', 'Bracelet', 'Plate', 'Picture/Frame', 'Helmet', 'Book', - 'Gloves', 'Storage box', 'Boat', 'Leather Shoes', 'Flower', 'Bench', 'Potted Plant', 'Bowl/Basin', 'Flag', - 'Pillow', 'Boots', 'Vase', 'Microphone', 'Necklace', 'Ring', 'SUV', 'Wine Glass', 'Belt', 'Monitor/TV', - 'Backpack', 'Umbrella', 'Traffic Light', 'Speaker', 'Watch', 'Tie', 'Trash bin Can', 'Slippers', 'Bicycle', - 'Stool', 'Barrel/bucket', 'Van', 'Couch', 'Sandals', 'Basket', 'Drum', 'Pen/Pencil', 'Bus', 'Wild Bird', - 'High Heels', 'Motorcycle', 'Guitar', 'Carpet', 'Cell Phone', 'Bread', 'Camera', 'Canned', 'Truck', - 'Traffic cone', 'Cymbal', 'Lifesaver', 'Towel', 'Stuffed Toy', 'Candle', 'Sailboat', 'Laptop', 'Awning', - 'Bed', 'Faucet', 'Tent', 'Horse', 'Mirror', 'Power outlet', 'Sink', 'Apple', 'Air Conditioner', 'Knife', - 'Hockey Stick', 'Paddle', 'Pickup Truck', 'Fork', 'Traffic Sign', 'Balloon', 'Tripod', 'Dog', 'Spoon', 'Clock', - 'Pot', 'Cow', 'Cake', 'Dinning Table', 'Sheep', 'Hanger', 'Blackboard/Whiteboard', 'Napkin', 'Other Fish', - 'Orange/Tangerine', 'Toiletry', 'Keyboard', 'Tomato', 'Lantern', 'Machinery Vehicle', 'Fan', - 'Green Vegetables', 'Banana', 'Baseball Glove', 'Airplane', 'Mouse', 'Train', 'Pumpkin', 'Soccer', 'Skiboard', - 'Luggage', 'Nightstand', 'Tea pot', 'Telephone', 'Trolley', 'Head Phone', 'Sports Car', 'Stop Sign', - 'Dessert', 'Scooter', 'Stroller', 'Crane', 'Remote', 'Refrigerator', 'Oven', 'Lemon', 'Duck', 'Baseball Bat', - 'Surveillance Camera', 'Cat', 'Jug', 'Broccoli', 'Piano', 'Pizza', 'Elephant', 'Skateboard', 'Surfboard', - 'Gun', 'Skating and 
Skiing shoes', 'Gas stove', 'Donut', 'Bow Tie', 'Carrot', 'Toilet', 'Kite', 'Strawberry', - 'Other Balls', 'Shovel', 'Pepper', 'Computer Box', 'Toilet Paper', 'Cleaning Products', 'Chopsticks', - 'Microwave', 'Pigeon', 'Baseball', 'Cutting/chopping Board', 'Coffee Table', 'Side Table', 'Scissors', - 'Marker', 'Pie', 'Ladder', 'Snowboard', 'Cookies', 'Radiator', 'Fire Hydrant', 'Basketball', 'Zebra', 'Grape', - 'Giraffe', 'Potato', 'Sausage', 'Tricycle', 'Violin', 'Egg', 'Fire Extinguisher', 'Candy', 'Fire Truck', - 'Billiards', 'Converter', 'Bathtub', 'Wheelchair', 'Golf Club', 'Briefcase', 'Cucumber', 'Cigar/Cigarette', - 'Paint Brush', 'Pear', 'Heavy Truck', 'Hamburger', 'Extractor', 'Extension Cord', 'Tong', 'Tennis Racket', - 'Folder', 'American Football', 'earphone', 'Mask', 'Kettle', 'Tennis', 'Ship', 'Swing', 'Coffee Machine', - 'Slide', 'Carriage', 'Onion', 'Green beans', 'Projector', 'Frisbee', 'Washing Machine/Drying Machine', - 'Chicken', 'Printer', 'Watermelon', 'Saxophone', 'Tissue', 'Toothbrush', 'Ice cream', 'Hot-air balloon', - 'Cello', 'French Fries', 'Scale', 'Trophy', 'Cabbage', 'Hot dog', 'Blender', 'Peach', 'Rice', 'Wallet/Purse', - 'Volleyball', 'Deer', 'Goose', 'Tape', 'Tablet', 'Cosmetics', 'Trumpet', 'Pineapple', 'Golf Ball', - 'Ambulance', 'Parking meter', 'Mango', 'Key', 'Hurdle', 'Fishing Rod', 'Medal', 'Flute', 'Brush', 'Penguin', - 'Megaphone', 'Corn', 'Lettuce', 'Garlic', 'Swan', 'Helicopter', 'Green Onion', 'Sandwich', 'Nuts', - 'Speed Limit Sign', 'Induction Cooker', 'Broom', 'Trombone', 'Plum', 'Rickshaw', 'Goldfish', 'Kiwi fruit', - 'Router/modem', 'Poker Card', 'Toaster', 'Shrimp', 'Sushi', 'Cheese', 'Notepaper', 'Cherry', 'Pliers', 'CD', - 'Pasta', 'Hammer', 'Cue', 'Avocado', 'Hamimelon', 'Flask', 'Mushroom', 'Screwdriver', 'Soap', 'Recorder', - 'Bear', 'Eggplant', 'Board Eraser', 'Coconut', 'Tape Measure/Ruler', 'Pig', 'Showerhead', 'Globe', 'Chips', - 'Steak', 'Crosswalk Sign', 'Stapler', 'Camel', 'Formula 1', 'Pomegranate', 'Dishwasher', 'Crab', - 'Hoverboard', 'Meat ball', 'Rice Cooker', 'Tuba', 'Calculator', 'Papaya', 'Antelope', 'Parrot', 'Seal', - 'Butterfly', 'Dumbbell', 'Donkey', 'Lion', 'Urinal', 'Dolphin', 'Electric Drill', 'Hair Dryer', 'Egg tart', - 'Jellyfish', 'Treadmill', 'Lighter', 'Grapefruit', 'Game board', 'Mop', 'Radish', 'Baozi', 'Target', 'French', - 'Spring Rolls', 'Monkey', 'Rabbit', 'Pencil Case', 'Yak', 'Red Cabbage', 'Binoculars', 'Asparagus', 'Barbell', - 'Scallop', 'Noddles', 'Comb', 'Dumpling', 'Oyster', 'Table Tennis paddle', 'Cosmetics Brush/Eyeliner Pencil', - 'Chainsaw', 'Eraser', 'Lobster', 'Durian', 'Okra', 'Lipstick', 'Cosmetics Mirror', 'Curling', 'Table Tennis' ] - - -# download command/URL (optional) -------------------------------------------------------------------------------------- -download: | - from pycocotools.coco import COCO - from tqdm import tqdm - - from utils.general import download, Path - - # Make Directories - dir = Path('../datasets/objects365') # dataset directory - for p in 'images', 'labels': - (dir / p).mkdir(parents=True, exist_ok=True) - for q in 'train', 'val': - (dir / p / q).mkdir(parents=True, exist_ok=True) - - # Download - url = "https://dorc.ks3-cn-beijing.ksyun.com/data-set/2020Objects365%E6%95%B0%E6%8D%AE%E9%9B%86/train/" - download([url + 'zhiyuan_objv2_train.tar.gz'], dir=dir, delete=False) # annotations json - download([url + f for f in [f'patch{i}.tar.gz' for i in range(51)]], dir=dir / 'images' / 'train', - curl=True, delete=False, threads=8) - - # Move - train = dir / 
'images' / 'train'
-    for f in tqdm(train.rglob('*.jpg'), desc=f'Moving images'):
-        f.rename(train / f.name)  # move to /images/train
-
-    # Labels
-    coco = COCO(dir / 'zhiyuan_objv2_train.json')
-    names = [x["name"] for x in coco.loadCats(coco.getCatIds())]
-    for cid, cat in enumerate(names):
-        catIds = coco.getCatIds(catNms=[cat])
-        imgIds = coco.getImgIds(catIds=catIds)
-        for im in tqdm(coco.loadImgs(imgIds), desc=f'Class {cid + 1}/{len(names)} {cat}'):
-            width, height = im["width"], im["height"]
-            path = Path(im["file_name"])  # image filename
-            try:
-                with open(dir / 'labels' / 'train' / path.with_suffix('.txt').name, 'a') as file:
-                    annIds = coco.getAnnIds(imgIds=im["id"], catIds=catIds, iscrowd=None)
-                    for a in coco.loadAnns(annIds):
-                        x, y, w, h = a['bbox']  # bounding box in xywh (xy top-left corner)
-                        x, y = x + w / 2, y + h / 2  # xy to center
-                        file.write(f"{cid} {x / width:.5f} {y / height:.5f} {w / width:.5f} {h / height:.5f}\n")
-
-            except Exception as e:
-                print(e)
diff --git a/data/scripts/download_weights.sh b/data/scripts/download_weights.sh
new file mode 100755
index 000000000000..e9fa65394178
--- /dev/null
+++ b/data/scripts/download_weights.sh
@@ -0,0 +1,20 @@
+#!/bin/bash
+# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license
+# Download latest models from https://github.com/ultralytics/yolov5/releases
+# Example usage: bash path/to/download_weights.sh
+# parent
+# └── yolov5
+#     β”œβ”€β”€ yolov5s.pt  ← downloads here
+#     β”œβ”€β”€ yolov5m.pt
+#     └── ...
+
+python - <<EOF
+from utils.downloads import attempt_download
+
+models = ['n', 's', 'm', 'l', 'x']
+models.extend([x + '6' for x in models])  # P6 models
+
+for x in models:
+    attempt_download(f'yolov5{x}.pt')
+EOF
diff --git a/data/scripts/get_voc.sh b/data/scripts/get_voc.sh
deleted file mode 100755
--- a/data/scripts/get_voc.sh
+++ /dev/null
-cat 2007_train.txt 2007_val.txt 2012_train.txt 2012_val.txt >train.txt
-cat 2007_train.txt 2007_val.txt 2007_test.txt 2012_train.txt 2012_val.txt >train.all.txt
-
-mkdir ../VOC ../VOC/images ../VOC/images/train ../VOC/images/val
-mkdir ../VOC/labels ../VOC/labels/train ../VOC/labels/val
-
-python3 - "$@" <<END
diff --git a/data/xView.yaml b/data/xView.yaml
new file mode 100644
--- /dev/null
+++ b/data/xView.yaml
+                    assert 59 >= cls >= 0, f'incorrect class index {cls}'
+
+                    # Write YOLO label
+                    if id not in shapes:
+                        shapes[id] = Image.open(file).size
+                    box = xyxy2xywhn(box[None].astype(np.float), w=shapes[id][0], h=shapes[id][1], clip=True)
+                    with open((labels / id).with_suffix('.txt'), 'a') as f:
+                        f.write(f"{cls} {' '.join(f'{x:.6f}' for x in box[0])}\n")  # write label.txt
+                except Exception as e:
+                    print(f'WARNING: skipping one label for {file}: {e}')
+
+
+  # Download manually from https://challenge.xviewdataset.org
+  dir = Path(yaml['path'])  # dataset root dir
+  # urls = ['https://d307kc0mrhucc3.cloudfront.net/train_labels.zip',  # train labels
+  #         'https://d307kc0mrhucc3.cloudfront.net/train_images.zip',  # 15G, 847 train images
+  #         'https://d307kc0mrhucc3.cloudfront.net/val_images.zip']  # 5G, 282 val images (no labels)
+  # download(urls, dir=dir, delete=False)
+
+  # Convert labels
+  convert_labels(dir / 'xView_train.geojson')
+
+  # Move images
+  images = Path(dir / 'images')
+  images.mkdir(parents=True, exist_ok=True)
+  Path(dir / 'train_images').rename(dir / 'images' / 'train')
+  Path(dir / 'val_images').rename(dir / 'images' / 'val')
+
+  # Split
+  autosplit(dir / 'images' / 'train')
diff --git a/detect.py b/detect.py
index d932cca3e08e..559b3414f506 100644
--- a/detect.py
+++ b/detect.py
@@ -1,97 +1,155 @@
+# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license
+"""
+Run inference on images, videos, directories, streams, etc.
+ +Usage - sources: + $ python path/to/detect.py --weights yolov5s.pt --source 0 # webcam + img.jpg # image + vid.mp4 # video + path/ # directory + path/*.jpg # glob + 'https://youtu.be/Zgi9g1ksQHc' # YouTube + 'rtsp://example.com/media.mp4' # RTSP, RTMP, HTTP stream + +Usage - formats: + $ python path/to/detect.py --weights yolov5s.pt # PyTorch + yolov5s.torchscript # TorchScript + yolov5s.onnx # ONNX Runtime or OpenCV DNN with --dnn + yolov5s.xml # OpenVINO + yolov5s.engine # TensorRT + yolov5s.mlmodel # CoreML (MacOS-only) + yolov5s_saved_model # TensorFlow SavedModel + yolov5s.pb # TensorFlow GraphDef + yolov5s.tflite # TensorFlow Lite + yolov5s_edgetpu.tflite # TensorFlow Edge TPU +""" + import argparse -import time +import os +import sys from pathlib import Path import cv2 import torch import torch.backends.cudnn as cudnn -from models.export import load_checkpoint -from utils.datasets import LoadStreams, LoadImages -from utils.general import check_img_size, check_requirements, check_imshow, non_max_suppression, apply_classifier, \ - scale_coords, xyxy2xywh, strip_optimizer, set_logging, increment_path, save_one_box -from utils.plots import colors, plot_one_box -from utils.torch_utils import select_device, load_classifier, time_synchronized - - -def detect(opt): - source, weights, view_img, save_txt, imgsz = opt.source, opt.weights, opt.view_img, opt.save_txt, opt.img_size - save_img = not opt.nosave and not source.endswith('.txt') # save inference images - webcam = source.isnumeric() or source.endswith('.txt') or source.lower().startswith( - ('rtsp://', 'rtmp://', 'http://', 'https://')) +FILE = Path(__file__).resolve() +ROOT = FILE.parents[0] # YOLOv5 root directory +if str(ROOT) not in sys.path: + sys.path.append(str(ROOT)) # add ROOT to PATH +ROOT = Path(os.path.relpath(ROOT, Path.cwd())) # relative + +from models.common import DetectMultiBackend +from utils.datasets import IMG_FORMATS, VID_FORMATS, LoadImages, LoadStreams +from utils.general import (LOGGER, check_file, check_img_size, check_imshow, check_requirements, colorstr, + increment_path, non_max_suppression, print_args, scale_coords, strip_optimizer, xyxy2xywh) +from utils.plots import Annotator, colors, save_one_box +from utils.torch_utils import select_device, time_sync +from export import load_checkpoint + + +@torch.no_grad() +def run(weights=ROOT / 'yolov5s.pt', # model.pt path(s) + source=ROOT / 'data/images', # file/dir/URL/glob, 0 for webcam + data=ROOT / 'data/coco128.yaml', # dataset.yaml path + imgsz=(640, 640), # inference size (height, width) + conf_thres=0.25, # confidence threshold + iou_thres=0.45, # NMS IOU threshold + max_det=1000, # maximum detections per image + device='', # cuda device, i.e. 
0 or 0,1,2,3 or cpu + view_img=False, # show results + save_txt=False, # save results to *.txt + save_conf=False, # save confidences in --save-txt labels + save_crop=False, # save cropped prediction boxes + nosave=False, # do not save images/videos + classes=None, # filter by class: --class 0, or --class 0 2 3 + agnostic_nms=False, # class-agnostic NMS + augment=False, # augmented inference + visualize=False, # visualize features + update=False, # update all models + project=ROOT / 'runs/detect', # save results to project/name + name='exp', # save results to project/name + exist_ok=False, # existing project/name ok, do not increment + line_thickness=3, # bounding box thickness (pixels) + hide_labels=False, # hide labels + hide_conf=False, # hide confidences + half=False, # use FP16 half-precision inference + dnn=False, # use OpenCV DNN for ONNX inference + ): + source = str(source) + save_img = not nosave and not source.endswith('.txt') # save inference images + is_file = Path(source).suffix[1:] in (IMG_FORMATS + VID_FORMATS) + is_url = source.lower().startswith(('rtsp://', 'rtmp://', 'http://', 'https://')) + webcam = source.isnumeric() or source.endswith('.txt') or (is_url and not is_file) + if is_url and is_file: + source = check_file(source) # download # Directories - save_dir = increment_path(Path(opt.project) / opt.name, exist_ok=opt.exist_ok) # increment run + save_dir = increment_path(Path(project) / name, exist_ok=exist_ok) # increment run (save_dir / 'labels' if save_txt else save_dir).mkdir(parents=True, exist_ok=True) # make dir - # Initialize - set_logging() - device = select_device(opt.device) - half = device.type != 'cpu' # half precision only supported on CUDA - # Load model - model, extras = load_checkpoint('ensemble', weights, device) # load FP32 model - stride = int(model.stride.max()) # model stride - imgsz = check_img_size(imgsz, s=stride) # check img_size - names = model.module.names if hasattr(model, 'module') else model.names # get class names - if half: - model.half() # to FP16 - - # Second-stage classifier - classify = False - if classify: - modelc = load_classifier(name='resnet101', n=2) # initialize - modelc.load_state_dict(torch.load('weights/resnet101.pt', map_location=device)['model']).to(device).eval() - - # Set Dataloader - vid_path, vid_writer = None, None + device = select_device(device) + model, extras = load_checkpoint(type_='val', weights=weights, device=device) # load FP32 model + stride, names, pt = model.stride, model.names, model.pt + imgsz = check_img_size(imgsz, s=stride) # check image size + + # Dataloader if webcam: view_img = check_imshow() cudnn.benchmark = True # set True to speed up constant image size inference - dataset = LoadStreams(source, img_size=imgsz, stride=stride) + dataset = LoadStreams(source, img_size=imgsz, stride=stride, auto=pt) + bs = len(dataset) # batch_size else: - dataset = LoadImages(source, img_size=imgsz, stride=stride) + dataset = LoadImages(source, img_size=imgsz, stride=stride, auto=pt) + bs = 1 # batch_size + vid_path, vid_writer = [None] * bs, [None] * bs # Run inference - if device.type != 'cpu': - model(torch.zeros(1, 3, imgsz, imgsz).to(device).type_as(next(model.parameters()))) # run once - t0 = time.time() - for path, img, im0s, vid_cap in dataset: - img = torch.from_numpy(img).to(device) - img = img.half() if half else img.float() # uint8 to fp16/32 - img /= 255.0 # 0 - 255 to 0.0 - 1.0 - if img.ndimension() == 3: - img = img.unsqueeze(0) + model.warmup(imgsz=(1 if pt else bs, 3, *imgsz)) # warmup + dt, 
seen = [0.0, 0.0, 0.0], 0 + for path, im, im0s, vid_cap, s in dataset: + t1 = time_sync() + im = torch.from_numpy(im).to(device) + im = im.half() if model.fp16 else im.float() # uint8 to fp16/32 + im /= 255 # 0 - 255 to 0.0 - 1.0 + if len(im.shape) == 3: + im = im[None] # expand for batch dim + t2 = time_sync() + dt[0] += t2 - t1 # Inference - t1 = time_synchronized() - pred = model(img, augment=opt.augment)[0] + visualize = increment_path(save_dir / Path(path).stem, mkdir=True) if visualize else False + pred = model(im, augment=augment, visualize=visualize) + t3 = time_sync() + dt[1] += t3 - t2 - # Apply NMS - pred = non_max_suppression(pred, opt.conf_thres, opt.iou_thres, opt.classes, opt.agnostic_nms, - max_det=opt.max_det) - t2 = time_synchronized() + # NMS + pred = non_max_suppression(pred, conf_thres, iou_thres, classes, agnostic_nms, max_det=max_det) + dt[2] += time_sync() - t3 - # Apply Classifier - if classify: - pred = apply_classifier(pred, modelc, img, im0s) + # Second-stage classifier (optional) + # pred = utils.general.apply_classifier(pred, classifier_model, im, im0s) - # Process detections - for i, det in enumerate(pred): # detections per image + # Process predictions + for i, det in enumerate(pred): # per image + seen += 1 if webcam: # batch_size >= 1 - p, s, im0, frame = path[i], f'{i}: ', im0s[i].copy(), dataset.count + p, im0, frame = path[i], im0s[i].copy(), dataset.count + s += f'{i}: ' else: - p, s, im0, frame = path, '', im0s.copy(), getattr(dataset, 'frame', 0) + p, im0, frame = path, im0s.copy(), getattr(dataset, 'frame', 0) p = Path(p) # to Path - save_path = str(save_dir / p.name) # img.jpg - txt_path = str(save_dir / 'labels' / p.stem) + ('' if dataset.mode == 'image' else f'_{frame}') # img.txt - s += '%gx%g ' % img.shape[2:] # print string + save_path = str(save_dir / p.name) # im.jpg + txt_path = str(save_dir / 'labels' / p.stem) + ('' if dataset.mode == 'image' else f'_{frame}') # im.txt + s += '%gx%g ' % im.shape[2:] # print string gn = torch.tensor(im0.shape)[[1, 0, 1, 0]] # normalization gain whwh - imc = im0.copy() if opt.save_crop else im0 # for opt.save_crop + imc = im0.copy() if save_crop else im0 # for save_crop + annotator = Annotator(im0, line_width=line_thickness, example=str(names)) if len(det): # Rescale boxes from img_size to im0 size - det[:, :4] = scale_coords(img.shape[2:], det[:, :4], im0.shape).round() + det[:, :4] = scale_coords(im.shape[2:], det[:, :4], im0.shape).round() # Print results for c in det[:, -1].unique(): @@ -102,21 +160,19 @@ def detect(opt): for *xyxy, conf, cls in reversed(det): if save_txt: # Write to file xywh = (xyxy2xywh(torch.tensor(xyxy).view(1, 4)) / gn).view(-1).tolist() # normalized xywh - line = (cls, *xywh, conf) if opt.save_conf else (cls, *xywh) # label format + line = (cls, *xywh, conf) if save_conf else (cls, *xywh) # label format with open(txt_path + '.txt', 'a') as f: f.write(('%g ' * len(line)).rstrip() % line + '\n') - if save_img or opt.save_crop or view_img: # Add bbox to image + if save_img or save_crop or view_img: # Add bbox to image c = int(cls) # integer class - label = None if opt.hide_labels else (names[c] if opt.hide_conf else f'{names[c]} {conf:.2f}') - plot_one_box(xyxy, im0, label=label, color=colors(c, True), line_thickness=opt.line_thickness) - if opt.save_crop: + label = None if hide_labels else (names[c] if hide_conf else f'{names[c]} {conf:.2f}') + annotator.box_label(xyxy, label, color=colors(c, True)) + if save_crop: save_one_box(xyxy, imc, file=save_dir / 'crops' / names[c] / 
f'{p.stem}.jpg', BGR=True) - # Print time (inference + NMS) - print(f'{s}Done. ({t2 - t1:.3f}s)') - # Stream results + im0 = annotator.result() if view_img: cv2.imshow(str(p), im0) cv2.waitKey(1) # 1 millisecond @@ -126,59 +182,72 @@ def detect(opt): if dataset.mode == 'image': cv2.imwrite(save_path, im0) else: # 'video' or 'stream' - if vid_path != save_path: # new video - vid_path = save_path - if isinstance(vid_writer, cv2.VideoWriter): - vid_writer.release() # release previous video writer + if vid_path[i] != save_path: # new video + vid_path[i] = save_path + if isinstance(vid_writer[i], cv2.VideoWriter): + vid_writer[i].release() # release previous video writer if vid_cap: # video fps = vid_cap.get(cv2.CAP_PROP_FPS) w = int(vid_cap.get(cv2.CAP_PROP_FRAME_WIDTH)) h = int(vid_cap.get(cv2.CAP_PROP_FRAME_HEIGHT)) else: # stream fps, w, h = 30, im0.shape[1], im0.shape[0] - save_path += '.mp4' - vid_writer = cv2.VideoWriter(save_path, cv2.VideoWriter_fourcc(*'mp4v'), fps, (w, h)) - vid_writer.write(im0) + save_path = str(Path(save_path).with_suffix('.mp4')) # force *.mp4 suffix on results videos + vid_writer[i] = cv2.VideoWriter(save_path, cv2.VideoWriter_fourcc(*'mp4v'), fps, (w, h)) + vid_writer[i].write(im0) + # Print time (inference-only) + LOGGER.info(f'{s}Done. ({t3 - t2:.3f}s)') + + # Print results + t = tuple(x / seen * 1E3 for x in dt) # speeds per image + LOGGER.info(f'Speed: %.1fms pre-process, %.1fms inference, %.1fms NMS per image at shape {(1, 3, *imgsz)}' % t) if save_txt or save_img: s = f"\n{len(list(save_dir.glob('labels/*.txt')))} labels saved to {save_dir / 'labels'}" if save_txt else '' - print(f"Results saved to {save_dir}{s}") - - print(f'Done. ({time.time() - t0:.3f}s)') + LOGGER.info(f"Results saved to {colorstr('bold', save_dir)}{s}") + if update: + strip_optimizer(weights) # update model (to fix SourceChangeWarning) -if __name__ == '__main__': +def parse_opt(): parser = argparse.ArgumentParser() - parser.add_argument('--weights', nargs='+', type=str, default='yolov5s.pt', help='model.pt path(s)') - parser.add_argument('--source', type=str, default='data/images', help='source') # file/folder, 0 for webcam - parser.add_argument('--img-size', type=int, default=640, help='inference size (pixels)') - parser.add_argument('--conf-thres', type=float, default=0.25, help='object confidence threshold') - parser.add_argument('--iou-thres', type=float, default=0.45, help='IOU threshold for NMS') - parser.add_argument('--max-det', type=int, default=1000, help='maximum number of detections per image') + parser.add_argument('--weights', nargs='+', type=str, default=ROOT / 'yolov5s.pt', help='model path(s)') + parser.add_argument('--source', type=str, default=ROOT / 'data/images', help='file/dir/URL/glob, 0 for webcam') + parser.add_argument('--data', type=str, default=ROOT / 'data/coco128.yaml', help='(optional) dataset.yaml path') + parser.add_argument('--imgsz', '--img', '--img-size', nargs='+', type=int, default=[640], help='inference size h,w') + parser.add_argument('--conf-thres', type=float, default=0.25, help='confidence threshold') + parser.add_argument('--iou-thres', type=float, default=0.45, help='NMS IoU threshold') + parser.add_argument('--max-det', type=int, default=1000, help='maximum detections per image') parser.add_argument('--device', default='', help='cuda device, i.e. 
0 or 0,1,2,3 or cpu') - parser.add_argument('--view-img', action='store_true', help='display results') + parser.add_argument('--view-img', action='store_true', help='show results') parser.add_argument('--save-txt', action='store_true', help='save results to *.txt') parser.add_argument('--save-conf', action='store_true', help='save confidences in --save-txt labels') parser.add_argument('--save-crop', action='store_true', help='save cropped prediction boxes') parser.add_argument('--nosave', action='store_true', help='do not save images/videos') - parser.add_argument('--classes', nargs='+', type=int, help='filter by class: --class 0, or --class 0 2 3') + parser.add_argument('--classes', nargs='+', type=int, help='filter by class: --classes 0, or --classes 0 2 3') parser.add_argument('--agnostic-nms', action='store_true', help='class-agnostic NMS') parser.add_argument('--augment', action='store_true', help='augmented inference') + parser.add_argument('--visualize', action='store_true', help='visualize features') parser.add_argument('--update', action='store_true', help='update all models') - parser.add_argument('--project', default='runs/detect', help='save results to project/name') + parser.add_argument('--project', default=ROOT / 'runs/detect', help='save results to project/name') parser.add_argument('--name', default='exp', help='save results to project/name') parser.add_argument('--exist-ok', action='store_true', help='existing project/name ok, do not increment') parser.add_argument('--line-thickness', default=3, type=int, help='bounding box thickness (pixels)') parser.add_argument('--hide-labels', default=False, action='store_true', help='hide labels') parser.add_argument('--hide-conf', default=False, action='store_true', help='hide confidences') + parser.add_argument('--half', action='store_true', help='use FP16 half-precision inference') + parser.add_argument('--dnn', action='store_true', help='use OpenCV DNN for ONNX inference') opt = parser.parse_args() - print(opt) - check_requirements(exclude=('tensorboard', 'pycocotools', 'thop')) - - with torch.no_grad(): - if opt.update: # update all models (to fix SourceChangeWarning) - for opt.weights in ['yolov5s.pt', 'yolov5m.pt', 'yolov5l.pt', 'yolov5x.pt']: - detect(opt=opt) - strip_optimizer(opt.weights) - else: - detect(opt=opt) + opt.imgsz *= 2 if len(opt.imgsz) == 1 else 1 # expand + print_args(FILE.stem, opt) + return opt + + +def main(opt): + check_requirements(exclude=('tensorboard', 'thop')) + run(**vars(opt)) + + +if __name__ == "__main__": + opt = parse_opt() + main(opt) diff --git a/export.py b/export.py new file mode 100644 index 000000000000..bcbf84a90077 --- /dev/null +++ b/export.py @@ -0,0 +1,698 @@ +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license +""" +Export a YOLOv5 PyTorch model to other formats. 
TensorFlow exports authored by https://github.com/zldrobit + +Format | `export.py --include` | Model +--- | --- | --- +PyTorch | - | yolov5s.pt +TorchScript | `torchscript` | yolov5s.torchscript +ONNX | `onnx` | yolov5s.onnx +OpenVINO | `openvino` | yolov5s_openvino_model/ +TensorRT | `engine` | yolov5s.engine +CoreML | `coreml` | yolov5s.mlmodel +TensorFlow SavedModel | `saved_model` | yolov5s_saved_model/ +TensorFlow GraphDef | `pb` | yolov5s.pb +TensorFlow Lite | `tflite` | yolov5s.tflite +TensorFlow Edge TPU | `edgetpu` | yolov5s_edgetpu.tflite +TensorFlow.js | `tfjs` | yolov5s_web_model/ + +Requirements: + $ pip install -r requirements.txt coremltools onnx onnx-simplifier onnxruntime openvino-dev tensorflow-cpu # CPU + $ pip install -r requirements.txt coremltools onnx onnx-simplifier onnxruntime-gpu openvino-dev tensorflow # GPU + +Usage: + $ python path/to/export.py --weights yolov5s.pt --include torchscript onnx openvino engine coreml tflite ... + +Inference: + $ python path/to/detect.py --weights yolov5s.pt # PyTorch + yolov5s.torchscript # TorchScript + yolov5s.onnx # ONNX Runtime or OpenCV DNN with --dnn + yolov5s.xml # OpenVINO + yolov5s.engine # TensorRT + yolov5s.mlmodel # CoreML (MacOS-only) + yolov5s_saved_model # TensorFlow SavedModel + yolov5s.pb # TensorFlow GraphDef + yolov5s.tflite # TensorFlow Lite + yolov5s_edgetpu.tflite # TensorFlow Edge TPU + +TensorFlow.js: + $ cd .. && git clone https://github.com/zldrobit/tfjs-yolov5-example.git && cd tfjs-yolov5-example + $ npm install + $ ln -s ../../yolov5/yolov5s_web_model public/yolov5s_web_model + $ npm start +""" + +import argparse +from copy import deepcopy +import json +import os +import platform +import subprocess +import sys +import time +import warnings +from pathlib import Path + +import pandas as pd +import torch +import torch.nn as nn +from torch.utils.mobile_optimizer import optimize_for_mobile + +from sparseml.pytorch.utils import ModuleExporter +from sparseml.pytorch.sparsification.quantization import skip_onnx_input_quantize + +FILE = Path(__file__).resolve() +ROOT = FILE.parents[0] # YOLOv5 root directory +if str(ROOT) not in sys.path: + sys.path.append(str(ROOT)) # add ROOT to PATH +ROOT = Path(os.path.relpath(ROOT, Path.cwd())) # relative + +from models.common import Conv, DetectMultiBackend +from models.experimental import attempt_load +from models.yolo import Detect, Model +from utils.activations import SiLU +from utils.datasets import LoadImages +from utils.general import (LOGGER, check_dataset, check_img_size, check_requirements, check_version, colorstr, + file_size, print_args, url2file, intersect_dicts) +from utils.torch_utils import select_device, torch_distributed_zero_first, is_parallel +from utils.downloads import attempt_download +from utils.sparse import SparseMLWrapper, check_download_sparsezoo_weights + + + +def export_formats(): + # YOLOv5 export formats + x = [['PyTorch', '-', '.pt', True], + ['TorchScript', 'torchscript', '.torchscript', True], + ['ONNX', 'onnx', '.onnx', True], + ['OpenVINO', 'openvino', '_openvino_model', False], + ['TensorRT', 'engine', '.engine', True], + ['CoreML', 'coreml', '.mlmodel', False], + ['TensorFlow SavedModel', 'saved_model', '_saved_model', True], + ['TensorFlow GraphDef', 'pb', '.pb', True], + ['TensorFlow Lite', 'tflite', '.tflite', False], + ['TensorFlow Edge TPU', 'edgetpu', '_edgetpu.tflite', False], + ['TensorFlow.js', 'tfjs', '_web_model', False]] + return pd.DataFrame(x, columns=['Format', 'Argument', 'Suffix', 'GPU']) + + +def 
export_torchscript(model, im, file, optimize, prefix=colorstr('TorchScript:')): + # YOLOv5 TorchScript model export + try: + LOGGER.info(f'\n{prefix} starting export with torch {torch.__version__}...') + f = file.with_suffix('.torchscript') + + ts = torch.jit.trace(model, im, strict=False) + d = {"shape": im.shape, "stride": int(max(model.stride)), "names": model.names} + extra_files = {'config.txt': json.dumps(d)} # torch._C.ExtraFilesMap() + if optimize: # https://pytorch.org/tutorials/recipes/mobile_interpreter.html + optimize_for_mobile(ts)._save_for_lite_interpreter(str(f), _extra_files=extra_files) + else: + ts.save(str(f), _extra_files=extra_files) + + LOGGER.info(f'{prefix} export success, saved as {f} ({file_size(f):.1f} MB)') + return f + except Exception as e: + LOGGER.info(f'{prefix} export failure: {e}') + + +def export_onnx(model, im, file, opset, train, dynamic, simplify, prefix=colorstr('ONNX:')): + # YOLOv5 ONNX export + try: + check_requirements(('onnx',)) + import onnx + + LOGGER.info(f'\n{prefix} starting export with onnx {onnx.__version__}...') + f = file.with_suffix('.onnx') + + # export through SparseML so quantized and pruned graphs can be corrected + save_dir = f.parent.absolute() + save_name = str(f).split(os.path.sep)[-1] + + # get the number of outputs so we know how to name and change dynamic axes + # nested outputs can be returned if model is exported with dynamic + def _count_outputs(outputs): + count = 0 + if isinstance(outputs, list) or isinstance(outputs, tuple): + for out in outputs: + count += _count_outputs(out) + else: + count += 1 + return count + + outputs = model(im) + num_outputs = _count_outputs(outputs) + input_names = ['input'] + output_names = [f'out_{i}' for i in range(num_outputs)] + dynamic_axes = {k: {0: 'batch'} for k in (input_names + output_names)} if dynamic else None + exporter = ModuleExporter(model, save_dir) + exporter.export_onnx(im, name=save_name, convert_qat=True, + input_names=input_names, output_names=output_names, dynamic_axes=dynamic_axes) + try: + skip_onnx_input_quantize(f, f) + except: + pass + + # Checks + model_onnx = onnx.load(f) # load onnx model + onnx.checker.check_model(model_onnx) # check onnx model + # LOGGER.info(onnx.helper.printable_graph(model_onnx.graph)) # print + + # Simplify + if simplify: + try: + check_requirements(('onnx-simplifier',)) + import onnxsim + + LOGGER.info(f'{prefix} simplifying with onnx-simplifier {onnxsim.__version__}...') + model_onnx, check = onnxsim.simplify( + model_onnx, + dynamic_input_shape=dynamic, + input_shapes={'images': list(im.shape)} if dynamic else None) + assert check, 'assert check failed' + onnx.save(model_onnx, f) + except Exception as e: + LOGGER.info(f'{prefix} simplifier failure: {e}') + LOGGER.info(f'{prefix} export success, saved as {f} ({file_size(f):.1f} MB)') + return f + except Exception as e: + LOGGER.info(f'{prefix} export failure: {e}') + + +def export_openvino(model, im, file, prefix=colorstr('OpenVINO:')): + # YOLOv5 OpenVINO export + try: + check_requirements(('openvino-dev',)) # requires openvino-dev: https://pypi.org/project/openvino-dev/ + import openvino.inference_engine as ie + + LOGGER.info(f'\n{prefix} starting export with openvino {ie.__version__}...') + f = str(file).replace('.pt', '_openvino_model' + os.sep) + + cmd = f"mo --input_model {file.with_suffix('.onnx')} --output_dir {f}" + subprocess.check_output(cmd, shell=True) + + LOGGER.info(f'{prefix} export success, saved as {f} ({file_size(f):.1f} MB)') + return f + except Exception as e: + 
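+        # note: export failures are logged rather than raised, so the remaining
+        # --include formats can still be exported in the same run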
LOGGER.info(f'\n{prefix} export failure: {e}') + + +def export_coreml(model, im, file, prefix=colorstr('CoreML:')): + # YOLOv5 CoreML export + try: + check_requirements(('coremltools',)) + import coremltools as ct + + LOGGER.info(f'\n{prefix} starting export with coremltools {ct.__version__}...') + f = file.with_suffix('.mlmodel') + + ts = torch.jit.trace(model, im, strict=False) # TorchScript model + ct_model = ct.convert(ts, inputs=[ct.ImageType('image', shape=im.shape, scale=1 / 255, bias=[0, 0, 0])]) + ct_model.save(f) + + LOGGER.info(f'{prefix} export success, saved as {f} ({file_size(f):.1f} MB)') + return ct_model, f + except Exception as e: + LOGGER.info(f'\n{prefix} export failure: {e}') + return None, None + + +def export_engine(model, im, file, train, half, simplify, workspace=4, verbose=False, prefix=colorstr('TensorRT:')): + # YOLOv5 TensorRT export https://developer.nvidia.com/tensorrt + try: + check_requirements(('tensorrt',)) + import tensorrt as trt + + if trt.__version__[0] == '7': # TensorRT 7 handling https://github.com/ultralytics/yolov5/issues/6012 + grid = model.model[-1].anchor_grid + model.model[-1].anchor_grid = [a[..., :1, :1, :] for a in grid] + export_onnx(model, im, file, 12, train, False, simplify) # opset 12 + model.model[-1].anchor_grid = grid + else: # TensorRT >= 8 + check_version(trt.__version__, '8.0.0', hard=True) # require tensorrt>=8.0.0 + export_onnx(model, im, file, 13, train, False, simplify) # opset 13 + onnx = file.with_suffix('.onnx') + + LOGGER.info(f'\n{prefix} starting export with TensorRT {trt.__version__}...') + assert im.device.type != 'cpu', 'export running on CPU but must be on GPU, i.e. `python export.py --device 0`' + assert onnx.exists(), f'failed to export ONNX file: {onnx}' + f = file.with_suffix('.engine') # TensorRT engine file + logger = trt.Logger(trt.Logger.INFO) + if verbose: + logger.min_severity = trt.Logger.Severity.VERBOSE + + builder = trt.Builder(logger) + config = builder.create_builder_config() + config.max_workspace_size = workspace * 1 << 30 + # config.set_memory_pool_limit(trt.MemoryPoolType.WORKSPACE, workspace << 30) # fix TRT 8.4 deprecation notice + + flag = (1 << int(trt.NetworkDefinitionCreationFlag.EXPLICIT_BATCH)) + network = builder.create_network(flag) + parser = trt.OnnxParser(network, logger) + if not parser.parse_from_file(str(onnx)): + raise RuntimeError(f'failed to load ONNX file: {onnx}') + + inputs = [network.get_input(i) for i in range(network.num_inputs)] + outputs = [network.get_output(i) for i in range(network.num_outputs)] + LOGGER.info(f'{prefix} Network Description:') + for inp in inputs: + LOGGER.info(f'{prefix}\tinput "{inp.name}" with shape {inp.shape} and dtype {inp.dtype}') + for out in outputs: + LOGGER.info(f'{prefix}\toutput "{out.name}" with shape {out.shape} and dtype {out.dtype}') + + LOGGER.info(f'{prefix} building FP{16 if builder.platform_has_fast_fp16 else 32} engine in {f}') + if builder.platform_has_fast_fp16: + config.set_flag(trt.BuilderFlag.FP16) + with builder.build_engine(network, config) as engine, open(f, 'wb') as t: + t.write(engine.serialize()) + LOGGER.info(f'{prefix} export success, saved as {f} ({file_size(f):.1f} MB)') + return f + except Exception as e: + LOGGER.info(f'\n{prefix} export failure: {e}') + + +def export_saved_model(model, im, file, dynamic, + tf_nms=False, agnostic_nms=False, topk_per_class=100, topk_all=100, iou_thres=0.45, + conf_thres=0.25, keras=False, prefix=colorstr('TensorFlow SavedModel:')): + # YOLOv5 TensorFlow SavedModel export + 
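+    # rebuilds the network as a Keras graph via models.tf, then saves either a
+    # Keras model (keras=True) or a frozen-function SavedModel for the downstream TF exports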
try: + import tensorflow as tf + from tensorflow.python.framework.convert_to_constants import convert_variables_to_constants_v2 + + from models.tf import TFDetect, TFModel + + LOGGER.info(f'\n{prefix} starting export with tensorflow {tf.__version__}...') + f = str(file).replace('.pt', '_saved_model') + batch_size, ch, *imgsz = list(im.shape) # BCHW + + tf_model = TFModel(cfg=model.yaml, model=model, nc=model.nc, imgsz=imgsz) + im = tf.zeros((batch_size, *imgsz, ch)) # BHWC order for TensorFlow + _ = tf_model.predict(im, tf_nms, agnostic_nms, topk_per_class, topk_all, iou_thres, conf_thres) + inputs = tf.keras.Input(shape=(*imgsz, ch), batch_size=None if dynamic else batch_size) + outputs = tf_model.predict(inputs, tf_nms, agnostic_nms, topk_per_class, topk_all, iou_thres, conf_thres) + keras_model = tf.keras.Model(inputs=inputs, outputs=outputs) + keras_model.trainable = False + keras_model.summary() + if keras: + keras_model.save(f, save_format='tf') + else: + m = tf.function(lambda x: keras_model(x)) # full model + spec = tf.TensorSpec(keras_model.inputs[0].shape, keras_model.inputs[0].dtype) + m = m.get_concrete_function(spec) + frozen_func = convert_variables_to_constants_v2(m) + tfm = tf.Module() + tfm.__call__ = tf.function(lambda x: frozen_func(x)[0], [spec]) + tfm.__call__(im) + tf.saved_model.save( + tfm, + f, + options=tf.saved_model.SaveOptions(experimental_custom_gradients=False) if + check_version(tf.__version__, '2.6') else tf.saved_model.SaveOptions()) + LOGGER.info(f'{prefix} export success, saved as {f} ({file_size(f):.1f} MB)') + return keras_model, f + except Exception as e: + LOGGER.info(f'\n{prefix} export failure: {e}') + return None, None + + +def export_pb(keras_model, im, file, prefix=colorstr('TensorFlow GraphDef:')): + # YOLOv5 TensorFlow GraphDef *.pb export https://github.com/leimao/Frozen_Graph_TensorFlow + try: + import tensorflow as tf + from tensorflow.python.framework.convert_to_constants import convert_variables_to_constants_v2 + + LOGGER.info(f'\n{prefix} starting export with tensorflow {tf.__version__}...') + f = file.with_suffix('.pb') + + m = tf.function(lambda x: keras_model(x)) # full model + m = m.get_concrete_function(tf.TensorSpec(keras_model.inputs[0].shape, keras_model.inputs[0].dtype)) + frozen_func = convert_variables_to_constants_v2(m) + frozen_func.graph.as_graph_def() + tf.io.write_graph(graph_or_graph_def=frozen_func.graph, logdir=str(f.parent), name=f.name, as_text=False) + + LOGGER.info(f'{prefix} export success, saved as {f} ({file_size(f):.1f} MB)') + return f + except Exception as e: + LOGGER.info(f'\n{prefix} export failure: {e}') + + +def export_tflite(keras_model, im, file, int8, data, ncalib, prefix=colorstr('TensorFlow Lite:')): + # YOLOv5 TensorFlow Lite export + try: + import tensorflow as tf + + LOGGER.info(f'\n{prefix} starting export with tensorflow {tf.__version__}...') + batch_size, ch, *imgsz = list(im.shape) # BCHW + f = str(file).replace('.pt', '-fp16.tflite') + + converter = tf.lite.TFLiteConverter.from_keras_model(keras_model) + converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS] + converter.target_spec.supported_types = [tf.float16] + converter.optimizations = [tf.lite.Optimize.DEFAULT] + if int8: + from models.tf import representative_dataset_gen + dataset = LoadImages(check_dataset(data)['train'], img_size=imgsz, auto=False) # representative data + converter.representative_dataset = lambda: representative_dataset_gen(dataset, ncalib) + converter.target_spec.supported_ops = 
[tf.lite.OpsSet.TFLITE_BUILTINS_INT8]
+            converter.target_spec.supported_types = []
+            converter.inference_input_type = tf.uint8  # or tf.int8
+            converter.inference_output_type = tf.uint8  # or tf.int8
+            converter.experimental_new_quantizer = True
+            f = str(file).replace('.pt', '-int8.tflite')
+
+        tflite_model = converter.convert()
+        with open(f, 'wb') as t:
+            t.write(tflite_model)  # write model bytes and close the file handle
+        LOGGER.info(f'{prefix} export success, saved as {f} ({file_size(f):.1f} MB)')
+        return f
+    except Exception as e:
+        LOGGER.info(f'\n{prefix} export failure: {e}')
+
+
+def export_edgetpu(keras_model, im, file, prefix=colorstr('Edge TPU:')):
+    # YOLOv5 Edge TPU export https://coral.ai/docs/edgetpu/models-intro/
+    try:
+        cmd = 'edgetpu_compiler --version'
+        help_url = 'https://coral.ai/docs/edgetpu/compiler/'
+        assert platform.system() == 'Linux', f'export only supported on Linux. See {help_url}'
+        if subprocess.run(cmd + ' >/dev/null', shell=True).returncode != 0:
+            LOGGER.info(f'\n{prefix} export requires Edge TPU compiler. Attempting install from {help_url}')
+            sudo = subprocess.run('sudo --version >/dev/null', shell=True).returncode == 0  # sudo installed on system
+            for c in ['curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | sudo apt-key add -',
+                      'echo "deb https://packages.cloud.google.com/apt coral-edgetpu-stable main" | sudo tee /etc/apt/sources.list.d/coral-edgetpu.list',
+                      'sudo apt-get update',
+                      'sudo apt-get install edgetpu-compiler']:
+                subprocess.run(c if sudo else c.replace('sudo ', ''), shell=True, check=True)
+        ver = subprocess.run(cmd, shell=True, capture_output=True, check=True).stdout.decode().split()[-1]
+
+        LOGGER.info(f'\n{prefix} starting export with Edge TPU compiler {ver}...')
+        f = str(file).replace('.pt', '-int8_edgetpu.tflite')  # Edge TPU model
+        f_tfl = str(file).replace('.pt', '-int8.tflite')  # TFLite model
+
+        cmd = f"edgetpu_compiler -s {f_tfl}"
+        subprocess.run(cmd, shell=True, check=True)
+
+        LOGGER.info(f'{prefix} export success, saved as {f} ({file_size(f):.1f} MB)')
+        return f
+    except Exception as e:
+        LOGGER.info(f'\n{prefix} export failure: {e}')
+
+
+def export_tfjs(keras_model, im, file, prefix=colorstr('TensorFlow.js:')):
+    # YOLOv5 TensorFlow.js export
+    try:
+        check_requirements(('tensorflowjs',))
+        import re
+
+        import tensorflowjs as tfjs
+
+        LOGGER.info(f'\n{prefix} starting export with tensorflowjs {tfjs.__version__}...')
+        f = str(file).replace('.pt', '_web_model')  # js dir
+        f_pb = file.with_suffix('.pb')  # *.pb path
+        f_json = f + '/model.json'  # *.json path
+
+        cmd = f'tensorflowjs_converter --input_format=tf_frozen_model ' \
+              f'--output_node_names="Identity,Identity_1,Identity_2,Identity_3" {f_pb} {f}'
+        subprocess.run(cmd, shell=True)
+
+        with open(f_json) as j:
+            json_str = j.read()  # renamed from `json` to avoid shadowing the json module imported above
+        with open(f_json, 'w') as j:  # sort JSON Identity_* in ascending order
+            subst = re.sub(
+                r'{"outputs": {"Identity.?.?": {"name": "Identity.?.?"}, '
+                r'"Identity.?.?": {"name": "Identity.?.?"}, '
+                r'"Identity.?.?": {"name": "Identity.?.?"}, '
+                r'"Identity.?.?": {"name": "Identity.?.?"}}}',
+                r'{"outputs": {"Identity": {"name": "Identity"}, '
+                r'"Identity_1": {"name": "Identity_1"}, '
+                r'"Identity_2": {"name": "Identity_2"}, '
+                r'"Identity_3": {"name": "Identity_3"}}}',
+                json_str)
+            j.write(subst)
+
+        LOGGER.info(f'{prefix} export success, saved as {f} ({file_size(f):.1f} MB)')
+        return f
+    except Exception as e:
+        LOGGER.info(f'\n{prefix} export failure: {e}')
+
+def create_checkpoint(epoch, model, optimizer, ema, sparseml_wrapper, **kwargs):
+    pickle = not sparseml_wrapper.qat_active(epoch)  # qat does not support pickled exports
+    ckpt_model = deepcopy(model.module if is_parallel(model) else model).float()
+    yaml = ckpt_model.yaml
+    if not pickle:
+        ckpt_model = ckpt_model.state_dict()
+
+    return {'epoch': epoch,
+            'model': ckpt_model,
+            'optimizer': optimizer.state_dict(),
+            'yaml': yaml,
+            'hyp': model.hyp,
+            **ema.state_dict(pickle),
+            **sparseml_wrapper.state_dict(),
+            **kwargs}
+
+def load_checkpoint(
+        type_,
+        weights,
+        device,
+        cfg=None,
+        hyp=None,
+        nc=None,
+        data=None,
+        dnn=False,
+        half=False,
+        recipe=None,
+        resume=None,
+        rank=-1
+    ):
+    with torch_distributed_zero_first(rank):
+        # download if not found locally or from sparsezoo if stub
+        weights = attempt_download(weights) or check_download_sparsezoo_weights(weights)
+    ckpt = torch.load(weights[0] if isinstance(weights, (list, tuple)) else weights, map_location="cpu")  # load checkpoint
+    start_epoch = ckpt['epoch'] + 1 if 'epoch' in ckpt else 0
+    pickled = isinstance(ckpt['model'], nn.Module)
+    train_type = type_ == 'train'
+    ensemble_type = type_ == 'ensemble'
+    val_type = type_ == 'val'
+
+    if pickled and ensemble_type:
+        cfg = None
+    if ensemble_type:
+        model = attempt_load(weights, map_location=device)  # load ensemble using pickled
+        state_dict = model.state_dict()
+    elif val_type:
+        model = DetectMultiBackend(weights, device=device, dnn=dnn, data=data, fp16=half)
+        state_dict = model.model.state_dict()
+    else:
+        # load model from config and weights
+        cfg = cfg or (ckpt['yaml'] if 'yaml' in ckpt else None) or \
+            (ckpt['model'].yaml if pickled else None)
+        model = Model(cfg, ch=3, nc=ckpt['nc'] if ('nc' in ckpt and not nc) else nc,
+                      anchors=hyp.get('anchors') if hyp else None).to(device)
+        model_key = 'ema' if (not train_type and 'ema' in ckpt and ckpt['ema']) else 'model'
+        state_dict = ckpt[model_key].float().state_dict() if pickled else ckpt[model_key]
+        if val_type:
+            model = DetectMultiBackend(model=model, device=device, dnn=dnn, data=data, fp16=half)
+
+    # turn gradients for params back on in case they were removed
+    for p in model.parameters():
+        p.requires_grad = True
+
+    # load sparseml recipe for applying pruning and quantization
+    checkpoint_recipe = train_recipe = None
+    if resume:
+        train_recipe = ckpt['recipe'] if ('recipe' in ckpt) else None
+    elif ckpt.get('recipe') or recipe:  # .get() so checkpoints without a recipe key do not raise KeyError
+        train_recipe, checkpoint_recipe = recipe, ckpt.get('recipe')
+
+    sparseml_wrapper = SparseMLWrapper(model.model if val_type else model, checkpoint_recipe, train_recipe)
+    exclude_anchors = train_type and (cfg or (hyp.get('anchors') if hyp else None)) and not resume
+    loaded = False
+
+    sparseml_wrapper.apply_checkpoint_structure()
+    if train_type:
+        # initialize the recipe for training and restore the weights before if no quantized weights
+        quantized_state_dict = any(name.endswith('.zero_point') for name in state_dict.keys())
+        if not quantized_state_dict:
+            state_dict = load_state_dict(model, state_dict, train=True, exclude_anchors=exclude_anchors)
+            loaded = True
+        sparseml_wrapper.initialize(start_epoch)
+
+    if not loaded:
+        state_dict = load_state_dict(model, state_dict, train=train_type, exclude_anchors=exclude_anchors)
+
+    model.float()
+    report = 'Transferred %g/%g items from %s' % (len(state_dict), len(model.state_dict()), weights)
+
+    return model, {
+        'ckpt': ckpt,
+        'state_dict': state_dict,
+        'sparseml_wrapper': sparseml_wrapper,
+        'report': report,
+    }
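+
+# Illustrative usage sketch for load_checkpoint() above (hypothetical call site; argument values are examples only):
+#   model, extras = load_checkpoint(type_='val', weights='yolov5s.pt', device=torch.device('cpu'))
+#   LOGGER.info(extras['report'])  # e.g. 'Transferred n/m items from yolov5s.pt'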
+
+
+def load_state_dict(model, state_dict, train, exclude_anchors):
+    # fix older state_dict names not porting to the new model setup
+    state_dict = {key if not key.startswith("module.") else key[7:]: val for key, val in state_dict.items()}
+
+    if train:
+        # load any missing weights from the model
+        state_dict = intersect_dicts(state_dict, model.state_dict(), exclude=['anchor'] if exclude_anchors else [])
+
+    model.load_state_dict(state_dict, strict=not train)  # load
+
+    return state_dict
+
+@torch.no_grad()
+def run(data=ROOT / 'data/coco128.yaml',  # dataset.yaml path
+        weights=ROOT / 'yolov5s.pt',  # weights path
+        imgsz=(640, 640),  # image (height, width)
+        batch_size=1,  # batch size
+        device='cpu',  # cuda device, i.e. 0 or 0,1,2,3 or cpu
+        include=('onnx',),  # include formats
+        half=False,  # FP16 half-precision export
+        inplace=False,  # set YOLOv5 Detect() inplace=True
+        train=False,  # model.train() mode
+        optimize=False,  # TorchScript: optimize for mobile
+        int8=False,  # CoreML/TF INT8 quantization
+        dynamic=False,  # ONNX/TF: dynamic axes
+        simplify=False,  # ONNX: simplify model
+        opset=12,  # ONNX: opset version
+        verbose=False,  # TensorRT: verbose log
+        workspace=4,  # TensorRT: workspace size (GB)
+        nms=False,  # TF: add NMS to model
+        agnostic_nms=False,  # TF: add agnostic NMS to model
+        topk_per_class=100,  # TF.js NMS: topk per class to keep
+        topk_all=100,  # TF.js NMS: topk for all classes to keep
+        iou_thres=0.45,  # TF.js NMS: IoU threshold
+        conf_thres=0.25,  # TF.js NMS: confidence threshold
+        remove_grid=False,
+        ):
+    t = time.time()
+    include = [x.lower() for x in include]  # to lowercase
+    formats = tuple(export_formats()['Argument'][1:])  # --include arguments
+    flags = [x in include for x in formats]
+    assert sum(flags) == len(include), f'ERROR: Invalid --include {include}, valid --include arguments are {formats}'
+    jit, onnx, xml, engine, coreml, saved_model, pb, tflite, edgetpu, tfjs = flags  # export booleans
+    file = Path(url2file(weights) if str(weights).startswith(('http:/', 'https:/')) else weights)  # PyTorch weights
+
+    # Load PyTorch model
+    device = select_device(device)
+    assert not (device.type == 'cpu' and half), '--half only compatible with GPU export, i.e. 
use --device 0' + model, extras = load_checkpoint(type_='ensemble', weights=weights, device=device) # load FP32 model + sparseml_wrapper = extras['sparseml_wrapper'] + nc, names = extras["ckpt"]["nc"], model.names # number of classes, class names + + # Checks + imgsz *= 2 if len(imgsz) == 1 else 1 # expand + opset = 12 if ('openvino' in include) else opset # OpenVINO requires opset <= 12 + assert nc == len(names), f'Model class count {nc} != len(names) {len(names)}' + + # Input + gs = int(max(model.stride)) # grid size (max stride) + imgsz = [check_img_size(x, gs) for x in imgsz] # verify img_size are gs-multiples + im = torch.zeros(batch_size, 3, *imgsz).to(device) # image size(1,3,320,192) BCHW iDetection + + # Update model + if half: + im, model = im.half(), model.half() # to FP16 + model.train() if train else model.eval() # training mode = no Detect() layer grid construction + for k, m in model.named_modules(): + if isinstance(m, Conv): # assign export-friendly activations + if isinstance(m.act, nn.SiLU): + m.act = SiLU() + elif isinstance(m, Detect): + m.inplace = inplace + m.onnx_dynamic = dynamic + if hasattr(m, 'forward_export'): + m.forward = m.forward_export # assign custom forward (optional) + model.model[-1].export = not remove_grid # set Detect() layer grid export + + for _ in range(2): + y = model(im) # dry runs + shape = tuple(y[0].shape) # model output shape + LOGGER.info(f"\n{colorstr('PyTorch:')} starting from {file} with output shape {shape} ({file_size(file):.1f} MB)") + + # Exports + f = [''] * 10 # exported filenames + warnings.filterwarnings(action='ignore', category=torch.jit.TracerWarning) # suppress TracerWarning + if jit: + f[0] = export_torchscript(model, im, file, optimize) + if engine: # TensorRT required before ONNX + f[1] = export_engine(model, im, file, train, half, simplify, workspace, verbose) + if onnx or xml: # OpenVINO requires ONNX + f[2] = export_onnx(model, im, file, opset, train, dynamic, simplify) + if xml: # OpenVINO + f[3] = export_openvino(model, im, file) + if coreml: + _, f[4] = export_coreml(model, im, file) + + # TensorFlow Exports + if any((saved_model, pb, tflite, edgetpu, tfjs)): + if int8 or edgetpu: # TFLite --int8 bug https://github.com/ultralytics/yolov5/issues/5707 + check_requirements(('flatbuffers==1.12',)) # required before `import tensorflow` + assert not (tflite and tfjs), 'TFLite and TF.js models must be exported separately, please pass only one type.' 
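+        # SavedModel is exported first: the GraphDef, TFLite, Edge TPU and TF.js exports below all derive from it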
+ model, f[5] = export_saved_model(model.cpu(), im, file, dynamic, tf_nms=nms or agnostic_nms or tfjs, + agnostic_nms=agnostic_nms or tfjs, topk_per_class=topk_per_class, + topk_all=topk_all, conf_thres=conf_thres, iou_thres=iou_thres) # keras model + if pb or tfjs: # pb prerequisite to tfjs + f[6] = export_pb(model, im, file) + if tflite or edgetpu: + f[7] = export_tflite(model, im, file, int8=int8 or edgetpu, data=data, ncalib=100) + if edgetpu: + f[8] = export_edgetpu(model, im, file) + if tfjs: + f[9] = export_tfjs(model, im, file) + + # Finish + f = [str(x) for x in f if x] # filter out '' and None + if any(f): + LOGGER.info(f'\nExport complete ({time.time() - t:.2f}s)' + f"\nResults saved to {colorstr('bold', file.parent.resolve())}" + f"\nDetect: python detect.py --weights {f[-1]}" + f"\nPyTorch Hub: model = torch.hub.load('ultralytics/yolov5', 'custom', '{f[-1]}')" + f"\nValidate: python val.py --weights {f[-1]}" + f"\nVisualize: https://netron.app") + return f # return list of exported files/dirs + + +def parse_opt(): + parser = argparse.ArgumentParser() + parser.add_argument('--data', type=str, default=ROOT / 'data/coco128.yaml', help='dataset.yaml path') + parser.add_argument('--weights', nargs='+', type=str, default=ROOT / 'yolov5s.pt', help='model.pt path(s)') + parser.add_argument('--imgsz', '--img', '--img-size', nargs='+', type=int, default=[640, 640], help='image (h, w)') + parser.add_argument('--batch-size', type=int, default=1, help='batch size') + parser.add_argument('--device', default='cpu', help='cuda device, i.e. 0 or 0,1,2,3 or cpu') + parser.add_argument('--half', action='store_true', help='FP16 half-precision export') + parser.add_argument('--inplace', action='store_true', help='set YOLOv5 Detect() inplace=True') + parser.add_argument('--train', action='store_true', help='model.train() mode') + parser.add_argument('--optimize', action='store_true', help='TorchScript: optimize for mobile') + parser.add_argument('--int8', action='store_true', help='CoreML/TF INT8 quantization') + parser.add_argument('--dynamic', action='store_true', help='ONNX/TF: dynamic axes') + parser.add_argument('--simplify', action='store_true', help='ONNX: simplify model') + parser.add_argument('--opset', type=int, default=12, help='ONNX: opset version') + parser.add_argument('--verbose', action='store_true', help='TensorRT: verbose log') + parser.add_argument('--workspace', type=int, default=4, help='TensorRT: workspace size (GB)') + parser.add_argument('--nms', action='store_true', help='TF: add NMS to model') + parser.add_argument('--agnostic-nms', action='store_true', help='TF: add agnostic NMS to model') + parser.add_argument('--topk-per-class', type=int, default=100, help='TF.js NMS: topk per class to keep') + parser.add_argument('--topk-all', type=int, default=100, help='TF.js NMS: topk for all classes to keep') + parser.add_argument('--iou-thres', type=float, default=0.45, help='TF.js NMS: IoU threshold') + parser.add_argument('--conf-thres', type=float, default=0.25, help='TF.js NMS: confidence threshold') + parser.add_argument("--remove-grid", action="store_true", help="remove export of Detect() layer grid") + parser.add_argument('--include', nargs='+', + default=['torchscript', 'onnx'], + help='torchscript, onnx, openvino, engine, coreml, saved_model, pb, tflite, edgetpu, tfjs') + opt = parser.parse_args() + print_args(FILE.stem, opt) + return opt + + +def main(opt): + for opt.weights in (opt.weights if isinstance(opt.weights, list) else [opt.weights]): + run(**vars(opt)) + + +if 
__name__ == "__main__": + opt = parse_opt() + main(opt) \ No newline at end of file diff --git a/hubconf.py b/hubconf.py index f74e70c85a65..39fa614b2e34 100644 --- a/hubconf.py +++ b/hubconf.py @@ -1,18 +1,21 @@ -"""YOLOv5 PyTorch Hub models https://pytorch.org/hub/ultralytics_yolov5/ +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license +""" +PyTorch Hub models https://pytorch.org/hub/ultralytics_yolov5/ Usage: import torch model = torch.hub.load('ultralytics/yolov5', 'yolov5s') + model = torch.hub.load('ultralytics/yolov5:master', 'custom', 'path/to/yolov5s.onnx') # file from branch """ import torch def _create(name, pretrained=True, channels=3, classes=80, autoshape=True, verbose=True, device=None): - """Creates a specified YOLOv5 model + """Creates or loads a YOLOv5 model Arguments: - name (str): name of model, i.e. 'yolov5s' + name (str): model name 'yolov5s' or path 'path/to/best.pt' pretrained (bool): load pretrained weights into the model channels (int): number of input channels classes (int): number of model classes @@ -21,42 +24,44 @@ def _create(name, pretrained=True, channels=3, classes=80, autoshape=True, verbo device (str, torch.device, None): device to use for model parameters Returns: - YOLOv5 pytorch model + YOLOv5 model """ from pathlib import Path - from models.yolo import Model, attempt_load - from utils.general import check_requirements, set_logging - from utils.google_utils import attempt_download + from models.common import AutoShape, DetectMultiBackend + from models.yolo import Model + from utils.downloads import attempt_download + from utils.general import LOGGER, check_requirements, intersect_dicts, logging from utils.torch_utils import select_device - check_requirements(Path(__file__).parent / 'requirements.txt', exclude=('tensorboard', 'pycocotools', 'thop')) - set_logging(verbose=verbose) - - fname = Path(name).with_suffix('.pt') # checkpoint filename + if not verbose: + LOGGER.setLevel(logging.WARNING) + check_requirements(exclude=('tensorboard', 'thop', 'opencv-python')) + name = Path(name) + path = name.with_suffix('.pt') if name.suffix == '' else name # checkpoint path try: + device = select_device(('0' if torch.cuda.is_available() else 'cpu') if device is None else device) + if pretrained and channels == 3 and classes == 80: - model = attempt_load(fname, map_location=torch.device('cpu')) # download/load FP32 model + model = DetectMultiBackend(path, device=device) # download/load FP32 model + # model = models.experimental.attempt_load(path, map_location=device) # download/load FP32 model else: - cfg = list((Path(__file__).parent / 'models').rglob(f'{name}.yaml'))[0] # model.yaml path + cfg = list((Path(__file__).parent / 'models').rglob(f'{path.stem}.yaml'))[0] # model.yaml path model = Model(cfg, channels, classes) # create model if pretrained: - attempt_download(fname) # download if not found locally - ckpt = torch.load(fname, map_location=torch.device('cpu')) # load - msd = model.state_dict() # model state_dict + ckpt = torch.load(attempt_download(path), map_location=device) # load csd = ckpt['model'].float().state_dict() # checkpoint state_dict as FP32 - csd = {k: v for k, v in csd.items() if msd[k].shape == v.shape} # filter + csd = intersect_dicts(csd, model.state_dict(), exclude=['anchors']) # intersect model.load_state_dict(csd, strict=False) # load if len(ckpt['model'].names) == classes: model.names = ckpt['model'].names # set class names attribute if autoshape: - model = model.autoshape() # for file/URI/PIL/cv2/np inputs and NMS - device = 
select_device('0' if torch.cuda.is_available() else 'cpu') if device is None else torch.device(device) + model = AutoShape(model) # for file/URI/PIL/cv2/np inputs and NMS return model.to(device) except Exception as e: help_url = 'https://github.com/ultralytics/yolov5/issues/36' - s = 'Cache may be out of date, try `force_reload=True`. See %s for help.' % help_url + s = f'{e}. Cache may be out of date, try `force_reload=True` or see {help_url} for help.' raise Exception(s) from e @@ -65,6 +70,11 @@ def custom(path='path/to/model.pt', autoshape=True, verbose=True, device=None): return _create(path, autoshape=autoshape, verbose=verbose, device=device) +def yolov5n(pretrained=True, channels=3, classes=80, autoshape=True, verbose=True, device=None): + # YOLOv5-nano model https://github.com/ultralytics/yolov5 + return _create('yolov5n', pretrained, channels, classes, autoshape, verbose, device) + + def yolov5s(pretrained=True, channels=3, classes=80, autoshape=True, verbose=True, device=None): # YOLOv5-small model https://github.com/ultralytics/yolov5 return _create('yolov5s', pretrained, channels, classes, autoshape, verbose, device) @@ -85,6 +95,11 @@ def yolov5x(pretrained=True, channels=3, classes=80, autoshape=True, verbose=Tru return _create('yolov5x', pretrained, channels, classes, autoshape, verbose, device) +def yolov5n6(pretrained=True, channels=3, classes=80, autoshape=True, verbose=True, device=None): + # YOLOv5-nano-P6 model https://github.com/ultralytics/yolov5 + return _create('yolov5n6', pretrained, channels, classes, autoshape, verbose, device) + + def yolov5s6(pretrained=True, channels=3, classes=80, autoshape=True, verbose=True, device=None): # YOLOv5-small-P6 model https://github.com/ultralytics/yolov5 return _create('yolov5s6', pretrained, channels, classes, autoshape, verbose, device) @@ -110,16 +125,19 @@ def yolov5x6(pretrained=True, channels=3, classes=80, autoshape=True, verbose=Tr # model = custom(path='path/to/model.pt') # custom # Verify inference + from pathlib import Path + import cv2 import numpy as np from PIL import Image imgs = ['data/images/zidane.jpg', # filename - 'https://github.com/ultralytics/yolov5/releases/download/v1.0/zidane.jpg', # URI + Path('data/images/zidane.jpg'), # Path + 'https://ultralytics.com/images/zidane.jpg', # URI cv2.imread('data/images/bus.jpg')[:, :, ::-1], # OpenCV Image.open('data/images/bus.jpg'), # PIL np.zeros((320, 640, 3))] # numpy - results = model(imgs) # batched inference + results = model(imgs, size=320) # batched inference results.print() results.save() diff --git a/models/common.py b/models/common.py index 4211db406c3d..e0b783f55033 100644 --- a/models/common.py +++ b/models/common.py @@ -1,21 +1,31 @@ -# YOLOv5 common modules +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license +""" +Common modules +""" +import json import math +import platform +import warnings +from collections import OrderedDict, namedtuple from copy import copy from pathlib import Path +import cv2 import numpy as np import pandas as pd import requests import torch import torch.nn as nn +import yaml from PIL import Image from torch.cuda import amp -from utils.datasets import letterbox -from utils.general import non_max_suppression, make_divisible, scale_coords, increment_path, xyxy2xywh, save_one_box -from utils.plots import colors, plot_one_box -from utils.torch_utils import time_synchronized +from utils.datasets import exif_transpose, letterbox +from utils.general import (LOGGER, check_requirements, check_suffix, check_version, 
colorstr, increment_path, + make_divisible, non_max_suppression, scale_coords, xywh2xyxy, xyxy2xywh) +from utils.plots import Annotator, colors, save_one_box +from utils.torch_utils import copy_attr, time_sync def autopad(k, p=None): # kernel, padding @@ -25,15 +35,10 @@ def autopad(k, p=None): # kernel, padding return p -def DWConv(c1, c2, k=1, s=1, act=True): - # Depthwise convolution - return Conv(c1, c2, k, s, g=math.gcd(c1, c2), act=act) - - class Conv(nn.Module): # Standard convolution def __init__(self, c1, c2, k=1, s=1, p=None, g=1, act=True): # ch_in, ch_out, kernel, stride, padding, groups - super(Conv, self).__init__() + super().__init__() self.conv = nn.Conv2d(c1, c2, k, s, autopad(k, p), groups=g, bias=False) self.bn = nn.BatchNorm2d(c2) self.act = nn.SiLU() if act is True else (act if isinstance(act, nn.Module) else nn.Identity()) @@ -41,10 +46,16 @@ def __init__(self, c1, c2, k=1, s=1, p=None, g=1, act=True): # ch_in, ch_out, k def forward(self, x): return self.act(self.bn(self.conv(x))) - def fuseforward(self, x): + def forward_fuse(self, x): return self.act(self.conv(x)) +class DWConv(Conv): + # Depth-wise convolution class + def __init__(self, c1, c2, k=1, s=1, act=True): # ch_in, ch_out, kernel, stride, padding, groups + super().__init__(c1, c2, k, s, g=math.gcd(c1, c2), act=act) + + class TransformerLayer(nn.Module): # Transformer layer https://arxiv.org/abs/2010.11929 (LayerNorm layers removed for better performance) def __init__(self, c, num_heads): @@ -70,31 +81,21 @@ def __init__(self, c1, c2, num_heads, num_layers): if c1 != c2: self.conv = Conv(c1, c2) self.linear = nn.Linear(c2, c2) # learnable position embedding - self.tr = nn.Sequential(*[TransformerLayer(c2, num_heads) for _ in range(num_layers)]) + self.tr = nn.Sequential(*(TransformerLayer(c2, num_heads) for _ in range(num_layers))) self.c2 = c2 def forward(self, x): if self.conv is not None: x = self.conv(x) b, _, w, h = x.shape - p = x.flatten(2) - p = p.unsqueeze(0) - p = p.transpose(0, 3) - p = p.squeeze(3) - e = self.linear(p) - x = p + e - - x = self.tr(x) - x = x.unsqueeze(3) - x = x.transpose(0, 3) - x = x.reshape(b, self.c2, w, h) - return x + p = x.flatten(2).permute(2, 0, 1) + return self.tr(p + self.linear(p)).permute(1, 2, 0).reshape(b, self.c2, w, h) class Bottleneck(nn.Module): # Standard bottleneck def __init__(self, c1, c2, shortcut=True, g=1, e=0.5): # ch_in, ch_out, shortcut, groups, expansion - super(Bottleneck, self).__init__() + super().__init__() c_ = int(c2 * e) # hidden channels self.cv1 = Conv(c1, c_, 1, 1) self.cv2 = Conv(c_, c2, 3, 1, g=g) @@ -107,15 +108,15 @@ def forward(self, x): class BottleneckCSP(nn.Module): # CSP Bottleneck https://github.com/WongKinYiu/CrossStagePartialNetworks def __init__(self, c1, c2, n=1, shortcut=True, g=1, e=0.5): # ch_in, ch_out, number, shortcut, groups, expansion - super(BottleneckCSP, self).__init__() + super().__init__() c_ = int(c2 * e) # hidden channels self.cv1 = Conv(c1, c_, 1, 1) self.cv2 = nn.Conv2d(c1, c_, 1, 1, bias=False) self.cv3 = nn.Conv2d(c_, c_, 1, 1, bias=False) self.cv4 = Conv(2 * c_, c2, 1, 1) self.bn = nn.BatchNorm2d(2 * c_) # applied to cat(cv2, cv3) - self.act = nn.LeakyReLU(0.1, inplace=True) - self.m = nn.Sequential(*[Bottleneck(c_, c_, shortcut, g, e=1.0) for _ in range(n)]) + self.act = nn.SiLU() + self.m = nn.Sequential(*(Bottleneck(c_, c_, shortcut, g, e=1.0) for _ in range(n))) def forward(self, x): y1 = self.cv3(self.m(self.cv1(x))) @@ -126,12 +127,12 @@ def forward(self, x): class C3(nn.Module): # CSP Bottleneck 
with 3 convolutions def __init__(self, c1, c2, n=1, shortcut=True, g=1, e=0.5): # ch_in, ch_out, number, shortcut, groups, expansion - super(C3, self).__init__() + super().__init__() c_ = int(c2 * e) # hidden channels self.cv1 = Conv(c1, c_, 1, 1) self.cv2 = Conv(c1, c_, 1, 1) self.cv3 = Conv(2 * c_, c2, 1) # act=FReLU(c2) - self.m = nn.Sequential(*[Bottleneck(c_, c_, shortcut, g, e=1.0) for _ in range(n)]) + self.m = nn.Sequential(*(Bottleneck(c_, c_, shortcut, g, e=1.0) for _ in range(n))) # self.m = nn.Sequential(*[CrossConv(c_, c_, 3, 1, g, 1.0, shortcut) for _ in range(n)]) def forward(self, x): @@ -146,10 +147,26 @@ def __init__(self, c1, c2, n=1, shortcut=True, g=1, e=0.5): self.m = TransformerBlock(c_, c_, 4, n) +class C3SPP(C3): + # C3 module with SPP() + def __init__(self, c1, c2, k=(5, 9, 13), n=1, shortcut=True, g=1, e=0.5): + super().__init__(c1, c2, n, shortcut, g, e) + c_ = int(c2 * e) + self.m = SPP(c_, c_, k) + + +class C3Ghost(C3): + # C3 module with GhostBottleneck() + def __init__(self, c1, c2, n=1, shortcut=True, g=1, e=0.5): + super().__init__(c1, c2, n, shortcut, g, e) + c_ = int(c2 * e) # hidden channels + self.m = nn.Sequential(*(GhostBottleneck(c_, c_) for _ in range(n))) + + class SPP(nn.Module): - # Spatial pyramid pooling layer used in YOLOv3-SPP + # Spatial Pyramid Pooling (SPP) layer https://arxiv.org/abs/1406.4729 def __init__(self, c1, c2, k=(5, 9, 13)): - super(SPP, self).__init__() + super().__init__() c_ = c1 // 2 # hidden channels self.cv1 = Conv(c1, c_, 1, 1) self.cv2 = Conv(c_ * (len(k) + 1), c2, 1, 1) @@ -157,13 +174,33 @@ def __init__(self, c1, c2, k=(5, 9, 13)): def forward(self, x): x = self.cv1(x) - return self.cv2(torch.cat([x] + [m(x) for m in self.m], 1)) + with warnings.catch_warnings(): + warnings.simplefilter('ignore') # suppress torch 1.9.0 max_pool2d() warning + return self.cv2(torch.cat([x] + [m(x) for m in self.m], 1)) + + +class SPPF(nn.Module): + # Spatial Pyramid Pooling - Fast (SPPF) layer for YOLOv5 by Glenn Jocher + def __init__(self, c1, c2, k=5): # equivalent to SPP(k=(5, 9, 13)) + super().__init__() + c_ = c1 // 2 # hidden channels + self.cv1 = Conv(c1, c_, 1, 1) + self.cv2 = Conv(c_ * 4, c2, 1, 1) + self.m = nn.MaxPool2d(kernel_size=k, stride=1, padding=k // 2) + + def forward(self, x): + x = self.cv1(x) + with warnings.catch_warnings(): + warnings.simplefilter('ignore') # suppress torch 1.9.0 max_pool2d() warning + y1 = self.m(x) + y2 = self.m(y1) + return self.cv2(torch.cat([x, y1, y2, self.m(y2)], 1)) class Focus(nn.Module): # Focus wh information into c-space def __init__(self, c1, c2, k=1, s=1, p=None, g=1, act=True): # ch_in, ch_out, kernel, stride, padding, groups - super(Focus, self).__init__() + super().__init__() self.conv = Conv(c1 * 4, c2, k, s, p, g, act) # self.contract = Contract(gain=2) @@ -172,6 +209,34 @@ def forward(self, x): # x(b,c,w,h) -> y(b,4c,w/2,h/2) # return self.conv(self.contract(x)) +class GhostConv(nn.Module): + # Ghost Convolution https://github.com/huawei-noah/ghostnet + def __init__(self, c1, c2, k=1, s=1, g=1, act=True): # ch_in, ch_out, kernel, stride, groups + super().__init__() + c_ = c2 // 2 # hidden channels + self.cv1 = Conv(c1, c_, k, s, None, g, act) + self.cv2 = Conv(c_, c_, 5, 1, None, c_, act) + + def forward(self, x): + y = self.cv1(x) + return torch.cat([y, self.cv2(y)], 1) + + +class GhostBottleneck(nn.Module): + # Ghost Bottleneck https://github.com/huawei-noah/ghostnet + def __init__(self, c1, c2, k=3, s=1): # ch_in, ch_out, kernel, stride + super().__init__() + c_ = 
c2 // 2 + self.conv = nn.Sequential(GhostConv(c1, c_, 1, 1), # pw + DWConv(c_, c_, k, s, act=False) if s == 2 else nn.Identity(), # dw + GhostConv(c_, c2, 1, 1, act=False)) # pw-linear + self.shortcut = nn.Sequential(DWConv(c1, c1, k, s, act=False), + Conv(c1, c2, 1, 1, act=False)) if s == 2 else nn.Identity() + + def forward(self, x): + return self.conv(x) + self.shortcut(x) + + class Contract(nn.Module): # Contract width-height into channels, i.e. x(1,64,80,80) to x(1,256,40,40) def __init__(self, gain=2): @@ -179,11 +244,11 @@ def __init__(self, gain=2): self.gain = gain def forward(self, x): - N, C, H, W = x.size() # assert (H / s == 0) and (W / s == 0), 'Indivisible gain' + b, c, h, w = x.size() # assert (h / s == 0) and (W / s == 0), 'Indivisible gain' s = self.gain - x = x.view(N, C, H // s, s, W // s, s) # x(1,64,40,2,40,2) + x = x.view(b, c, h // s, s, w // s, s) # x(1,64,40,2,40,2) x = x.permute(0, 3, 5, 1, 2, 4).contiguous() # x(1,2,2,64,40,40) - return x.view(N, C * s * s, H // s, W // s) # x(1,256,40,40) + return x.view(b, c * s * s, h // s, w // s) # x(1,256,40,40) class Expand(nn.Module): @@ -193,67 +258,276 @@ def __init__(self, gain=2): self.gain = gain def forward(self, x): - N, C, H, W = x.size() # assert C / s ** 2 == 0, 'Indivisible gain' + b, c, h, w = x.size() # assert C / s ** 2 == 0, 'Indivisible gain' s = self.gain - x = x.view(N, s, s, C // s ** 2, H, W) # x(1,2,2,16,80,80) + x = x.view(b, s, s, c // s ** 2, h, w) # x(1,2,2,16,80,80) x = x.permute(0, 3, 4, 1, 5, 2).contiguous() # x(1,16,80,2,80,2) - return x.view(N, C // s ** 2, H * s, W * s) # x(1,16,160,160) + return x.view(b, c // s ** 2, h * s, w * s) # x(1,16,160,160) class Concat(nn.Module): # Concatenate a list of tensors along dimension def __init__(self, dimension=1): - super(Concat, self).__init__() + super().__init__() self.d = dimension def forward(self, x): return torch.cat(x, self.d) -class NMS(nn.Module): - # Non-Maximum Suppression (NMS) module - conf = 0.25 # confidence threshold - iou = 0.45 # IoU threshold - classes = None # (optional list) filter by class - max_det = 1000 # maximum number of detections per image - - def __init__(self): - super(NMS, self).__init__() +class DetectMultiBackend(nn.Module): + # YOLOv5 MultiBackend class for python inference on various backends + def __init__(self, weights='yolov5s.pt', model=None, device=torch.device('cpu'), dnn=False, data=None, fp16=False): + # Usage: + # PyTorch: weights = *.pt + # TorchScript: *.torchscript + # ONNX Runtime: *.onnx + # ONNX OpenCV DNN: *.onnx with --dnn + # OpenVINO: *.xml + # CoreML: *.mlmodel + # TensorRT: *.engine + # TensorFlow SavedModel: *_saved_model + # TensorFlow GraphDef: *.pb + # TensorFlow Lite: *.tflite + # TensorFlow Edge TPU: *_edgetpu.tflite + from models.experimental import attempt_download, attempt_load # scoped to avoid circular import - def forward(self, x): - return non_max_suppression(x[0], self.conf, iou_thres=self.iou, classes=self.classes, max_det=self.max_det) + super().__init__() + w = str(weights[0] if isinstance(weights, list) else weights) + pt, jit, onnx, xml, engine, coreml, saved_model, pb, tflite, edgetpu, tfjs = self.model_type(w) # get backend + stride, names = 64, [f'class{i}' for i in range(1000)] # assign defaults + w = attempt_download(w) # download if not local + fp16 &= (pt or jit or onnx or engine) and device.type != 'cpu' # FP16 + if data: # data.yaml path (optional) + with open(data, errors='ignore') as f: + names = yaml.safe_load(f)['names'] # class names + + if pt: # PyTorch + 
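+            # reuse an explicitly passed model if provided, otherwise load from weights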
model = model or (attempt_load(weights if isinstance(weights, list) else w, map_location=device)) + stride = max(int(model.stride.max()), 32) # model stride + names = model.module.names if hasattr(model, 'module') else model.names # get class names + model.half() if fp16 else model.float() + self.model = model.model # explicitly assign for to(), cpu(), cuda(), half() + elif jit: # TorchScript + LOGGER.info(f'Loading {w} for TorchScript inference...') + extra_files = {'config.txt': ''} # model metadata + model = torch.jit.load(w, _extra_files=extra_files) + model.half() if fp16 else model.float() + if extra_files['config.txt']: + d = json.loads(extra_files['config.txt']) # extra_files dict + stride, names = int(d['stride']), d['names'] + elif dnn: # ONNX OpenCV DNN + LOGGER.info(f'Loading {w} for ONNX OpenCV DNN inference...') + check_requirements(('opencv-python>=4.5.4',)) + net = cv2.dnn.readNetFromONNX(w) + elif onnx: # ONNX Runtime + LOGGER.info(f'Loading {w} for ONNX Runtime inference...') + cuda = torch.cuda.is_available() + check_requirements(('onnx', 'onnxruntime-gpu' if cuda else 'onnxruntime')) + import onnxruntime + providers = ['CUDAExecutionProvider', 'CPUExecutionProvider'] if cuda else ['CPUExecutionProvider'] + session = onnxruntime.InferenceSession(w, providers=providers) + elif xml: # OpenVINO + LOGGER.info(f'Loading {w} for OpenVINO inference...') + check_requirements(('openvino-dev',)) # requires openvino-dev: https://pypi.org/project/openvino-dev/ + import openvino.inference_engine as ie + core = ie.IECore() + if not Path(w).is_file(): # if not *.xml + w = next(Path(w).glob('*.xml')) # get *.xml file from *_openvino_model dir + network = core.read_network(model=w, weights=Path(w).with_suffix('.bin')) # *.xml, *.bin paths + executable_network = core.load_network(network, device_name='CPU', num_requests=1) + elif engine: # TensorRT + LOGGER.info(f'Loading {w} for TensorRT inference...') + import tensorrt as trt # https://developer.nvidia.com/nvidia-tensorrt-download + check_version(trt.__version__, '7.0.0', hard=True) # require tensorrt>=7.0.0 + Binding = namedtuple('Binding', ('name', 'dtype', 'shape', 'data', 'ptr')) + logger = trt.Logger(trt.Logger.INFO) + with open(w, 'rb') as f, trt.Runtime(logger) as runtime: + model = runtime.deserialize_cuda_engine(f.read()) + bindings = OrderedDict() + fp16 = False # default updated below + for index in range(model.num_bindings): + name = model.get_binding_name(index) + dtype = trt.nptype(model.get_binding_dtype(index)) + shape = tuple(model.get_binding_shape(index)) + data = torch.from_numpy(np.empty(shape, dtype=np.dtype(dtype))).to(device) + bindings[name] = Binding(name, dtype, shape, data, int(data.data_ptr())) + if model.binding_is_input(index) and dtype == np.float16: + fp16 = True + binding_addrs = OrderedDict((n, d.ptr) for n, d in bindings.items()) + context = model.create_execution_context() + batch_size = bindings['images'].shape[0] + elif coreml: # CoreML + LOGGER.info(f'Loading {w} for CoreML inference...') + import coremltools as ct + model = ct.models.MLModel(w) + else: # TensorFlow (SavedModel, GraphDef, Lite, Edge TPU) + if saved_model: # SavedModel + LOGGER.info(f'Loading {w} for TensorFlow SavedModel inference...') + import tensorflow as tf + keras = False # assume TF1 saved_model + model = tf.keras.models.load_model(w) if keras else tf.saved_model.load(w) + elif pb: # GraphDef https://www.tensorflow.org/guide/migrate#a_graphpb_or_graphpbtxt + LOGGER.info(f'Loading {w} for TensorFlow GraphDef inference...') + 
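+            # TF1-style frozen graph: parse the serialized GraphDef, then wrap it in a callable concrete function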
+            import tensorflow as tf
+
+            def wrap_frozen_graph(gd, inputs, outputs):
+                x = tf.compat.v1.wrap_function(lambda: tf.compat.v1.import_graph_def(gd, name=""), [])  # wrapped
+                ge = x.graph.as_graph_element
+                return x.prune(tf.nest.map_structure(ge, inputs), tf.nest.map_structure(ge, outputs))
+
+            gd = tf.Graph().as_graph_def()  # graph_def
+            with open(w, 'rb') as f:
+                gd.ParseFromString(f.read())  # read the serialized GraphDef, closing the file handle
+            frozen_func = wrap_frozen_graph(gd, inputs="x:0", outputs="Identity:0")
+        elif tflite or edgetpu:  # https://www.tensorflow.org/lite/guide/python#install_tensorflow_lite_for_python
+            try:  # https://coral.ai/docs/edgetpu/tflite-python/#update-existing-tf-lite-code-for-the-edge-tpu
+                from tflite_runtime.interpreter import Interpreter, load_delegate
+            except ImportError:
+                import tensorflow as tf
+                Interpreter, load_delegate = tf.lite.Interpreter, tf.lite.experimental.load_delegate
+            if edgetpu:  # Edge TPU https://coral.ai/software/#edgetpu-runtime
+                LOGGER.info(f'Loading {w} for TensorFlow Lite Edge TPU inference...')
+                delegate = {'Linux': 'libedgetpu.so.1',
+                            'Darwin': 'libedgetpu.1.dylib',
+                            'Windows': 'edgetpu.dll'}[platform.system()]
+                interpreter = Interpreter(model_path=w, experimental_delegates=[load_delegate(delegate)])
+            else:  # Lite
+                LOGGER.info(f'Loading {w} for TensorFlow Lite inference...')
+                interpreter = Interpreter(model_path=w)  # load TFLite model
+            interpreter.allocate_tensors()  # allocate
+            input_details = interpreter.get_input_details()  # inputs
+            output_details = interpreter.get_output_details()  # outputs
+        elif tfjs:
+            raise Exception('ERROR: YOLOv5 TF.js inference is not supported')
+        self.__dict__.update(locals())  # assign all variables to self
+
+    def forward(self, im, augment=False, visualize=False, val=False):
+        # YOLOv5 MultiBackend inference
+        b, ch, h, w = im.shape  # batch, channel, height, width
+        if self.pt or self.jit:  # PyTorch
+            y = self.model(im) if self.jit else self.model(im, augment=augment, visualize=visualize)
+            return y if val else y[0]
+        elif self.dnn:  # ONNX OpenCV DNN
+            im = im.cpu().numpy()  # torch to numpy
+            self.net.setInput(im)
+            y = self.net.forward()
+        elif self.onnx:  # ONNX Runtime
+            im = im.cpu().numpy()  # torch to numpy
+            y = self.session.run([self.session.get_outputs()[0].name], {self.session.get_inputs()[0].name: im})[0]
+        elif self.xml:  # OpenVINO
+            im = im.cpu().numpy()  # FP32
+            desc = self.ie.TensorDesc(precision='FP32', dims=im.shape, layout='NCHW')  # Tensor Description
+            request = self.executable_network.requests[0]  # inference request
+            request.set_blob(blob_name='images', blob=self.ie.Blob(desc, im))  # name=next(iter(request.input_blobs))
+            request.infer()
+            y = request.output_blobs['output'].buffer  # name=next(iter(request.output_blobs))
+        elif self.engine:  # TensorRT
+            assert im.shape == self.bindings['images'].shape, (im.shape, self.bindings['images'].shape)
+            self.binding_addrs['images'] = int(im.data_ptr())
+            self.context.execute_v2(list(self.binding_addrs.values()))
+            y = self.bindings['output'].data
+        elif self.coreml:  # CoreML
+            im = im.permute(0, 2, 3, 1).cpu().numpy()  # torch BCHW to numpy BHWC shape(1,320,192,3)
+            im = Image.fromarray((im[0] * 255).astype('uint8'))
+            # im = im.resize((192, 320), Image.ANTIALIAS)
+            y = self.model.predict({'image': im})  # coordinates are xywh normalized
+            if 'confidence' in y:
+                box = xywh2xyxy(y['coordinates'] * [[w, h, w, h]])  # xyxy pixels
+                conf, cls = y['confidence'].max(1), y['confidence'].argmax(1).astype(float)  # np.float is removed in NumPy>=1.24
+                y = np.concatenate((box, conf.reshape(-1, 1), cls.reshape(-1, 1)), 1)
+            else:
+                k = 
'var_' + str(sorted(int(k.replace('var_', '')) for k in y)[-1]) # output key + y = y[k] # output + else: # TensorFlow (SavedModel, GraphDef, Lite, Edge TPU) + im = im.permute(0, 2, 3, 1).cpu().numpy() # torch BCHW to numpy BHWC shape(1,320,192,3) + if self.saved_model: # SavedModel + y = (self.model(im, training=False) if self.keras else self.model(im)).numpy() + elif self.pb: # GraphDef + y = self.frozen_func(x=self.tf.constant(im)).numpy() + else: # Lite or Edge TPU + input, output = self.input_details[0], self.output_details[0] + int8 = input['dtype'] == np.uint8 # is TFLite quantized uint8 model + if int8: + scale, zero_point = input['quantization'] + im = (im / scale + zero_point).astype(np.uint8) # de-scale + self.interpreter.set_tensor(input['index'], im) + self.interpreter.invoke() + y = self.interpreter.get_tensor(output['index']) + if int8: + scale, zero_point = output['quantization'] + y = (y.astype(np.float32) - zero_point) * scale # re-scale + y[..., :4] *= [w, h, w, h] # xywh normalized to pixels + + if isinstance(y, np.ndarray): + y = torch.tensor(y, device=self.device) + return (y, []) if val else y + + def warmup(self, imgsz=(1, 3, 640, 640)): + # Warmup model by running inference once + if any((self.pt, self.jit, self.onnx, self.engine, self.saved_model, self.pb)): # warmup types + if self.device.type != 'cpu': # only warmup GPU models + im = torch.zeros(*imgsz, dtype=torch.half if self.fp16 else torch.float, device=self.device) # input + for _ in range(2 if self.jit else 1): # + self.forward(im) # warmup + + @staticmethod + def model_type(p='path/to/model.pt'): + # Return model type from model path, i.e. path='path/to/model.onnx' -> type=onnx + from export import export_formats + suffixes = list(export_formats().Suffix) + ['.xml'] # export suffixes + check_suffix(p, suffixes) # checks + p = Path(p).name # eliminate trailing separators + pt, jit, onnx, xml, engine, coreml, saved_model, pb, tflite, edgetpu, tfjs, xml2 = (s in p for s in suffixes) + xml |= xml2 # *_openvino_model or *.xml + tflite &= not edgetpu # *.tflite + return pt, jit, onnx, xml, engine, coreml, saved_model, pb, tflite, edgetpu, tfjs class AutoShape(nn.Module): - # input-robust model wrapper for passing cv2/np/PIL/torch inputs. Includes preprocessing, inference and NMS + # YOLOv5 input-robust model wrapper for passing cv2/np/PIL/torch inputs. Includes preprocessing, inference and NMS conf = 0.25 # NMS confidence threshold iou = 0.45 # NMS IoU threshold - classes = None # (optional list) filter by class + agnostic = False # NMS class-agnostic + multi_label = False # NMS multiple labels per box + classes = None # (optional list) filter by class, i.e. = [0, 15, 16] for COCO persons, cats and dogs max_det = 1000 # maximum number of detections per image + amp = False # Automatic Mixed Precision (AMP) inference def __init__(self, model): - super(AutoShape, self).__init__() + super().__init__() + LOGGER.info('Adding AutoShape... ') + copy_attr(self, model, include=('yaml', 'nc', 'hyp', 'names', 'stride', 'abc'), exclude=()) # copy attributes + self.dmb = isinstance(model, DetectMultiBackend) # DetectMultiBackend() instance + self.pt = not self.dmb or model.pt # PyTorch model self.model = model.eval() - def autoshape(self): - print('AutoShape already enabled, skipping... 
') # model already converted to model.autoshape() + def _apply(self, fn): + # Apply to(), cpu(), cuda(), half() to model tensors that are not parameters or registered buffers + self = super()._apply(fn) + if self.pt: + m = self.model.model.model[-1] if self.dmb else self.model.model[-1] # Detect() + m.stride = fn(m.stride) + m.grid = list(map(fn, m.grid)) + if isinstance(m.anchor_grid, list): + m.anchor_grid = list(map(fn, m.anchor_grid)) return self @torch.no_grad() def forward(self, imgs, size=640, augment=False, profile=False): # Inference from various sources. For height=640, width=1280, RGB images example inputs are: - # filename: imgs = 'data/images/zidane.jpg' - # URI: = 'https://github.com/ultralytics/yolov5/releases/download/v1.0/zidane.jpg' + # file: imgs = 'data/images/zidane.jpg' # str or PosixPath + # URI: = 'https://ultralytics.com/images/zidane.jpg' # OpenCV: = cv2.imread('image.jpg')[:,:,::-1] # HWC BGR to RGB x(640,1280,3) - # PIL: = Image.open('image.jpg') # HWC x(640,1280,3) + # PIL: = Image.open('image.jpg') or ImageGrab.grab() # HWC x(640,1280,3) # numpy: = np.zeros((640,1280,3)) # HWC # torch: = torch.zeros(16,3,320,640) # BCHW (scaled to size=640, 0-1 values) # multiple: = [Image.open('image1.jpg'), Image.open('image2.jpg'), ...] # list of images - t = [time_synchronized()] - p = next(self.model.parameters()) # for device and type + t = [time_sync()] + p = next(self.model.parameters()) if self.pt else torch.zeros(1) # for device and type + autocast = self.amp and (p.device.type != 'cpu') # Automatic Mixed Precision (AMP) inference if isinstance(imgs, torch.Tensor): # torch - with amp.autocast(enabled=p.device.type != 'cpu'): + with amp.autocast(enabled=autocast): return self.model(imgs.to(p.device).type_as(p), augment, profile) # inference # Pre-process @@ -261,50 +535,52 @@ def forward(self, imgs, size=640, augment=False, profile=False): shape0, shape1, files = [], [], [] # image and inference shapes, filenames for i, im in enumerate(imgs): f = f'image{i}' # filename - if isinstance(im, str): # filename or uri - im, f = np.asarray(Image.open(requests.get(im, stream=True).raw if im.startswith('http') else im)), im + if isinstance(im, (str, Path)): # filename or uri + im, f = Image.open(requests.get(im, stream=True).raw if str(im).startswith('http') else im), im + im = np.asarray(exif_transpose(im)) elif isinstance(im, Image.Image): # PIL Image - im, f = np.asarray(im), getattr(im, 'filename', f) or f + im, f = np.asarray(exif_transpose(im)), getattr(im, 'filename', f) or f files.append(Path(f).with_suffix('.jpg').name) if im.shape[0] < 5: # image in CHW im = im.transpose((1, 2, 0)) # reverse dataloader .transpose(2, 0, 1) - im = im[:, :, :3] if im.ndim == 3 else np.tile(im[:, :, None], 3) # enforce 3ch input + im = im[..., :3] if im.ndim == 3 else np.tile(im[..., None], 3) # enforce 3ch input s = im.shape[:2] # HWC shape0.append(s) # image shape g = (size / max(s)) # gain shape1.append([y * g for y in s]) imgs[i] = im if im.data.contiguous else np.ascontiguousarray(im) # update - shape1 = [make_divisible(x, int(self.stride.max())) for x in np.stack(shape1, 0).max(0)] # inference shape + shape1 = [make_divisible(x, self.stride) if self.pt else size for x in np.array(shape1).max(0)] # inf shape x = [letterbox(im, new_shape=shape1, auto=False)[0] for im in imgs] # pad - x = np.stack(x, 0) if n > 1 else x[0][None] # stack - x = np.ascontiguousarray(x.transpose((0, 3, 1, 2))) # BHWC to BCHW - x = torch.from_numpy(x).to(p.device).type_as(p) / 255. 
# uint8 to fp16/32 - t.append(time_synchronized()) + x = np.ascontiguousarray(np.array(x).transpose((0, 3, 1, 2))) # stack and BHWC to BCHW + x = torch.from_numpy(x).to(p.device).type_as(p) / 255 # uint8 to fp16/32 + t.append(time_sync()) - with amp.autocast(enabled=p.device.type != 'cpu'): + with amp.autocast(enabled=autocast): # Inference - y = self.model(x, augment, profile)[0] # forward - t.append(time_synchronized()) + y = self.model(x, augment, profile) # forward + t.append(time_sync()) # Post-process - y = non_max_suppression(y, self.conf, iou_thres=self.iou, classes=self.classes, max_det=self.max_det) # NMS + y = non_max_suppression(y if self.dmb else y[0], self.conf, iou_thres=self.iou, classes=self.classes, + agnostic=self.agnostic, multi_label=self.multi_label, max_det=self.max_det) # NMS for i in range(n): scale_coords(shape1, y[i][:, :4], shape0[i]) - t.append(time_synchronized()) + t.append(time_sync()) return Detections(imgs, y, files, t, self.names, x.shape) class Detections: - # detections class for YOLOv5 inference results - def __init__(self, imgs, pred, files, times=None, names=None, shape=None): - super(Detections, self).__init__() + # YOLOv5 detections class for inference results + def __init__(self, imgs, pred, files, times=(0, 0, 0, 0), names=None, shape=None): + super().__init__() d = pred[0].device # device - gn = [torch.tensor([*[im.shape[i] for i in [1, 0, 1, 0]], 1., 1.], device=d) for im in imgs] # normalizations + gn = [torch.tensor([*(im.shape[i] for i in [1, 0, 1, 0]), 1, 1], device=d) for im in imgs] # normalizations self.imgs = imgs # list of images as numpy arrays self.pred = pred # list of tensors pred[0] = (xyxy, conf, cls) self.names = names # class names self.files = files # image filenames + self.times = times # profiling times self.xyxy = pred # xyxy pixels self.xywh = [xyxy2xywh(x) for x in pred] # xywh pixels self.xyxyn = [x / g for x, g in zip(self.xyxy, gn)] # xyxy normalized @@ -314,47 +590,59 @@ def __init__(self, imgs, pred, files, times=None, names=None, shape=None): self.s = shape # inference BCHW shape def display(self, pprint=False, show=False, save=False, crop=False, render=False, save_dir=Path('')): + crops = [] for i, (im, pred) in enumerate(zip(self.imgs, self.pred)): - str = f'image {i + 1}/{len(self.pred)}: {im.shape[0]}x{im.shape[1]} ' - if pred is not None: + s = f'image {i + 1}/{len(self.pred)}: {im.shape[0]}x{im.shape[1]} ' # string + if pred.shape[0]: for c in pred[:, -1].unique(): n = (pred[:, -1] == c).sum() # detections per class - str += f"{n} {self.names[int(c)]}{'s' * (n > 1)}, " # add to string + s += f"{n} {self.names[int(c)]}{'s' * (n > 1)}, " # add to string if show or save or render or crop: - for *box, conf, cls in pred: # xyxy, confidence, class + annotator = Annotator(im, example=str(self.names)) + for *box, conf, cls in reversed(pred): # xyxy, confidence, class label = f'{self.names[int(cls)]} {conf:.2f}' if crop: - save_one_box(box, im, file=save_dir / 'crops' / self.names[int(cls)] / self.files[i]) + file = save_dir / 'crops' / self.names[int(cls)] / self.files[i] if save else None + crops.append({'box': box, 'conf': conf, 'cls': cls, 'label': label, + 'im': save_one_box(box, im, file=file, save=save)}) else: # all others - plot_one_box(box, im, label=label, color=colors(cls)) + annotator.box_label(box, label, color=colors(cls)) + im = annotator.im + else: + s += '(no detections)' im = Image.fromarray(im.astype(np.uint8)) if isinstance(im, np.ndarray) else im # from np if pprint: - print(str.rstrip(', ')) + 
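The `gn` tensors built in `Detections.__init__` hold the per-image normalization gain `[w, h, w, h, 1, 1]`, so dividing a pixel-space prediction row by `gn` converts the box to 0-1 coordinates while leaving confidence and class untouched:

```python
# Normalizing one (x1, y1, x2, y2, conf, cls) row with the whwh gain, as in
# Detections.xyxyn; the values here are made up for illustration.
import torch

h, w = 720, 1280
pred = torch.tensor([[100., 200., 300., 400., 0.9, 0.]])
gn = torch.tensor([w, h, w, h, 1., 1.])
print(pred / gn)  # boxes in 0-1, conf and cls unchanged
```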
LOGGER.info(s.rstrip(', ')) if show: im.show(self.files[i]) # show if save: f = self.files[i] im.save(save_dir / f) # save - print(f"{'Saved' * (i == 0)} {f}", end=',' if i < self.n - 1 else f' to {save_dir}\n') + if i == self.n - 1: + LOGGER.info(f"Saved {self.n} image{'s' * (self.n > 1)} to {colorstr('bold', save_dir)}") if render: self.imgs[i] = np.asarray(im) + if crop: + if save: + LOGGER.info(f'Saved results to {save_dir}\n') + return crops def print(self): self.display(pprint=True) # print results - print(f'Speed: %.1fms pre-process, %.1fms inference, %.1fms NMS per image at shape {tuple(self.s)}' % self.t) + LOGGER.info(f'Speed: %.1fms pre-process, %.1fms inference, %.1fms NMS per image at shape {tuple(self.s)}' % + self.t) def show(self): self.display(show=True) # show results - def save(self, save_dir='runs/hub/exp'): - save_dir = increment_path(save_dir, exist_ok=save_dir != 'runs/hub/exp', mkdir=True) # increment save_dir + def save(self, save_dir='runs/detect/exp'): + save_dir = increment_path(save_dir, exist_ok=save_dir != 'runs/detect/exp', mkdir=True) # increment save_dir self.display(save=True, save_dir=save_dir) # save results - def crop(self, save_dir='runs/hub/exp'): - save_dir = increment_path(save_dir, exist_ok=save_dir != 'runs/hub/exp', mkdir=True) # increment save_dir - self.display(crop=True, save_dir=save_dir) # crop results - print(f'Saved results to {save_dir}\n') + def crop(self, save=True, save_dir='runs/detect/exp'): + save_dir = increment_path(save_dir, exist_ok=save_dir != 'runs/detect/exp', mkdir=True) if save else None + return self.display(crop=True, save=save, save_dir=save_dir) # crop results def render(self): self.display(render=True) # render results @@ -372,10 +660,11 @@ def pandas(self): def tolist(self): # return a list of Detections objects, i.e. 'for result in results.tolist():' - x = [Detections([self.imgs[i]], [self.pred[i]], self.names, self.s) for i in range(self.n)] - for d in x: - for k in ['imgs', 'pred', 'xyxy', 'xyxyn', 'xywh', 'xywhn']: - setattr(d, k, getattr(d, k)[0]) # pop out of list + r = range(self.n) # iterable + x = [Detections([self.imgs[i]], [self.pred[i]], [self.files[i]], self.times, self.names, self.s) for i in r] + # for d in x: + # for k in ['imgs', 'pred', 'xyxy', 'xyxyn', 'xywh', 'xywhn']: + # setattr(d, k, getattr(d, k)[0]) # pop out of list return x def __len__(self): @@ -385,7 +674,7 @@ def __len__(self): class Classify(nn.Module): # Classification head, i.e. 
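Taken together, the methods above form the results object returned by hub inference. A hedged usage sketch; the `torch.hub.load` entrypoint is assumed, while the method names and the crop-dict keys come from this diff:

```python
# Assumed hub entrypoint; Detections method names follow the diff above.
import torch

model = torch.hub.load('ultralytics/yolov5', 'yolov5s')
results = model('https://ultralytics.com/images/zidane.jpg')
results.print()                   # per-class counts + speed line
crops = results.crop(save=False)  # list of dicts: 'box', 'conf', 'cls', 'label', 'im'
for det in results.tolist():      # one Detections object per image
    pass
```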
x(b,c1,20,20) to x(b,c2) def __init__(self, c1, c2, k=1, s=1, p=None, g=1): # ch_in, ch_out, kernel, stride, padding, groups - super(Classify, self).__init__() + super().__init__() self.aap = nn.AdaptiveAvgPool2d(1) # to x(b,c1,1,1) self.conv = nn.Conv2d(c1, c2, k, s, autopad(k, p), groups=g) # to x(b,c2,1,1) self.flat = nn.Flatten() diff --git a/models/experimental.py b/models/experimental.py index afa787907104..1230f4656c8f 100644 --- a/models/experimental.py +++ b/models/experimental.py @@ -1,18 +1,22 @@ -# YOLOv5 experimental modules +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license +""" +Experimental modules +""" +import math import numpy as np import torch import torch.nn as nn -from models.common import Conv, DWConv -from utils.google_utils import attempt_download +from models.common import Conv +from utils.downloads import attempt_download class CrossConv(nn.Module): # Cross Convolution Downsample def __init__(self, c1, c2, k=3, s=1, g=1, e=1.0, shortcut=False): # ch_in, ch_out, kernel, stride, groups, expansion, shortcut - super(CrossConv, self).__init__() + super().__init__() c_ = int(c2 * e) # hidden channels self.cv1 = Conv(c1, c_, (1, k), (1, s)) self.cv2 = Conv(c_, c2, (k, 1), (s, 1), g=g) @@ -25,11 +29,11 @@ def forward(self, x): class Sum(nn.Module): # Weighted sum of 2 or more layers https://arxiv.org/abs/1911.09070 def __init__(self, n, weight=False): # n: number of inputs - super(Sum, self).__init__() + super().__init__() self.weight = weight # apply weights boolean self.iter = range(n - 1) # iter object if weight: - self.w = nn.Parameter(-torch.arange(1., n) / 2, requires_grad=True) # layer weights + self.w = nn.Parameter(-torch.arange(1.0, n) / 2, requires_grad=True) # layer weights def forward(self, x): y = x[0] # no weight @@ -43,89 +47,69 @@ def forward(self, x): return y -class GhostConv(nn.Module): - # Ghost Convolution https://github.com/huawei-noah/ghostnet - def __init__(self, c1, c2, k=1, s=1, g=1, act=True): # ch_in, ch_out, kernel, stride, groups - super(GhostConv, self).__init__() - c_ = c2 // 2 # hidden channels - self.cv1 = Conv(c1, c_, k, s, None, g, act) - self.cv2 = Conv(c_, c_, 5, 1, None, c_, act) - - def forward(self, x): - y = self.cv1(x) - return torch.cat([y, self.cv2(y)], 1) - - -class GhostBottleneck(nn.Module): - # Ghost Bottleneck https://github.com/huawei-noah/ghostnet - def __init__(self, c1, c2, k=3, s=1): # ch_in, ch_out, kernel, stride - super(GhostBottleneck, self).__init__() - c_ = c2 // 2 - self.conv = nn.Sequential(GhostConv(c1, c_, 1, 1), # pw - DWConv(c_, c_, k, s, act=False) if s == 2 else nn.Identity(), # dw - GhostConv(c_, c2, 1, 1, act=False)) # pw-linear - self.shortcut = nn.Sequential(DWConv(c1, c1, k, s, act=False), - Conv(c1, c2, 1, 1, act=False)) if s == 2 else nn.Identity() - - def forward(self, x): - return self.conv(x) + self.shortcut(x) - - class MixConv2d(nn.Module): - # Mixed Depthwise Conv https://arxiv.org/abs/1907.09595 - def __init__(self, c1, c2, k=(1, 3), s=1, equal_ch=True): - super(MixConv2d, self).__init__() - groups = len(k) + # Mixed Depth-wise Conv https://arxiv.org/abs/1907.09595 + def __init__(self, c1, c2, k=(1, 3), s=1, equal_ch=True): # ch_in, ch_out, kernel, stride, ch_strategy + super().__init__() + n = len(k) # number of convolutions if equal_ch: # equal c_ per group - i = torch.linspace(0, groups - 1E-6, c2).floor() # c2 indices - c_ = [(i == g).sum() for g in range(groups)] # intermediate channels + i = torch.linspace(0, n - 1E-6, c2).floor() # c2 indices + c_ = [(i == g).sum() for g in 
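The `Classify` head's shape flow is easy to verify in isolation: adaptive average pooling collapses the spatial grid, a 1x1 convolution remaps channels, and flatten drops the singleton dimensions. A quick check with assumed sizes:

```python
# x(b,c1,20,20) -> x(b,c1,1,1) -> x(b,c2,1,1) -> x(b,c2); sizes are examples.
import torch
import torch.nn as nn

b, c1, c2 = 4, 256, 80
x = torch.rand(b, c1, 20, 20)
y = nn.Flatten()(nn.Conv2d(c1, c2, 1)(nn.AdaptiveAvgPool2d(1)(x)))
print(y.shape)  # torch.Size([4, 80])
```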
range(n)] # intermediate channels else: # equal weight.numel() per group - b = [c2] + [0] * groups - a = np.eye(groups + 1, groups, k=-1) + b = [c2] + [0] * n + a = np.eye(n + 1, n, k=-1) a -= np.roll(a, 1, axis=1) a *= np.array(k) ** 2 a[0] = 1 c_ = np.linalg.lstsq(a, b, rcond=None)[0].round() # solve for equal weight indices, ax = b - self.m = nn.ModuleList([nn.Conv2d(c1, int(c_[g]), k[g], s, k[g] // 2, bias=False) for g in range(groups)]) + self.m = nn.ModuleList( + [nn.Conv2d(c1, int(c_), k, s, k // 2, groups=math.gcd(c1, int(c_)), bias=False) for k, c_ in zip(k, c_)]) self.bn = nn.BatchNorm2d(c2) - self.act = nn.LeakyReLU(0.1, inplace=True) + self.act = nn.SiLU() def forward(self, x): - return x + self.act(self.bn(torch.cat([m(x) for m in self.m], 1))) + return self.act(self.bn(torch.cat([m(x) for m in self.m], 1))) class Ensemble(nn.ModuleList): # Ensemble of models def __init__(self): - super(Ensemble, self).__init__() + super().__init__() - def forward(self, x, augment=False): + def forward(self, x, augment=False, profile=False, visualize=False): y = [] for module in self: - y.append(module(x, augment)[0]) + y.append(module(x, augment, profile, visualize)[0]) # y = torch.stack(y).max(0)[0] # max ensemble # y = torch.stack(y).mean(0) # mean ensemble y = torch.cat(y, 1) # nms ensemble return y, None # inference, train output -def attempt_load(weights, map_location=None, inplace=True): +def attempt_load(weights, map_location=None, inplace=True, fuse=True): from models.yolo import Detect, Model # Loads an ensemble of models weights=[a,b,c] or a single model weights=[a] or weights=a model = Ensemble() for w in weights if isinstance(weights, list) else [weights]: - attempt_download(w) - ckpt = torch.load(w, map_location=map_location) # load - model.append(ckpt['ema' if ckpt.get('ema') else 'model'].float().fuse().eval()) # FP32 model + ckpt = torch.load(attempt_download(w), map_location=map_location) # load + ckpt = (ckpt.get('ema') or ckpt['model']).float() # FP32 model + model.append(ckpt.fuse().eval() if fuse else ckpt.eval()) # fused or un-fused model in eval mode # Compatibility updates for m in model.modules(): - if type(m) in [nn.Hardswish, nn.LeakyReLU, nn.ReLU, nn.ReLU6, nn.SiLU, Detect, Model]: - m.inplace = inplace # pytorch 1.7.0 compatibility - elif type(m) is Conv: - m._non_persistent_buffers_set = set() # pytorch 1.6.0 compatibility + t = type(m) + if t in (nn.Hardswish, nn.LeakyReLU, nn.ReLU, nn.ReLU6, nn.SiLU, Detect, Model): + m.inplace = inplace # torch 1.7.0 compatibility + if t is Detect: + if not isinstance(m.anchor_grid, list): # new Detect Layer compatibility + delattr(m, 'anchor_grid') + setattr(m, 'anchor_grid', [torch.zeros(1)] * m.nl) + elif t is Conv: + m._non_persistent_buffers_set = set() # torch 1.6.0 compatibility + elif t is nn.Upsample and not hasattr(m, 'recompute_scale_factor'): + m.recompute_scale_factor = None # torch 1.11.0 compatibility if len(model) == 1: return model[-1] # return model diff --git a/models/export.py b/models/export.py deleted file mode 100644 index 6043c84c6246..000000000000 --- a/models/export.py +++ /dev/null @@ -1,272 +0,0 @@ -"""Exports a YOLOv5 *.pt model to TorchScript, ONNX, CoreML formats - -Usage: - $ python path/to/models/export.py --weights yolov5s.pt --img 640 --batch 1 -""" - -import argparse -from copy import deepcopy -from pathlib import Path -import sys -import time -import os - -sys.path.append('./') # to run '$ python *.py' files in subdirectories - -import torch -import torch.nn as nn -from 
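`Ensemble.forward()` above keeps the max and mean variants only as comments and ships the NMS ensemble: member outputs are concatenated along the box dimension and duplicates are left for NMS to resolve. The three options side by side, on made-up outputs:

```python
# Two hypothetical model outputs of shape (batch, boxes, outputs).
import torch

y1, y2 = torch.rand(1, 100, 85), torch.rand(1, 100, 85)
y_max = torch.stack([y1, y2]).max(0)[0]  # max ensemble
y_mean = torch.stack([y1, y2]).mean(0)   # mean ensemble
y_nms = torch.cat([y1, y2], 1)           # nms ensemble, shape (1, 200, 85)
```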
torch.utils.mobile_optimizer import optimize_for_mobile - -from sparseml.pytorch.utils import ModuleExporter -from sparseml.pytorch.sparsification.quantization import skip_onnx_input_quantize - -import models -from models.experimental import attempt_load -from models.yolo import Model -from utils.activations import Hardswish, SiLU -from utils.general import colorstr, check_img_size, check_requirements, file_size, set_logging -from utils.google_utils import attempt_download -from utils.sparse import SparseMLWrapper, check_download_sparsezoo_weights -from utils.torch_utils import select_device, intersect_dicts, is_parallel, torch_distributed_zero_first - - -def create_checkpoint(epoch, model, optimizer, ema, sparseml_wrapper, **kwargs): - pickle = not sparseml_wrapper.qat_active(epoch) # qat does not support pickled exports - ckpt_model = deepcopy(model.module if is_parallel(model) else model).float() - yaml = ckpt_model.yaml - if not pickle: - ckpt_model = ckpt_model.state_dict() - - return {'epoch': epoch, - 'model': ckpt_model, - 'optimizer': optimizer.state_dict(), - 'yaml': yaml, - 'hyp': model.hyp, - **ema.state_dict(pickle), - **sparseml_wrapper.state_dict(), - **kwargs} - - -def load_checkpoint(type_, weights, device, cfg=None, hyp=None, nc=None, recipe=None, resume=None, rank=-1): - with torch_distributed_zero_first(rank): - attempt_download(weights) # download if not found locally - weights = check_download_sparsezoo_weights(weights) # download from sparsezoo if zoo stub - ckpt = torch.load(weights[0] if isinstance(weights, list) or isinstance(weights, tuple) - else weights, map_location=device) # load checkpoint - start_epoch = ckpt['epoch'] + 1 if 'epoch' in ckpt else 0 - pickled = isinstance(ckpt['model'], nn.Module) - train_type = type_ == 'train' - ensemble_type = type_ == 'ensemble' - - if pickled and ensemble_type: - # load ensemble using pickled - cfg = None - model = attempt_load(weights, map_location=device) # load FP32 model - state_dict = model.state_dict() - else: - # load model from config and weights - cfg = cfg or (ckpt['yaml'] if 'yaml' in ckpt else None) or \ - (ckpt['model'].yaml if pickled else None) - model = Model(cfg, ch=3, nc=ckpt['nc'] if ('nc' in ckpt and not nc) else nc, - anchors=hyp.get('anchors') if hyp else None).to(device) - model_key = 'ema' if (not train_type and 'ema' in ckpt and ckpt['ema']) else 'model' - state_dict = ckpt[model_key].float().state_dict() if pickled else ckpt[model_key] - - # turn gradients for params back on in case they were removed - for p in model.parameters(): - p.requires_grad = True - - # load sparseml recipe for applying pruning and quantization - recipe = recipe or (ckpt['recipe'] if 'recipe' in ckpt else None) - sparseml_wrapper = SparseMLWrapper(model, recipe) - exclude_anchors = train_type and (cfg or hyp.get('anchors')) and not resume - loaded = False - - if not train_type: - # apply the recipe to create the final state of the model when not training - sparseml_wrapper.apply() - else: - # intialize the recipe for training and restore the weights before if no quantized weights - quantized_state_dict = any([name.endswith('.zero_point') for name in state_dict.keys()]) - if not quantized_state_dict: - state_dict = load_state_dict(model, state_dict, train=True, exclude_anchors=exclude_anchors) - loaded = True - sparseml_wrapper.initialize(start_epoch) - - if not loaded: - state_dict = load_state_dict(model, state_dict, train=train_type, exclude_anchors=exclude_anchors) - - model.float() - report = 'Transferred %g/%g items 
from %s' % (len(state_dict), len(model.state_dict()), weights) - - return model, { - 'ckpt': ckpt, - 'state_dict': state_dict, - 'start_epoch': start_epoch, - 'sparseml_wrapper': sparseml_wrapper, - 'report': report, - } - - -def load_state_dict(model, state_dict, train, exclude_anchors): - # fix older state_dict names not porting to the new model setup - state_dict = {key if not key.startswith("module.") else key[7:]: val for key, val in state_dict.items()} - - if train: - # load any missing weights from the model - state_dict = intersect_dicts(state_dict, model.state_dict(), exclude=['anchor'] if exclude_anchors else []) - - model.load_state_dict(state_dict, strict=not train) # load - - return state_dict - - -if __name__ == '__main__': - parser = argparse.ArgumentParser() - parser.add_argument('--weights', type=str, default='./yolov3.pt', help='weights path') # from yolov3/models/ - parser.add_argument('--img-size', nargs='+', type=int, default=[640, 640], help='image size') # height, width - parser.add_argument('--batch-size', type=int, default=1, help='batch size') - parser.add_argument('--device', default='cpu', help='cuda device, i.e. 0 or 0,1,2,3 or cpu') - parser.add_argument('--include', nargs='+', default=['torchscript', 'onnx', 'coreml'], help='include formats') - parser.add_argument('--half', action='store_true', help='FP16 half-precision export') - parser.add_argument('--inplace', action='store_true', help='set YOLOv5 Detect() inplace=True') - parser.add_argument('--train', action='store_true', help='model.train() mode') - parser.add_argument('--optimize', action='store_true', help='optimize TorchScript for mobile') # TorchScript-only - parser.add_argument('--dynamic', action='store_true', help='dynamic ONNX axes') # ONNX-only - parser.add_argument('--simplify', action='store_true', help='simplify ONNX model') # ONNX-only - parser.add_argument('--opset-version', type=int, default=12, help='ONNX opset version') # ONNX-only - parser.add_argument("--remove-grid", action="store_true", help="remove export of Detect() layer grid") - opt = parser.parse_args() - opt.img_size *= 2 if len(opt.img_size) == 1 else 1 # expand - opt.include = [x.lower() for x in opt.include] - print(opt) - set_logging() - t = time.time() - - # Load PyTorch model - device = select_device(opt.device) - model, extras = load_checkpoint('ensemble', opt.weights, device) # load FP32 model - sparseml_wrapper = extras['sparseml_wrapper'] - labels = model.names - - # Checks - gs = int(max(model.stride)) # grid size (max stride) - opt.img_size = [check_img_size(x, gs) for x in opt.img_size] # verify img_size are gs-multiples - assert not (opt.device.lower() == 'cpu' and opt.half), '--half only compatible with GPU export, i.e. 
use --device 0' - - # Input - img = torch.zeros(opt.batch_size, 3, *opt.img_size).to(device) # image size(1,3,320,192) iDetection - - # Update model - if opt.half: - img, model = img.half(), model.half() # to FP16 - if opt.train: - model.train() # training mode (no grid construction in Detect layer) - else: - model.eval() - for k, m in model.named_modules(): - m._non_persistent_buffers_set = set() # pytorch 1.6.0 compatibility - if isinstance(m, models.common.Conv): # assign export-friendly activations - if isinstance(m.act, nn.Hardswish): - m.act = Hardswish() - elif isinstance(m.act, nn.SiLU): - m.act = SiLU() - elif isinstance(m, models.yolo.Detect): - m.inplace = opt.inplace - m.onnx_dynamic = opt.dynamic - # m.forward = m.forward_export # assign forward (optional) - model.model[-1].export = not opt.remove_grid # set Detect() layer grid export - - for _ in range(2): - y = model(img) # dry runs - print(f"\n{colorstr('PyTorch:')} starting from {opt.weights} ({file_size(opt.weights):.1f} MB)") - - # TorchScript export ----------------------------------------------------------------------------------------------- - if 'torchscript' in opt.include or 'coreml' in opt.include: - prefix = colorstr('TorchScript:') - try: - print(f'\n{prefix} starting export with torch {torch.__version__}...') - f = opt.weights.replace('.pt', '.torchscript.pt') # filename - ts = torch.jit.trace(model, img, strict=False) - (optimize_for_mobile(ts) if opt.optimize else ts).save(f) - print(f'{prefix} export success, saved as {f} ({file_size(f):.1f} MB)') - except Exception as e: - print(f'{prefix} export failure: {e}') - - # ONNX export ------------------------------------------------------------------------------------------------------ - if 'onnx' in opt.include: - prefix = colorstr('ONNX:') - try: - import onnx - - print(f'{prefix} starting export with onnx {onnx.__version__}...') - f = opt.weights.replace('.pt', '.onnx') # filename - # export through SparseML so quantized and pruned graphs can be corrected - save_dir = Path(f).parent.absolute() - save_name = f.split(os.path.sep)[-1] - - # get the number of outputs so we know how to name and change dynamic axes - # nested outputs can be returned if model is exported with dynamic - def _count_outputs(outputs): - count = 0 - if isinstance(outputs, list) or isinstance(outputs, tuple): - for out in outputs: - count += _count_outputs(out) - else: - count += 1 - return count - - outputs = model(img) - num_outputs = _count_outputs(outputs) - input_names = ['input'] - output_names = [f'out_{i}' for i in range(num_outputs)] - dynamic_axes = {k: {0: 'batch'} for k in (input_names + output_names)} if opt.dynamic else None - exporter = ModuleExporter(model, save_dir) - exporter.export_onnx(img, name=save_name, convert_qat=True, - input_names=input_names, output_names=output_names, dynamic_axes=dynamic_axes) - try: - skip_onnx_input_quantize(f, f) - except: - pass - - # Checks - model_onnx = onnx.load(f) # load onnx model - onnx.checker.check_model(model_onnx) # check onnx model - # print(onnx.helper.printable_graph(model_onnx.graph)) # print - - # Simplify - if opt.simplify: - try: - check_requirements(['onnx-simplifier']) - import onnxsim - - print(f'{prefix} simplifying with onnx-simplifier {onnxsim.__version__}...') - model_onnx, check = onnxsim.simplify( - model_onnx, - dynamic_input_shape=opt.dynamic, - input_shapes={'images': list(img.shape)} if opt.dynamic else None) - assert check, 'assert check failed' - onnx.save(model_onnx, f) - except Exception as e: - 
print(f'{prefix} simplifier failure: {e}') - print(f'{prefix} export success, saved as {f} ({file_size(f):.1f} MB)') - except Exception as e: - print(f'{prefix} export failure: {e}') - - # CoreML export ---------------------------------------------------------------------------------------------------- - if 'coreml' in opt.include: - prefix = colorstr('CoreML:') - try: - import coremltools as ct - - print(f'{prefix} starting export with coremltools {ct.__version__}...') - assert opt.train, 'CoreML exports should be placed in model.train() mode with `python export.py --train`' - model = ct.convert(ts, inputs=[ct.ImageType('image', shape=img.shape, scale=1 / 255.0, bias=[0, 0, 0])]) - f = opt.weights.replace('.pt', '.mlmodel') # filename - model.save(f) - print(f'{prefix} export success, saved as {f} ({file_size(f):.1f} MB)') - except Exception as e: - print(f'{prefix} export failure: {e}') - - # Finish - print('\nExport complete (%.2fs). Visualize with https://github.com/lutzroeder/netron.' % (time.time() - t)) diff --git a/models/hub/anchors.yaml b/models/hub/anchors.yaml index a07a4dc72387..e4d7beb06e07 100644 --- a/models/hub/anchors.yaml +++ b/models/hub/anchors.yaml @@ -1,58 +1,59 @@ -# Default YOLOv5 anchors for COCO data +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license +# Default anchors for COCO data # P5 ------------------------------------------------------------------------------------------------------------------- # P5-640: anchors_p5_640: - - [ 10,13, 16,30, 33,23 ] # P3/8 - - [ 30,61, 62,45, 59,119 ] # P4/16 - - [ 116,90, 156,198, 373,326 ] # P5/32 + - [10,13, 16,30, 33,23] # P3/8 + - [30,61, 62,45, 59,119] # P4/16 + - [116,90, 156,198, 373,326] # P5/32 # P6 ------------------------------------------------------------------------------------------------------------------- # P6-640: thr=0.25: 0.9964 BPR, 5.54 anchors past thr, n=12, img_size=640, metric_all=0.281/0.716-mean/best, past_thr=0.469-mean: 9,11, 21,19, 17,41, 43,32, 39,70, 86,64, 65,131, 134,130, 120,265, 282,180, 247,354, 512,387 anchors_p6_640: - - [ 9,11, 21,19, 17,41 ] # P3/8 - - [ 43,32, 39,70, 86,64 ] # P4/16 - - [ 65,131, 134,130, 120,265 ] # P5/32 - - [ 282,180, 247,354, 512,387 ] # P6/64 + - [9,11, 21,19, 17,41] # P3/8 + - [43,32, 39,70, 86,64] # P4/16 + - [65,131, 134,130, 120,265] # P5/32 + - [282,180, 247,354, 512,387] # P6/64 # P6-1280: thr=0.25: 0.9950 BPR, 5.55 anchors past thr, n=12, img_size=1280, metric_all=0.281/0.714-mean/best, past_thr=0.468-mean: 19,27, 44,40, 38,94, 96,68, 86,152, 180,137, 140,301, 303,264, 238,542, 436,615, 739,380, 925,792 anchors_p6_1280: - - [ 19,27, 44,40, 38,94 ] # P3/8 - - [ 96,68, 86,152, 180,137 ] # P4/16 - - [ 140,301, 303,264, 238,542 ] # P5/32 - - [ 436,615, 739,380, 925,792 ] # P6/64 + - [19,27, 44,40, 38,94] # P3/8 + - [96,68, 86,152, 180,137] # P4/16 + - [140,301, 303,264, 238,542] # P5/32 + - [436,615, 739,380, 925,792] # P6/64 # P6-1920: thr=0.25: 0.9950 BPR, 5.55 anchors past thr, n=12, img_size=1920, metric_all=0.281/0.714-mean/best, past_thr=0.468-mean: 28,41, 67,59, 57,141, 144,103, 129,227, 270,205, 209,452, 455,396, 358,812, 653,922, 1109,570, 1387,1187 anchors_p6_1920: - - [ 28,41, 67,59, 57,141 ] # P3/8 - - [ 144,103, 129,227, 270,205 ] # P4/16 - - [ 209,452, 455,396, 358,812 ] # P5/32 - - [ 653,922, 1109,570, 1387,1187 ] # P6/64 + - [28,41, 67,59, 57,141] # P3/8 + - [144,103, 129,227, 270,205] # P4/16 + - [209,452, 455,396, 358,812] # P5/32 + - [653,922, 1109,570, 1387,1187] # P6/64 # P7 
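Each `anchors_*` entry in this file is a flat list of `w,h` pairs per output stride, so the number of detection layers and anchors per layer fall out of the list shape:

```python
# Reading anchors_p6_640 from the YAML above: 4 strides x 3 (w, h) anchors.
anchors_p6_640 = [
    [9, 11, 21, 19, 17, 41],         # P3/8
    [43, 32, 39, 70, 86, 64],        # P4/16
    [65, 131, 134, 130, 120, 265],   # P5/32
    [282, 180, 247, 354, 512, 387],  # P6/64
]
nl = len(anchors_p6_640)          # detection layers: 4
na = len(anchors_p6_640[0]) // 2  # anchors per layer: 3
pairs = [list(zip(row[::2], row[1::2])) for row in anchors_p6_640]
```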
------------------------------------------------------------------------------------------------------------------- # P7-640: thr=0.25: 0.9962 BPR, 6.76 anchors past thr, n=15, img_size=640, metric_all=0.275/0.733-mean/best, past_thr=0.466-mean: 11,11, 13,30, 29,20, 30,46, 61,38, 39,92, 78,80, 146,66, 79,163, 149,150, 321,143, 157,303, 257,402, 359,290, 524,372 anchors_p7_640: - - [ 11,11, 13,30, 29,20 ] # P3/8 - - [ 30,46, 61,38, 39,92 ] # P4/16 - - [ 78,80, 146,66, 79,163 ] # P5/32 - - [ 149,150, 321,143, 157,303 ] # P6/64 - - [ 257,402, 359,290, 524,372 ] # P7/128 + - [11,11, 13,30, 29,20] # P3/8 + - [30,46, 61,38, 39,92] # P4/16 + - [78,80, 146,66, 79,163] # P5/32 + - [149,150, 321,143, 157,303] # P6/64 + - [257,402, 359,290, 524,372] # P7/128 # P7-1280: thr=0.25: 0.9968 BPR, 6.71 anchors past thr, n=15, img_size=1280, metric_all=0.273/0.732-mean/best, past_thr=0.463-mean: 19,22, 54,36, 32,77, 70,83, 138,71, 75,173, 165,159, 148,334, 375,151, 334,317, 251,626, 499,474, 750,326, 534,814, 1079,818 anchors_p7_1280: - - [ 19,22, 54,36, 32,77 ] # P3/8 - - [ 70,83, 138,71, 75,173 ] # P4/16 - - [ 165,159, 148,334, 375,151 ] # P5/32 - - [ 334,317, 251,626, 499,474 ] # P6/64 - - [ 750,326, 534,814, 1079,818 ] # P7/128 + - [19,22, 54,36, 32,77] # P3/8 + - [70,83, 138,71, 75,173] # P4/16 + - [165,159, 148,334, 375,151] # P5/32 + - [334,317, 251,626, 499,474] # P6/64 + - [750,326, 534,814, 1079,818] # P7/128 # P7-1920: thr=0.25: 0.9968 BPR, 6.71 anchors past thr, n=15, img_size=1920, metric_all=0.273/0.732-mean/best, past_thr=0.463-mean: 29,34, 81,55, 47,115, 105,124, 207,107, 113,259, 247,238, 222,500, 563,227, 501,476, 376,939, 749,711, 1126,489, 801,1222, 1618,1227 anchors_p7_1920: - - [ 29,34, 81,55, 47,115 ] # P3/8 - - [ 105,124, 207,107, 113,259 ] # P4/16 - - [ 247,238, 222,500, 563,227 ] # P5/32 - - [ 501,476, 376,939, 749,711 ] # P6/64 - - [ 1126,489, 801,1222, 1618,1227 ] # P7/128 + - [29,34, 81,55, 47,115] # P3/8 + - [105,124, 207,107, 113,259] # P4/16 + - [247,238, 222,500, 563,227] # P5/32 + - [501,476, 376,939, 749,711] # P6/64 + - [1126,489, 801,1222, 1618,1227] # P7/128 diff --git a/models/hub/yolov3-spp.yaml b/models/hub/yolov3-spp.yaml index 38dcc449f0d0..c66982158ce8 100644 --- a/models/hub/yolov3-spp.yaml +++ b/models/hub/yolov3-spp.yaml @@ -1,9 +1,9 @@ -# parameters +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license + +# Parameters nc: 80 # number of classes depth_multiple: 1.0 # model depth multiple width_multiple: 1.0 # layer channel multiple - -# anchors anchors: - [10,13, 16,30, 33,23] # P3/8 - [30,61, 62,45, 59,119] # P4/16 diff --git a/models/hub/yolov3-tiny.yaml b/models/hub/yolov3-tiny.yaml index ff7638cad3be..b28b44315248 100644 --- a/models/hub/yolov3-tiny.yaml +++ b/models/hub/yolov3-tiny.yaml @@ -1,9 +1,9 @@ -# parameters +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license + +# Parameters nc: 80 # number of classes depth_multiple: 1.0 # model depth multiple width_multiple: 1.0 # layer channel multiple - -# anchors anchors: - [10,14, 23,27, 37,58] # P4/16 - [81,82, 135,169, 344,319] # P5/32 diff --git a/models/hub/yolov3.yaml b/models/hub/yolov3.yaml index f2e761355469..d1ef91290a8d 100644 --- a/models/hub/yolov3.yaml +++ b/models/hub/yolov3.yaml @@ -1,9 +1,9 @@ -# parameters +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license + +# Parameters nc: 80 # number of classes depth_multiple: 1.0 # model depth multiple width_multiple: 1.0 # layer channel multiple - -# anchors anchors: - [10,13, 16,30, 33,23] # P3/8 - [30,61, 62,45, 59,119] # P4/16 @@ -28,7 +28,7 @@ backbone: # YOLOv3 head 
head: [[-1, 1, Bottleneck, [1024, False]], - [-1, 1, Conv, [512, [1, 1]]], + [-1, 1, Conv, [512, 1, 1]], [-1, 1, Conv, [1024, 3, 1]], [-1, 1, Conv, [512, 1, 1]], [-1, 1, Conv, [1024, 3, 1]], # 15 (P5/32-large) diff --git a/models/hub/yolov5-bifpn.yaml b/models/hub/yolov5-bifpn.yaml new file mode 100644 index 000000000000..504815f5cfa0 --- /dev/null +++ b/models/hub/yolov5-bifpn.yaml @@ -0,0 +1,48 @@ +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license + +# Parameters +nc: 80 # number of classes +depth_multiple: 1.0 # model depth multiple +width_multiple: 1.0 # layer channel multiple +anchors: + - [10,13, 16,30, 33,23] # P3/8 + - [30,61, 62,45, 59,119] # P4/16 + - [116,90, 156,198, 373,326] # P5/32 + +# YOLOv5 v6.0 backbone +backbone: + # [from, number, module, args] + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, C3, [128]], + [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 + [-1, 6, C3, [256]], + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, C3, [512]], + [-1, 1, Conv, [1024, 3, 2]], # 7-P5/32 + [-1, 3, C3, [1024]], + [-1, 1, SPPF, [1024, 5]], # 9 + ] + +# YOLOv5 v6.0 BiFPN head +head: + [[-1, 1, Conv, [512, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 3, C3, [512, False]], # 13 + + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 3, C3, [256, False]], # 17 (P3/8-small) + + [-1, 1, Conv, [256, 3, 2]], + [[-1, 14, 6], 1, Concat, [1]], # cat P4 <--- BiFPN change + [-1, 3, C3, [512, False]], # 20 (P4/16-medium) + + [-1, 1, Conv, [512, 3, 2]], + [[-1, 10], 1, Concat, [1]], # cat head P5 + [-1, 3, C3, [1024, False]], # 23 (P5/32-large) + + [[17, 20, 23], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5) + ] diff --git a/models/hub/yolov5-fpn.yaml b/models/hub/yolov5-fpn.yaml index e772bffecbbc..a23e9c6fbf9f 100644 --- a/models/hub/yolov5-fpn.yaml +++ b/models/hub/yolov5-fpn.yaml @@ -1,42 +1,42 @@ -# parameters +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license + +# Parameters nc: 80 # number of classes depth_multiple: 1.0 # model depth multiple width_multiple: 1.0 # layer channel multiple - -# anchors anchors: - [10,13, 16,30, 33,23] # P3/8 - [30,61, 62,45, 59,119] # P4/16 - [116,90, 156,198, 373,326] # P5/32 -# YOLOv5 backbone +# YOLOv5 v6.0 backbone backbone: # [from, number, module, args] - [[-1, 1, Focus, [64, 3]], # 0-P1/2 + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 - [-1, 3, Bottleneck, [128]], + [-1, 3, C3, [128]], [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 - [-1, 9, BottleneckCSP, [256]], + [-1, 6, C3, [256]], [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 - [-1, 9, BottleneckCSP, [512]], + [-1, 9, C3, [512]], [-1, 1, Conv, [1024, 3, 2]], # 7-P5/32 - [-1, 1, SPP, [1024, [5, 9, 13]]], - [-1, 6, BottleneckCSP, [1024]], # 9 + [-1, 3, C3, [1024]], + [-1, 1, SPPF, [1024, 5]], # 9 ] -# YOLOv5 FPN head +# YOLOv5 v6.0 FPN head head: - [[-1, 3, BottleneckCSP, [1024, False]], # 10 (P5/32-large) + [[-1, 3, C3, [1024, False]], # 10 (P5/32-large) [-1, 1, nn.Upsample, [None, 2, 'nearest']], [[-1, 6], 1, Concat, [1]], # cat backbone P4 [-1, 1, Conv, [512, 1, 1]], - [-1, 3, BottleneckCSP, [512, False]], # 14 (P4/16-medium) + [-1, 3, C3, [512, False]], # 14 (P4/16-medium) [-1, 1, nn.Upsample, [None, 2, 'nearest']], [[-1, 4], 1, Concat, [1]], # cat backbone P3 [-1, 1, Conv, [256, 1, 1]], - [-1, 3, BottleneckCSP, [256, False]], # 18 (P3/8-small) + [-1, 3, C3, [256, False]], # 18 (P3/8-small) [[18, 14, 10], 1, Detect, [nc, 
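The recurring backbone edit in these YAMLs, `Focus([64, 3])` becoming `Conv([64, 6, 2, 2])`, trades the space-to-depth slice-and-concat for a single 6x6 stride-2 convolution with the same output geometry. A shape-level sketch of the equivalence:

```python
# Both paths map x(1,3,640,640) to x(1,64,320,320); weights are random here,
# so this checks geometry only, not learned equivalence.
import torch
import torch.nn as nn

x = torch.rand(1, 3, 640, 640)
focus = torch.cat([x[..., ::2, ::2], x[..., 1::2, ::2],
                   x[..., ::2, 1::2], x[..., 1::2, 1::2]], 1)  # (1, 12, 320, 320)
y_focus = nn.Conv2d(12, 64, 3, 1, 1)(focus)  # Focus: slice, then 3x3 conv
y_conv = nn.Conv2d(3, 64, 6, 2, 2)(x)        # replacement: one 6x6 s2 conv
assert y_focus.shape == y_conv.shape == (1, 64, 320, 320)
```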
anchors]], # Detect(P3, P4, P5) ] diff --git a/models/hub/yolov5-p2.yaml b/models/hub/yolov5-p2.yaml index 0633a90fd065..554117dda59a 100644 --- a/models/hub/yolov5-p2.yaml +++ b/models/hub/yolov5-p2.yaml @@ -1,54 +1,54 @@ -# parameters +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license + +# Parameters nc: 80 # number of classes depth_multiple: 1.0 # model depth multiple width_multiple: 1.0 # layer channel multiple +anchors: 3 # AutoAnchor evolves 3 anchors per P output layer -# anchors -anchors: 3 - -# YOLOv5 backbone +# YOLOv5 v6.0 backbone backbone: # [from, number, module, args] - [ [ -1, 1, Focus, [ 64, 3 ] ], # 0-P1/2 - [ -1, 1, Conv, [ 128, 3, 2 ] ], # 1-P2/4 - [ -1, 3, C3, [ 128 ] ], - [ -1, 1, Conv, [ 256, 3, 2 ] ], # 3-P3/8 - [ -1, 9, C3, [ 256 ] ], - [ -1, 1, Conv, [ 512, 3, 2 ] ], # 5-P4/16 - [ -1, 9, C3, [ 512 ] ], - [ -1, 1, Conv, [ 1024, 3, 2 ] ], # 7-P5/32 - [ -1, 1, SPP, [ 1024, [ 5, 9, 13 ] ] ], - [ -1, 3, C3, [ 1024, False ] ], # 9 + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, C3, [128]], + [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 + [-1, 6, C3, [256]], + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, C3, [512]], + [-1, 1, Conv, [1024, 3, 2]], # 7-P5/32 + [-1, 3, C3, [1024]], + [-1, 1, SPPF, [1024, 5]], # 9 ] -# YOLOv5 head +# YOLOv5 v6.0 head with (P2, P3, P4, P5) outputs head: - [ [ -1, 1, Conv, [ 512, 1, 1 ] ], - [ -1, 1, nn.Upsample, [ None, 2, 'nearest' ] ], - [ [ -1, 6 ], 1, Concat, [ 1 ] ], # cat backbone P4 - [ -1, 3, C3, [ 512, False ] ], # 13 - - [ -1, 1, Conv, [ 256, 1, 1 ] ], - [ -1, 1, nn.Upsample, [ None, 2, 'nearest' ] ], - [ [ -1, 4 ], 1, Concat, [ 1 ] ], # cat backbone P3 - [ -1, 3, C3, [ 256, False ] ], # 17 (P3/8-small) - - [ -1, 1, Conv, [ 128, 1, 1 ] ], - [ -1, 1, nn.Upsample, [ None, 2, 'nearest' ] ], - [ [ -1, 2 ], 1, Concat, [ 1 ] ], # cat backbone P2 - [ -1, 1, C3, [ 128, False ] ], # 21 (P2/4-xsmall) - - [ -1, 1, Conv, [ 128, 3, 2 ] ], - [ [ -1, 18 ], 1, Concat, [ 1 ] ], # cat head P3 - [ -1, 3, C3, [ 256, False ] ], # 24 (P3/8-small) - - [ -1, 1, Conv, [ 256, 3, 2 ] ], - [ [ -1, 14 ], 1, Concat, [ 1 ] ], # cat head P4 - [ -1, 3, C3, [ 512, False ] ], # 27 (P4/16-medium) - - [ -1, 1, Conv, [ 512, 3, 2 ] ], - [ [ -1, 10 ], 1, Concat, [ 1 ] ], # cat head P5 - [ -1, 3, C3, [ 1024, False ] ], # 30 (P5/32-large) - - [ [ 24, 27, 30 ], 1, Detect, [ nc, anchors ] ], # Detect(P3, P4, P5) + [[-1, 1, Conv, [512, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 3, C3, [512, False]], # 13 + + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 3, C3, [256, False]], # 17 (P3/8-small) + + [-1, 1, Conv, [128, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 2], 1, Concat, [1]], # cat backbone P2 + [-1, 1, C3, [128, False]], # 21 (P2/4-xsmall) + + [-1, 1, Conv, [128, 3, 2]], + [[-1, 18], 1, Concat, [1]], # cat head P3 + [-1, 3, C3, [256, False]], # 24 (P3/8-small) + + [-1, 1, Conv, [256, 3, 2]], + [[-1, 14], 1, Concat, [1]], # cat head P4 + [-1, 3, C3, [512, False]], # 27 (P4/16-medium) + + [-1, 1, Conv, [512, 3, 2]], + [[-1, 10], 1, Concat, [1]], # cat head P5 + [-1, 3, C3, [1024, False]], # 30 (P5/32-large) + + [[21, 24, 27, 30], 1, Detect, [nc, anchors]], # Detect(P2, P3, P4, P5) ] diff --git a/models/hub/yolov5-p34.yaml b/models/hub/yolov5-p34.yaml new file mode 100644 index 000000000000..dbf0f850083e --- /dev/null +++ b/models/hub/yolov5-p34.yaml @@ -0,0 +1,41 @@ +# YOLOv5 
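A `Pk/s` label in these configs encodes pyramid level `k` and stride `s = 2**k`, so adding the P2/4 output above means predicting on a 160x160 grid for a 640x640 input:

```python
# Output grid sizes per pyramid level for an assumed 640x640 input.
size = 640
for k in (2, 3, 4, 5):
    stride = 2 ** k
    print(f'P{k}/{stride}: {size // stride}x{size // stride} grid')
# P2/4: 160x160 ... P5/32: 20x20
```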
πŸš€ by Ultralytics, GPL-3.0 license + +# Parameters +nc: 80 # number of classes +depth_multiple: 0.33 # model depth multiple +width_multiple: 0.50 # layer channel multiple +anchors: 3 # AutoAnchor evolves 3 anchors per P output layer + +# YOLOv5 v6.0 backbone +backbone: + # [from, number, module, args] + [ [ -1, 1, Conv, [ 64, 6, 2, 2 ] ], # 0-P1/2 + [ -1, 1, Conv, [ 128, 3, 2 ] ], # 1-P2/4 + [ -1, 3, C3, [ 128 ] ], + [ -1, 1, Conv, [ 256, 3, 2 ] ], # 3-P3/8 + [ -1, 6, C3, [ 256 ] ], + [ -1, 1, Conv, [ 512, 3, 2 ] ], # 5-P4/16 + [ -1, 9, C3, [ 512 ] ], + [ -1, 1, Conv, [ 1024, 3, 2 ] ], # 7-P5/32 + [ -1, 3, C3, [ 1024 ] ], + [ -1, 1, SPPF, [ 1024, 5 ] ], # 9 + ] + +# YOLOv5 v6.0 head with (P3, P4) outputs +head: + [ [ -1, 1, Conv, [ 512, 1, 1 ] ], + [ -1, 1, nn.Upsample, [ None, 2, 'nearest' ] ], + [ [ -1, 6 ], 1, Concat, [ 1 ] ], # cat backbone P4 + [ -1, 3, C3, [ 512, False ] ], # 13 + + [ -1, 1, Conv, [ 256, 1, 1 ] ], + [ -1, 1, nn.Upsample, [ None, 2, 'nearest' ] ], + [ [ -1, 4 ], 1, Concat, [ 1 ] ], # cat backbone P3 + [ -1, 3, C3, [ 256, False ] ], # 17 (P3/8-small) + + [ -1, 1, Conv, [ 256, 3, 2 ] ], + [ [ -1, 14 ], 1, Concat, [ 1 ] ], # cat head P4 + [ -1, 3, C3, [ 512, False ] ], # 20 (P4/16-medium) + + [ [ 17, 20 ], 1, Detect, [ nc, anchors ] ], # Detect(P3, P4) + ] diff --git a/models/hub/yolov5-p6.yaml b/models/hub/yolov5-p6.yaml index 3728a118f090..a17202f22044 100644 --- a/models/hub/yolov5-p6.yaml +++ b/models/hub/yolov5-p6.yaml @@ -1,56 +1,56 @@ -# parameters +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license + +# Parameters nc: 80 # number of classes depth_multiple: 1.0 # model depth multiple width_multiple: 1.0 # layer channel multiple +anchors: 3 # AutoAnchor evolves 3 anchors per P output layer -# anchors -anchors: 3 - -# YOLOv5 backbone +# YOLOv5 v6.0 backbone backbone: # [from, number, module, args] - [ [ -1, 1, Focus, [ 64, 3 ] ], # 0-P1/2 - [ -1, 1, Conv, [ 128, 3, 2 ] ], # 1-P2/4 - [ -1, 3, C3, [ 128 ] ], - [ -1, 1, Conv, [ 256, 3, 2 ] ], # 3-P3/8 - [ -1, 9, C3, [ 256 ] ], - [ -1, 1, Conv, [ 512, 3, 2 ] ], # 5-P4/16 - [ -1, 9, C3, [ 512 ] ], - [ -1, 1, Conv, [ 768, 3, 2 ] ], # 7-P5/32 - [ -1, 3, C3, [ 768 ] ], - [ -1, 1, Conv, [ 1024, 3, 2 ] ], # 9-P6/64 - [ -1, 1, SPP, [ 1024, [ 3, 5, 7 ] ] ], - [ -1, 3, C3, [ 1024, False ] ], # 11 + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, C3, [128]], + [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 + [-1, 6, C3, [256]], + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, C3, [512]], + [-1, 1, Conv, [768, 3, 2]], # 7-P5/32 + [-1, 3, C3, [768]], + [-1, 1, Conv, [1024, 3, 2]], # 9-P6/64 + [-1, 3, C3, [1024]], + [-1, 1, SPPF, [1024, 5]], # 11 ] -# YOLOv5 head +# YOLOv5 v6.0 head with (P3, P4, P5, P6) outputs head: - [ [ -1, 1, Conv, [ 768, 1, 1 ] ], - [ -1, 1, nn.Upsample, [ None, 2, 'nearest' ] ], - [ [ -1, 8 ], 1, Concat, [ 1 ] ], # cat backbone P5 - [ -1, 3, C3, [ 768, False ] ], # 15 - - [ -1, 1, Conv, [ 512, 1, 1 ] ], - [ -1, 1, nn.Upsample, [ None, 2, 'nearest' ] ], - [ [ -1, 6 ], 1, Concat, [ 1 ] ], # cat backbone P4 - [ -1, 3, C3, [ 512, False ] ], # 19 - - [ -1, 1, Conv, [ 256, 1, 1 ] ], - [ -1, 1, nn.Upsample, [ None, 2, 'nearest' ] ], - [ [ -1, 4 ], 1, Concat, [ 1 ] ], # cat backbone P3 - [ -1, 3, C3, [ 256, False ] ], # 23 (P3/8-small) - - [ -1, 1, Conv, [ 256, 3, 2 ] ], - [ [ -1, 20 ], 1, Concat, [ 1 ] ], # cat head P4 - [ -1, 3, C3, [ 512, False ] ], # 26 (P4/16-medium) - - [ -1, 1, Conv, [ 512, 3, 2 ] ], - [ [ -1, 16 ], 1, Concat, [ 1 ] ], # cat head P5 - [ -1, 3, C3, [ 768, False ] ], 
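Another change repeated across these backbones is `SPP([1024, [5, 9, 13]])` becoming `SPPF([1024, 5])`. The swap works because stride-1 5x5 max-pools compose: two in series cover a 9x9 window and three cover 13x13, so serial pooling reproduces the parallel SPP branches at lower cost. A quick check:

```python
# Chained 5x5 max-pools equal the 9x9 and 13x13 pools they replace.
import torch
import torch.nn as nn

x = torch.rand(1, 8, 32, 32)
m5 = nn.MaxPool2d(kernel_size=5, stride=1, padding=2)
m9 = nn.MaxPool2d(kernel_size=9, stride=1, padding=4)
m13 = nn.MaxPool2d(kernel_size=13, stride=1, padding=6)
y1 = m5(x)
y2 = m5(y1)  # 5x5 twice == one 9x9
y3 = m5(y2)  # 5x5 three times == one 13x13
assert torch.equal(y2, m9(x)) and torch.equal(y3, m13(x))
```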
# 29 (P5/32-large) - - [ -1, 1, Conv, [ 768, 3, 2 ] ], - [ [ -1, 12 ], 1, Concat, [ 1 ] ], # cat head P6 - [ -1, 3, C3, [ 1024, False ] ], # 32 (P5/64-xlarge) - - [ [ 23, 26, 29, 32 ], 1, Detect, [ nc, anchors ] ], # Detect(P3, P4, P5, P6) + [[-1, 1, Conv, [768, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 8], 1, Concat, [1]], # cat backbone P5 + [-1, 3, C3, [768, False]], # 15 + + [-1, 1, Conv, [512, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 3, C3, [512, False]], # 19 + + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 3, C3, [256, False]], # 23 (P3/8-small) + + [-1, 1, Conv, [256, 3, 2]], + [[-1, 20], 1, Concat, [1]], # cat head P4 + [-1, 3, C3, [512, False]], # 26 (P4/16-medium) + + [-1, 1, Conv, [512, 3, 2]], + [[-1, 16], 1, Concat, [1]], # cat head P5 + [-1, 3, C3, [768, False]], # 29 (P5/32-large) + + [-1, 1, Conv, [768, 3, 2]], + [[-1, 12], 1, Concat, [1]], # cat head P6 + [-1, 3, C3, [1024, False]], # 32 (P6/64-xlarge) + + [[23, 26, 29, 32], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5, P6) ] diff --git a/models/hub/yolov5-p7.yaml b/models/hub/yolov5-p7.yaml index ca8f8492ce0e..edd7d13a34a6 100644 --- a/models/hub/yolov5-p7.yaml +++ b/models/hub/yolov5-p7.yaml @@ -1,67 +1,67 @@ -# parameters +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license + +# Parameters nc: 80 # number of classes depth_multiple: 1.0 # model depth multiple width_multiple: 1.0 # layer channel multiple +anchors: 3 # AutoAnchor evolves 3 anchors per P output layer -# anchors -anchors: 3 - -# YOLOv5 backbone +# YOLOv5 v6.0 backbone backbone: # [from, number, module, args] - [ [ -1, 1, Focus, [ 64, 3 ] ], # 0-P1/2 - [ -1, 1, Conv, [ 128, 3, 2 ] ], # 1-P2/4 - [ -1, 3, C3, [ 128 ] ], - [ -1, 1, Conv, [ 256, 3, 2 ] ], # 3-P3/8 - [ -1, 9, C3, [ 256 ] ], - [ -1, 1, Conv, [ 512, 3, 2 ] ], # 5-P4/16 - [ -1, 9, C3, [ 512 ] ], - [ -1, 1, Conv, [ 768, 3, 2 ] ], # 7-P5/32 - [ -1, 3, C3, [ 768 ] ], - [ -1, 1, Conv, [ 1024, 3, 2 ] ], # 9-P6/64 - [ -1, 3, C3, [ 1024 ] ], - [ -1, 1, Conv, [ 1280, 3, 2 ] ], # 11-P7/128 - [ -1, 1, SPP, [ 1280, [ 3, 5 ] ] ], - [ -1, 3, C3, [ 1280, False ] ], # 13 + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, C3, [128]], + [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 + [-1, 6, C3, [256]], + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, C3, [512]], + [-1, 1, Conv, [768, 3, 2]], # 7-P5/32 + [-1, 3, C3, [768]], + [-1, 1, Conv, [1024, 3, 2]], # 9-P6/64 + [-1, 3, C3, [1024]], + [-1, 1, Conv, [1280, 3, 2]], # 11-P7/128 + [-1, 3, C3, [1280]], + [-1, 1, SPPF, [1280, 5]], # 13 ] -# YOLOv5 head +# YOLOv5 v6.0 head with (P3, P4, P5, P6, P7) outputs head: - [ [ -1, 1, Conv, [ 1024, 1, 1 ] ], - [ -1, 1, nn.Upsample, [ None, 2, 'nearest' ] ], - [ [ -1, 10 ], 1, Concat, [ 1 ] ], # cat backbone P6 - [ -1, 3, C3, [ 1024, False ] ], # 17 + [[-1, 1, Conv, [1024, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 10], 1, Concat, [1]], # cat backbone P6 + [-1, 3, C3, [1024, False]], # 17 - [ -1, 1, Conv, [ 768, 1, 1 ] ], - [ -1, 1, nn.Upsample, [ None, 2, 'nearest' ] ], - [ [ -1, 8 ], 1, Concat, [ 1 ] ], # cat backbone P5 - [ -1, 3, C3, [ 768, False ] ], # 21 + [-1, 1, Conv, [768, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 8], 1, Concat, [1]], # cat backbone P5 + [-1, 3, C3, [768, False]], # 21 - [ -1, 1, Conv, [ 512, 1, 1 ] ], - [ -1, 1, nn.Upsample, [ None, 2, 'nearest' ] ], - [ [ -1, 6 ], 1, 
Concat, [ 1 ] ], # cat backbone P4 - [ -1, 3, C3, [ 512, False ] ], # 25 + [-1, 1, Conv, [512, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 3, C3, [512, False]], # 25 - [ -1, 1, Conv, [ 256, 1, 1 ] ], - [ -1, 1, nn.Upsample, [ None, 2, 'nearest' ] ], - [ [ -1, 4 ], 1, Concat, [ 1 ] ], # cat backbone P3 - [ -1, 3, C3, [ 256, False ] ], # 29 (P3/8-small) + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 3, C3, [256, False]], # 29 (P3/8-small) - [ -1, 1, Conv, [ 256, 3, 2 ] ], - [ [ -1, 26 ], 1, Concat, [ 1 ] ], # cat head P4 - [ -1, 3, C3, [ 512, False ] ], # 32 (P4/16-medium) + [-1, 1, Conv, [256, 3, 2]], + [[-1, 26], 1, Concat, [1]], # cat head P4 + [-1, 3, C3, [512, False]], # 32 (P4/16-medium) - [ -1, 1, Conv, [ 512, 3, 2 ] ], - [ [ -1, 22 ], 1, Concat, [ 1 ] ], # cat head P5 - [ -1, 3, C3, [ 768, False ] ], # 35 (P5/32-large) + [-1, 1, Conv, [512, 3, 2]], + [[-1, 22], 1, Concat, [1]], # cat head P5 + [-1, 3, C3, [768, False]], # 35 (P5/32-large) - [ -1, 1, Conv, [ 768, 3, 2 ] ], - [ [ -1, 18 ], 1, Concat, [ 1 ] ], # cat head P6 - [ -1, 3, C3, [ 1024, False ] ], # 38 (P6/64-xlarge) + [-1, 1, Conv, [768, 3, 2]], + [[-1, 18], 1, Concat, [1]], # cat head P6 + [-1, 3, C3, [1024, False]], # 38 (P6/64-xlarge) - [ -1, 1, Conv, [ 1024, 3, 2 ] ], - [ [ -1, 14 ], 1, Concat, [ 1 ] ], # cat head P7 - [ -1, 3, C3, [ 1280, False ] ], # 41 (P7/128-xxlarge) + [-1, 1, Conv, [1024, 3, 2]], + [[-1, 14], 1, Concat, [1]], # cat head P7 + [-1, 3, C3, [1280, False]], # 41 (P7/128-xxlarge) - [ [ 29, 32, 35, 38, 41 ], 1, Detect, [ nc, anchors ] ], # Detect(P3, P4, P5, P6, P7) + [[29, 32, 35, 38, 41], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5, P6, P7) ] diff --git a/models/hub/yolov5-panet.yaml b/models/hub/yolov5-panet.yaml index 340f95a4dbc9..ccfbf900691c 100644 --- a/models/hub/yolov5-panet.yaml +++ b/models/hub/yolov5-panet.yaml @@ -1,48 +1,48 @@ -# parameters +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license + +# Parameters nc: 80 # number of classes depth_multiple: 1.0 # model depth multiple width_multiple: 1.0 # layer channel multiple - -# anchors anchors: - [10,13, 16,30, 33,23] # P3/8 - [30,61, 62,45, 59,119] # P4/16 - [116,90, 156,198, 373,326] # P5/32 -# YOLOv5 backbone +# YOLOv5 v6.0 backbone backbone: # [from, number, module, args] - [[-1, 1, Focus, [64, 3]], # 0-P1/2 + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 - [-1, 3, BottleneckCSP, [128]], + [-1, 3, C3, [128]], [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 - [-1, 9, BottleneckCSP, [256]], + [-1, 6, C3, [256]], [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 - [-1, 9, BottleneckCSP, [512]], + [-1, 9, C3, [512]], [-1, 1, Conv, [1024, 3, 2]], # 7-P5/32 - [-1, 1, SPP, [1024, [5, 9, 13]]], - [-1, 3, BottleneckCSP, [1024, False]], # 9 + [-1, 3, C3, [1024]], + [-1, 1, SPPF, [1024, 5]], # 9 ] -# YOLOv5 PANet head +# YOLOv5 v6.0 PANet head head: [[-1, 1, Conv, [512, 1, 1]], [-1, 1, nn.Upsample, [None, 2, 'nearest']], [[-1, 6], 1, Concat, [1]], # cat backbone P4 - [-1, 3, BottleneckCSP, [512, False]], # 13 + [-1, 3, C3, [512, False]], # 13 [-1, 1, Conv, [256, 1, 1]], [-1, 1, nn.Upsample, [None, 2, 'nearest']], [[-1, 4], 1, Concat, [1]], # cat backbone P3 - [-1, 3, BottleneckCSP, [256, False]], # 17 (P3/8-small) + [-1, 3, C3, [256, False]], # 17 (P3/8-small) [-1, 1, Conv, [256, 3, 2]], [[-1, 14], 1, Concat, [1]], # cat head P4 - [-1, 3, BottleneckCSP, [512, False]], # 20 (P4/16-medium) + [-1, 3, C3, 
[512, False]], # 20 (P4/16-medium) [-1, 1, Conv, [512, 3, 2]], [[-1, 10], 1, Concat, [1]], # cat head P5 - [-1, 3, BottleneckCSP, [1024, False]], # 23 (P5/32-large) + [-1, 3, C3, [1024, False]], # 23 (P5/32-large) [[17, 20, 23], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5) ] diff --git a/models/hub/yolov5l6.yaml b/models/hub/yolov5l6.yaml index 11298b01f479..632c2cb699e3 100644 --- a/models/hub/yolov5l6.yaml +++ b/models/hub/yolov5l6.yaml @@ -1,60 +1,60 @@ -# parameters +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license + +# Parameters nc: 80 # number of classes depth_multiple: 1.0 # model depth multiple width_multiple: 1.0 # layer channel multiple - -# anchors anchors: - - [ 19,27, 44,40, 38,94 ] # P3/8 - - [ 96,68, 86,152, 180,137 ] # P4/16 - - [ 140,301, 303,264, 238,542 ] # P5/32 - - [ 436,615, 739,380, 925,792 ] # P6/64 + - [19,27, 44,40, 38,94] # P3/8 + - [96,68, 86,152, 180,137] # P4/16 + - [140,301, 303,264, 238,542] # P5/32 + - [436,615, 739,380, 925,792] # P6/64 -# YOLOv5 backbone +# YOLOv5 v6.0 backbone backbone: # [from, number, module, args] - [ [ -1, 1, Focus, [ 64, 3 ] ], # 0-P1/2 - [ -1, 1, Conv, [ 128, 3, 2 ] ], # 1-P2/4 - [ -1, 3, C3, [ 128 ] ], - [ -1, 1, Conv, [ 256, 3, 2 ] ], # 3-P3/8 - [ -1, 9, C3, [ 256 ] ], - [ -1, 1, Conv, [ 512, 3, 2 ] ], # 5-P4/16 - [ -1, 9, C3, [ 512 ] ], - [ -1, 1, Conv, [ 768, 3, 2 ] ], # 7-P5/32 - [ -1, 3, C3, [ 768 ] ], - [ -1, 1, Conv, [ 1024, 3, 2 ] ], # 9-P6/64 - [ -1, 1, SPP, [ 1024, [ 3, 5, 7 ] ] ], - [ -1, 3, C3, [ 1024, False ] ], # 11 + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, C3, [128]], + [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 + [-1, 6, C3, [256]], + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, C3, [512]], + [-1, 1, Conv, [768, 3, 2]], # 7-P5/32 + [-1, 3, C3, [768]], + [-1, 1, Conv, [1024, 3, 2]], # 9-P6/64 + [-1, 3, C3, [1024]], + [-1, 1, SPPF, [1024, 5]], # 11 ] -# YOLOv5 head +# YOLOv5 v6.0 head head: - [ [ -1, 1, Conv, [ 768, 1, 1 ] ], - [ -1, 1, nn.Upsample, [ None, 2, 'nearest' ] ], - [ [ -1, 8 ], 1, Concat, [ 1 ] ], # cat backbone P5 - [ -1, 3, C3, [ 768, False ] ], # 15 - - [ -1, 1, Conv, [ 512, 1, 1 ] ], - [ -1, 1, nn.Upsample, [ None, 2, 'nearest' ] ], - [ [ -1, 6 ], 1, Concat, [ 1 ] ], # cat backbone P4 - [ -1, 3, C3, [ 512, False ] ], # 19 - - [ -1, 1, Conv, [ 256, 1, 1 ] ], - [ -1, 1, nn.Upsample, [ None, 2, 'nearest' ] ], - [ [ -1, 4 ], 1, Concat, [ 1 ] ], # cat backbone P3 - [ -1, 3, C3, [ 256, False ] ], # 23 (P3/8-small) - - [ -1, 1, Conv, [ 256, 3, 2 ] ], - [ [ -1, 20 ], 1, Concat, [ 1 ] ], # cat head P4 - [ -1, 3, C3, [ 512, False ] ], # 26 (P4/16-medium) - - [ -1, 1, Conv, [ 512, 3, 2 ] ], - [ [ -1, 16 ], 1, Concat, [ 1 ] ], # cat head P5 - [ -1, 3, C3, [ 768, False ] ], # 29 (P5/32-large) - - [ -1, 1, Conv, [ 768, 3, 2 ] ], - [ [ -1, 12 ], 1, Concat, [ 1 ] ], # cat head P6 - [ -1, 3, C3, [ 1024, False ] ], # 32 (P6/64-xlarge) - - [ [ 23, 26, 29, 32 ], 1, Detect, [ nc, anchors ] ], # Detect(P3, P4, P5, P6) + [[-1, 1, Conv, [768, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 8], 1, Concat, [1]], # cat backbone P5 + [-1, 3, C3, [768, False]], # 15 + + [-1, 1, Conv, [512, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 3, C3, [512, False]], # 19 + + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 3, C3, [256, False]], # 23 (P3/8-small) + + [-1, 1, Conv, [256, 3, 2]], + [[-1, 20], 1, Concat, [1]], # cat head P4 + 
[-1, 3, C3, [512, False]], # 26 (P4/16-medium) + + [-1, 1, Conv, [512, 3, 2]], + [[-1, 16], 1, Concat, [1]], # cat head P5 + [-1, 3, C3, [768, False]], # 29 (P5/32-large) + + [-1, 1, Conv, [768, 3, 2]], + [[-1, 12], 1, Concat, [1]], # cat head P6 + [-1, 3, C3, [1024, False]], # 32 (P6/64-xlarge) + + [[23, 26, 29, 32], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5, P6) ] diff --git a/models/hub/yolov5m6.yaml b/models/hub/yolov5m6.yaml index 48afc865593a..ecc53fd68ba6 100644 --- a/models/hub/yolov5m6.yaml +++ b/models/hub/yolov5m6.yaml @@ -1,60 +1,60 @@ -# parameters +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license + +# Parameters nc: 80 # number of classes depth_multiple: 0.67 # model depth multiple width_multiple: 0.75 # layer channel multiple - -# anchors anchors: - - [ 19,27, 44,40, 38,94 ] # P3/8 - - [ 96,68, 86,152, 180,137 ] # P4/16 - - [ 140,301, 303,264, 238,542 ] # P5/32 - - [ 436,615, 739,380, 925,792 ] # P6/64 + - [19,27, 44,40, 38,94] # P3/8 + - [96,68, 86,152, 180,137] # P4/16 + - [140,301, 303,264, 238,542] # P5/32 + - [436,615, 739,380, 925,792] # P6/64 -# YOLOv5 backbone +# YOLOv5 v6.0 backbone backbone: # [from, number, module, args] - [ [ -1, 1, Focus, [ 64, 3 ] ], # 0-P1/2 - [ -1, 1, Conv, [ 128, 3, 2 ] ], # 1-P2/4 - [ -1, 3, C3, [ 128 ] ], - [ -1, 1, Conv, [ 256, 3, 2 ] ], # 3-P3/8 - [ -1, 9, C3, [ 256 ] ], - [ -1, 1, Conv, [ 512, 3, 2 ] ], # 5-P4/16 - [ -1, 9, C3, [ 512 ] ], - [ -1, 1, Conv, [ 768, 3, 2 ] ], # 7-P5/32 - [ -1, 3, C3, [ 768 ] ], - [ -1, 1, Conv, [ 1024, 3, 2 ] ], # 9-P6/64 - [ -1, 1, SPP, [ 1024, [ 3, 5, 7 ] ] ], - [ -1, 3, C3, [ 1024, False ] ], # 11 + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, C3, [128]], + [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 + [-1, 6, C3, [256]], + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, C3, [512]], + [-1, 1, Conv, [768, 3, 2]], # 7-P5/32 + [-1, 3, C3, [768]], + [-1, 1, Conv, [1024, 3, 2]], # 9-P6/64 + [-1, 3, C3, [1024]], + [-1, 1, SPPF, [1024, 5]], # 11 ] -# YOLOv5 head +# YOLOv5 v6.0 head head: - [ [ -1, 1, Conv, [ 768, 1, 1 ] ], - [ -1, 1, nn.Upsample, [ None, 2, 'nearest' ] ], - [ [ -1, 8 ], 1, Concat, [ 1 ] ], # cat backbone P5 - [ -1, 3, C3, [ 768, False ] ], # 15 - - [ -1, 1, Conv, [ 512, 1, 1 ] ], - [ -1, 1, nn.Upsample, [ None, 2, 'nearest' ] ], - [ [ -1, 6 ], 1, Concat, [ 1 ] ], # cat backbone P4 - [ -1, 3, C3, [ 512, False ] ], # 19 - - [ -1, 1, Conv, [ 256, 1, 1 ] ], - [ -1, 1, nn.Upsample, [ None, 2, 'nearest' ] ], - [ [ -1, 4 ], 1, Concat, [ 1 ] ], # cat backbone P3 - [ -1, 3, C3, [ 256, False ] ], # 23 (P3/8-small) - - [ -1, 1, Conv, [ 256, 3, 2 ] ], - [ [ -1, 20 ], 1, Concat, [ 1 ] ], # cat head P4 - [ -1, 3, C3, [ 512, False ] ], # 26 (P4/16-medium) - - [ -1, 1, Conv, [ 512, 3, 2 ] ], - [ [ -1, 16 ], 1, Concat, [ 1 ] ], # cat head P5 - [ -1, 3, C3, [ 768, False ] ], # 29 (P5/32-large) - - [ -1, 1, Conv, [ 768, 3, 2 ] ], - [ [ -1, 12 ], 1, Concat, [ 1 ] ], # cat head P6 - [ -1, 3, C3, [ 1024, False ] ], # 32 (P6/64-xlarge) - - [ [ 23, 26, 29, 32 ], 1, Detect, [ nc, anchors ] ], # Detect(P3, P4, P5, P6) + [[-1, 1, Conv, [768, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 8], 1, Concat, [1]], # cat backbone P5 + [-1, 3, C3, [768, False]], # 15 + + [-1, 1, Conv, [512, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 3, C3, [512, False]], # 19 + + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 3, C3, [256, False]], # 
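The `depth_multiple` / `width_multiple` pairs are what differentiate the n/s/m/l/x variants of an otherwise identical YAML: repeats scale with depth and channels with width, rounded to a multiple of 8. A sketch of the arithmetic, mirroring the parser's `make_divisible(c2 * gw, 8)` rounding:

```python
# How a layer spec like [-1, 9, C3, [512]] scales under the multiples above.
import math

def scale(n_repeats, channels, depth_multiple, width_multiple):
    n = max(round(n_repeats * depth_multiple), 1)          # scaled repeats
    c = math.ceil(channels * width_multiple / 8) * 8       # scaled channels
    return n, c

print(scale(9, 512, 0.67, 0.75))  # yolov5m6-style: (6, 384)
print(scale(9, 512, 1.33, 1.25))  # yolov5x6-style: (12, 640)
```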
23 (P3/8-small) + + [-1, 1, Conv, [256, 3, 2]], + [[-1, 20], 1, Concat, [1]], # cat head P4 + [-1, 3, C3, [512, False]], # 26 (P4/16-medium) + + [-1, 1, Conv, [512, 3, 2]], + [[-1, 16], 1, Concat, [1]], # cat head P5 + [-1, 3, C3, [768, False]], # 29 (P5/32-large) + + [-1, 1, Conv, [768, 3, 2]], + [[-1, 12], 1, Concat, [1]], # cat head P6 + [-1, 3, C3, [1024, False]], # 32 (P6/64-xlarge) + + [[23, 26, 29, 32], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5, P6) ] diff --git a/models/hub/yolov5n6.yaml b/models/hub/yolov5n6.yaml new file mode 100644 index 000000000000..0c0c71d32551 --- /dev/null +++ b/models/hub/yolov5n6.yaml @@ -0,0 +1,60 @@ +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license + +# Parameters +nc: 80 # number of classes +depth_multiple: 0.33 # model depth multiple +width_multiple: 0.25 # layer channel multiple +anchors: + - [19,27, 44,40, 38,94] # P3/8 + - [96,68, 86,152, 180,137] # P4/16 + - [140,301, 303,264, 238,542] # P5/32 + - [436,615, 739,380, 925,792] # P6/64 + +# YOLOv5 v6.0 backbone +backbone: + # [from, number, module, args] + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, C3, [128]], + [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 + [-1, 6, C3, [256]], + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, C3, [512]], + [-1, 1, Conv, [768, 3, 2]], # 7-P5/32 + [-1, 3, C3, [768]], + [-1, 1, Conv, [1024, 3, 2]], # 9-P6/64 + [-1, 3, C3, [1024]], + [-1, 1, SPPF, [1024, 5]], # 11 + ] + +# YOLOv5 v6.0 head +head: + [[-1, 1, Conv, [768, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 8], 1, Concat, [1]], # cat backbone P5 + [-1, 3, C3, [768, False]], # 15 + + [-1, 1, Conv, [512, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 3, C3, [512, False]], # 19 + + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 3, C3, [256, False]], # 23 (P3/8-small) + + [-1, 1, Conv, [256, 3, 2]], + [[-1, 20], 1, Concat, [1]], # cat head P4 + [-1, 3, C3, [512, False]], # 26 (P4/16-medium) + + [-1, 1, Conv, [512, 3, 2]], + [[-1, 16], 1, Concat, [1]], # cat head P5 + [-1, 3, C3, [768, False]], # 29 (P5/32-large) + + [-1, 1, Conv, [768, 3, 2]], + [[-1, 12], 1, Concat, [1]], # cat head P6 + [-1, 3, C3, [1024, False]], # 32 (P6/64-xlarge) + + [[23, 26, 29, 32], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5, P6) + ] diff --git a/models/hub/yolov5s-ghost.yaml b/models/hub/yolov5s-ghost.yaml new file mode 100644 index 000000000000..ff9519c3f1aa --- /dev/null +++ b/models/hub/yolov5s-ghost.yaml @@ -0,0 +1,48 @@ +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license + +# Parameters +nc: 80 # number of classes +depth_multiple: 0.33 # model depth multiple +width_multiple: 0.50 # layer channel multiple +anchors: + - [10,13, 16,30, 33,23] # P3/8 + - [30,61, 62,45, 59,119] # P4/16 + - [116,90, 156,198, 373,326] # P5/32 + +# YOLOv5 v6.0 backbone +backbone: + # [from, number, module, args] + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 + [-1, 1, GhostConv, [128, 3, 2]], # 1-P2/4 + [-1, 3, C3Ghost, [128]], + [-1, 1, GhostConv, [256, 3, 2]], # 3-P3/8 + [-1, 6, C3Ghost, [256]], + [-1, 1, GhostConv, [512, 3, 2]], # 5-P4/16 + [-1, 9, C3Ghost, [512]], + [-1, 1, GhostConv, [1024, 3, 2]], # 7-P5/32 + [-1, 3, C3Ghost, [1024]], + [-1, 1, SPPF, [1024, 5]], # 9 + ] + +# YOLOv5 v6.0 head +head: + [[-1, 1, GhostConv, [512, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 3, C3Ghost, [512, False]], # 13 + 
+ [-1, 1, GhostConv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 3, C3Ghost, [256, False]], # 17 (P3/8-small) + + [-1, 1, GhostConv, [256, 3, 2]], + [[-1, 14], 1, Concat, [1]], # cat head P4 + [-1, 3, C3Ghost, [512, False]], # 20 (P4/16-medium) + + [-1, 1, GhostConv, [512, 3, 2]], + [[-1, 10], 1, Concat, [1]], # cat head P5 + [-1, 3, C3Ghost, [1024, False]], # 23 (P5/32-large) + + [[17, 20, 23], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5) + ] diff --git a/models/hub/yolov5s-transformer.yaml b/models/hub/yolov5s-transformer.yaml index f2d666722b30..100d7c447527 100644 --- a/models/hub/yolov5s-transformer.yaml +++ b/models/hub/yolov5s-transformer.yaml @@ -1,30 +1,30 @@ -# parameters +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license + +# Parameters nc: 80 # number of classes depth_multiple: 0.33 # model depth multiple width_multiple: 0.50 # layer channel multiple - -# anchors anchors: - [10,13, 16,30, 33,23] # P3/8 - [30,61, 62,45, 59,119] # P4/16 - [116,90, 156,198, 373,326] # P5/32 -# YOLOv5 backbone +# YOLOv5 v6.0 backbone backbone: # [from, number, module, args] - [[-1, 1, Focus, [64, 3]], # 0-P1/2 + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 [-1, 3, C3, [128]], [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 - [-1, 9, C3, [256]], + [-1, 6, C3, [256]], [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 [-1, 9, C3, [512]], [-1, 1, Conv, [1024, 3, 2]], # 7-P5/32 - [-1, 1, SPP, [1024, [5, 9, 13]]], - [-1, 3, C3TR, [1024, False]], # 9 <-------- C3TR() Transformer module + [-1, 3, C3TR, [1024]], # 8 <--- C3TR() Transformer module + [-1, 1, SPPF, [1024, 5]], # 9 ] -# YOLOv5 head +# YOLOv5 v6.0 head head: [[-1, 1, Conv, [512, 1, 1]], [-1, 1, nn.Upsample, [None, 2, 'nearest']], diff --git a/models/hub/yolov5s6.yaml b/models/hub/yolov5s6.yaml index 1df577a2cc97..a28fb559482b 100644 --- a/models/hub/yolov5s6.yaml +++ b/models/hub/yolov5s6.yaml @@ -1,60 +1,60 @@ -# parameters +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license + +# Parameters nc: 80 # number of classes depth_multiple: 0.33 # model depth multiple width_multiple: 0.50 # layer channel multiple - -# anchors anchors: - - [ 19,27, 44,40, 38,94 ] # P3/8 - - [ 96,68, 86,152, 180,137 ] # P4/16 - - [ 140,301, 303,264, 238,542 ] # P5/32 - - [ 436,615, 739,380, 925,792 ] # P6/64 + - [19,27, 44,40, 38,94] # P3/8 + - [96,68, 86,152, 180,137] # P4/16 + - [140,301, 303,264, 238,542] # P5/32 + - [436,615, 739,380, 925,792] # P6/64 -# YOLOv5 backbone +# YOLOv5 v6.0 backbone backbone: # [from, number, module, args] - [ [ -1, 1, Focus, [ 64, 3 ] ], # 0-P1/2 - [ -1, 1, Conv, [ 128, 3, 2 ] ], # 1-P2/4 - [ -1, 3, C3, [ 128 ] ], - [ -1, 1, Conv, [ 256, 3, 2 ] ], # 3-P3/8 - [ -1, 9, C3, [ 256 ] ], - [ -1, 1, Conv, [ 512, 3, 2 ] ], # 5-P4/16 - [ -1, 9, C3, [ 512 ] ], - [ -1, 1, Conv, [ 768, 3, 2 ] ], # 7-P5/32 - [ -1, 3, C3, [ 768 ] ], - [ -1, 1, Conv, [ 1024, 3, 2 ] ], # 9-P6/64 - [ -1, 1, SPP, [ 1024, [ 3, 5, 7 ] ] ], - [ -1, 3, C3, [ 1024, False ] ], # 11 + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, C3, [128]], + [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 + [-1, 6, C3, [256]], + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, C3, [512]], + [-1, 1, Conv, [768, 3, 2]], # 7-P5/32 + [-1, 3, C3, [768]], + [-1, 1, Conv, [1024, 3, 2]], # 9-P6/64 + [-1, 3, C3, [1024]], + [-1, 1, SPPF, [1024, 5]], # 11 ] -# YOLOv5 head +# YOLOv5 v6.0 head head: - [ [ -1, 1, Conv, [ 768, 1, 1 ] ], - [ -1, 1, nn.Upsample, [ None, 2, 'nearest' ] ], - [ [ -1, 8 ], 1,
Concat, [ 1 ] ], # cat backbone P5 - [ -1, 3, C3, [ 768, False ] ], # 15 - - [ -1, 1, Conv, [ 512, 1, 1 ] ], - [ -1, 1, nn.Upsample, [ None, 2, 'nearest' ] ], - [ [ -1, 6 ], 1, Concat, [ 1 ] ], # cat backbone P4 - [ -1, 3, C3, [ 512, False ] ], # 19 - - [ -1, 1, Conv, [ 256, 1, 1 ] ], - [ -1, 1, nn.Upsample, [ None, 2, 'nearest' ] ], - [ [ -1, 4 ], 1, Concat, [ 1 ] ], # cat backbone P3 - [ -1, 3, C3, [ 256, False ] ], # 23 (P3/8-small) - - [ -1, 1, Conv, [ 256, 3, 2 ] ], - [ [ -1, 20 ], 1, Concat, [ 1 ] ], # cat head P4 - [ -1, 3, C3, [ 512, False ] ], # 26 (P4/16-medium) - - [ -1, 1, Conv, [ 512, 3, 2 ] ], - [ [ -1, 16 ], 1, Concat, [ 1 ] ], # cat head P5 - [ -1, 3, C3, [ 768, False ] ], # 29 (P5/32-large) - - [ -1, 1, Conv, [ 768, 3, 2 ] ], - [ [ -1, 12 ], 1, Concat, [ 1 ] ], # cat head P6 - [ -1, 3, C3, [ 1024, False ] ], # 32 (P6/64-xlarge) - - [ [ 23, 26, 29, 32 ], 1, Detect, [ nc, anchors ] ], # Detect(P3, P4, P5, P6) + [[-1, 1, Conv, [768, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 8], 1, Concat, [1]], # cat backbone P5 + [-1, 3, C3, [768, False]], # 15 + + [-1, 1, Conv, [512, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 3, C3, [512, False]], # 19 + + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 3, C3, [256, False]], # 23 (P3/8-small) + + [-1, 1, Conv, [256, 3, 2]], + [[-1, 20], 1, Concat, [1]], # cat head P4 + [-1, 3, C3, [512, False]], # 26 (P4/16-medium) + + [-1, 1, Conv, [512, 3, 2]], + [[-1, 16], 1, Concat, [1]], # cat head P5 + [-1, 3, C3, [768, False]], # 29 (P5/32-large) + + [-1, 1, Conv, [768, 3, 2]], + [[-1, 12], 1, Concat, [1]], # cat head P6 + [-1, 3, C3, [1024, False]], # 32 (P6/64-xlarge) + + [[23, 26, 29, 32], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5, P6) ] diff --git a/models/hub/yolov5x6.yaml b/models/hub/yolov5x6.yaml index 5ebc02124fe7..ba795c4aad31 100644 --- a/models/hub/yolov5x6.yaml +++ b/models/hub/yolov5x6.yaml @@ -1,60 +1,60 @@ -# parameters +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license + +# Parameters nc: 80 # number of classes depth_multiple: 1.33 # model depth multiple width_multiple: 1.25 # layer channel multiple - -# anchors anchors: - - [ 19,27, 44,40, 38,94 ] # P3/8 - - [ 96,68, 86,152, 180,137 ] # P4/16 - - [ 140,301, 303,264, 238,542 ] # P5/32 - - [ 436,615, 739,380, 925,792 ] # P6/64 + - [19,27, 44,40, 38,94] # P3/8 + - [96,68, 86,152, 180,137] # P4/16 + - [140,301, 303,264, 238,542] # P5/32 + - [436,615, 739,380, 925,792] # P6/64 -# YOLOv5 backbone +# YOLOv5 v6.0 backbone backbone: # [from, number, module, args] - [ [ -1, 1, Focus, [ 64, 3 ] ], # 0-P1/2 - [ -1, 1, Conv, [ 128, 3, 2 ] ], # 1-P2/4 - [ -1, 3, C3, [ 128 ] ], - [ -1, 1, Conv, [ 256, 3, 2 ] ], # 3-P3/8 - [ -1, 9, C3, [ 256 ] ], - [ -1, 1, Conv, [ 512, 3, 2 ] ], # 5-P4/16 - [ -1, 9, C3, [ 512 ] ], - [ -1, 1, Conv, [ 768, 3, 2 ] ], # 7-P5/32 - [ -1, 3, C3, [ 768 ] ], - [ -1, 1, Conv, [ 1024, 3, 2 ] ], # 9-P6/64 - [ -1, 1, SPP, [ 1024, [ 3, 5, 7 ] ] ], - [ -1, 3, C3, [ 1024, False ] ], # 11 + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, C3, [128]], + [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 + [-1, 6, C3, [256]], + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, C3, [512]], + [-1, 1, Conv, [768, 3, 2]], # 7-P5/32 + [-1, 3, C3, [768]], + [-1, 1, Conv, [1024, 3, 2]], # 9-P6/64 + [-1, 3, C3, [1024]], + [-1, 1, SPPF, [1024, 5]], # 11 ] -# YOLOv5 head +# YOLOv5 v6.0 head head: - [ [ -1, 
1, Conv, [ 768, 1, 1 ] ], - [ -1, 1, nn.Upsample, [ None, 2, 'nearest' ] ], - [ [ -1, 8 ], 1, Concat, [ 1 ] ], # cat backbone P5 - [ -1, 3, C3, [ 768, False ] ], # 15 - - [ -1, 1, Conv, [ 512, 1, 1 ] ], - [ -1, 1, nn.Upsample, [ None, 2, 'nearest' ] ], - [ [ -1, 6 ], 1, Concat, [ 1 ] ], # cat backbone P4 - [ -1, 3, C3, [ 512, False ] ], # 19 - - [ -1, 1, Conv, [ 256, 1, 1 ] ], - [ -1, 1, nn.Upsample, [ None, 2, 'nearest' ] ], - [ [ -1, 4 ], 1, Concat, [ 1 ] ], # cat backbone P3 - [ -1, 3, C3, [ 256, False ] ], # 23 (P3/8-small) - - [ -1, 1, Conv, [ 256, 3, 2 ] ], - [ [ -1, 20 ], 1, Concat, [ 1 ] ], # cat head P4 - [ -1, 3, C3, [ 512, False ] ], # 26 (P4/16-medium) - - [ -1, 1, Conv, [ 512, 3, 2 ] ], - [ [ -1, 16 ], 1, Concat, [ 1 ] ], # cat head P5 - [ -1, 3, C3, [ 768, False ] ], # 29 (P5/32-large) - - [ -1, 1, Conv, [ 768, 3, 2 ] ], - [ [ -1, 12 ], 1, Concat, [ 1 ] ], # cat head P6 - [ -1, 3, C3, [ 1024, False ] ], # 32 (P6/64-xlarge) - - [ [ 23, 26, 29, 32 ], 1, Detect, [ nc, anchors ] ], # Detect(P3, P4, P5, P6) + [[-1, 1, Conv, [768, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 8], 1, Concat, [1]], # cat backbone P5 + [-1, 3, C3, [768, False]], # 15 + + [-1, 1, Conv, [512, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 3, C3, [512, False]], # 19 + + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 3, C3, [256, False]], # 23 (P3/8-small) + + [-1, 1, Conv, [256, 3, 2]], + [[-1, 20], 1, Concat, [1]], # cat head P4 + [-1, 3, C3, [512, False]], # 26 (P4/16-medium) + + [-1, 1, Conv, [512, 3, 2]], + [[-1, 16], 1, Concat, [1]], # cat head P5 + [-1, 3, C3, [768, False]], # 29 (P5/32-large) + + [-1, 1, Conv, [768, 3, 2]], + [[-1, 12], 1, Concat, [1]], # cat head P6 + [-1, 3, C3, [1024, False]], # 32 (P6/64-xlarge) + + [[23, 26, 29, 32], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5, P6) ] diff --git a/models/tf.py b/models/tf.py new file mode 100644 index 000000000000..728907f8fb47 --- /dev/null +++ b/models/tf.py @@ -0,0 +1,466 @@ +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license +""" +TensorFlow, Keras and TFLite versions of YOLOv5 +Authored by https://github.com/zldrobit in PR https://github.com/ultralytics/yolov5/pull/1127 + +Usage: + $ python models/tf.py --weights yolov5s.pt + +Export: + $ python path/to/export.py --weights yolov5s.pt --include saved_model pb tflite tfjs +""" + +import argparse +import sys +from copy import deepcopy +from pathlib import Path + +FILE = Path(__file__).resolve() +ROOT = FILE.parents[1] # YOLOv5 root directory +if str(ROOT) not in sys.path: + sys.path.append(str(ROOT)) # add ROOT to PATH +# ROOT = ROOT.relative_to(Path.cwd()) # relative + +import numpy as np +import tensorflow as tf +import torch +import torch.nn as nn +from tensorflow import keras + +from models.common import C3, SPP, SPPF, Bottleneck, BottleneckCSP, Concat, Conv, DWConv, Focus, autopad +from models.experimental import CrossConv, MixConv2d, attempt_load +from models.yolo import Detect +from utils.activations import SiLU +from utils.general import LOGGER, make_divisible, print_args + + +class TFBN(keras.layers.Layer): + # TensorFlow BatchNormalization wrapper + def __init__(self, w=None): + super().__init__() + self.bn = keras.layers.BatchNormalization( + beta_initializer=keras.initializers.Constant(w.bias.numpy()), + gamma_initializer=keras.initializers.Constant(w.weight.numpy()), + 
moving_mean_initializer=keras.initializers.Constant(w.running_mean.numpy()), + moving_variance_initializer=keras.initializers.Constant(w.running_var.numpy()), + epsilon=w.eps) + + def call(self, inputs): + return self.bn(inputs) + + +class TFPad(keras.layers.Layer): + def __init__(self, pad): + super().__init__() + self.pad = tf.constant([[0, 0], [pad, pad], [pad, pad], [0, 0]]) + + def call(self, inputs): + return tf.pad(inputs, self.pad, mode='constant', constant_values=0) + + +class TFConv(keras.layers.Layer): + # Standard convolution + def __init__(self, c1, c2, k=1, s=1, p=None, g=1, act=True, w=None): + # ch_in, ch_out, kernel, stride, padding, groups, activation, weights + super().__init__() + assert g == 1, "TF v2.2 Conv2D does not support 'groups' argument" + assert isinstance(k, int), "Convolutions with multiple kernels are not allowed." + # TensorFlow convolution padding is inconsistent with PyTorch (e.g. k=3 s=2 'SAME' padding) + # see https://stackoverflow.com/questions/52975843/comparing-conv2d-with-padding-between-tensorflow-and-pytorch + + conv = keras.layers.Conv2D( + c2, k, s, 'SAME' if s == 1 else 'VALID', use_bias=False if hasattr(w, 'bn') else True, + kernel_initializer=keras.initializers.Constant(w.conv.weight.permute(2, 3, 1, 0).numpy()), + bias_initializer='zeros' if hasattr(w, 'bn') else keras.initializers.Constant(w.conv.bias.numpy())) + self.conv = conv if s == 1 else keras.Sequential([TFPad(autopad(k, p)), conv]) + self.bn = TFBN(w.bn) if hasattr(w, 'bn') else tf.identity + + # YOLOv5 activations + if isinstance(w.act, nn.LeakyReLU): + self.act = (lambda x: keras.activations.relu(x, alpha=0.1)) if act else tf.identity + elif isinstance(w.act, nn.Hardswish): + self.act = (lambda x: x * tf.nn.relu6(x + 3) * 0.166666667) if act else tf.identity + elif isinstance(w.act, (nn.SiLU, SiLU)): + self.act = (lambda x: keras.activations.swish(x)) if act else tf.identity + else: + raise Exception(f'no matching TensorFlow activation found for {w.act}') + + def call(self, inputs): + return self.act(self.bn(self.conv(inputs))) + + +class TFFocus(keras.layers.Layer): + # Focus wh information into c-space + def __init__(self, c1, c2, k=1, s=1, p=None, g=1, act=True, w=None): + # ch_in, ch_out, kernel, stride, padding, groups + super().__init__() + self.conv = TFConv(c1 * 4, c2, k, s, p, g, act, w.conv) + + def call(self, inputs): # x(b,w,h,c) -> y(b,w/2,h/2,4c) + # inputs = inputs / 255 # normalize 0-255 to 0-1 + return self.conv(tf.concat([inputs[:, ::2, ::2, :], + inputs[:, 1::2, ::2, :], + inputs[:, ::2, 1::2, :], + inputs[:, 1::2, 1::2, :]], 3)) + + +class TFBottleneck(keras.layers.Layer): + # Standard bottleneck + def __init__(self, c1, c2, shortcut=True, g=1, e=0.5, w=None): # ch_in, ch_out, shortcut, groups, expansion + super().__init__() + c_ = int(c2 * e) # hidden channels + self.cv1 = TFConv(c1, c_, 1, 1, w=w.cv1) + self.cv2 = TFConv(c_, c2, 3, 1, g=g, w=w.cv2) + self.add = shortcut and c1 == c2 + + def call(self, inputs): + return inputs + self.cv2(self.cv1(inputs)) if self.add else self.cv2(self.cv1(inputs)) + + +class TFConv2d(keras.layers.Layer): + # Substitution for PyTorch nn.Conv2D + def __init__(self, c1, c2, k, s=1, g=1, bias=True, w=None): + super().__init__() + assert g == 1, "TF v2.2 Conv2D does not support 'groups' argument" + self.conv = keras.layers.Conv2D( + c2, k, s, 'VALID', use_bias=bias, + kernel_initializer=keras.initializers.Constant(w.weight.permute(2, 3, 1, 0).numpy()), + bias_initializer=keras.initializers.Constant(w.bias.numpy()) if bias else None, ) + 
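+ # Note: PyTorch stores conv weights as OIHW (c2, c1, kh, kw); the permute(2, 3, 1, 0) calls above
+ # rearrange them into the HWIO (kh, kw, c1, c2) kernel layout that Keras Conv2D expects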
def call(self, inputs): + return self.conv(inputs) + + +class TFBottleneckCSP(keras.layers.Layer): + # CSP Bottleneck https://github.com/WongKinYiu/CrossStagePartialNetworks + def __init__(self, c1, c2, n=1, shortcut=True, g=1, e=0.5, w=None): + # ch_in, ch_out, number, shortcut, groups, expansion + super().__init__() + c_ = int(c2 * e) # hidden channels + self.cv1 = TFConv(c1, c_, 1, 1, w=w.cv1) + self.cv2 = TFConv2d(c1, c_, 1, 1, bias=False, w=w.cv2) + self.cv3 = TFConv2d(c_, c_, 1, 1, bias=False, w=w.cv3) + self.cv4 = TFConv(2 * c_, c2, 1, 1, w=w.cv4) + self.bn = TFBN(w.bn) + self.act = lambda x: keras.activations.relu(x, alpha=0.1) + self.m = keras.Sequential([TFBottleneck(c_, c_, shortcut, g, e=1.0, w=w.m[j]) for j in range(n)]) + + def call(self, inputs): + y1 = self.cv3(self.m(self.cv1(inputs))) + y2 = self.cv2(inputs) + return self.cv4(self.act(self.bn(tf.concat((y1, y2), axis=3)))) + + +class TFC3(keras.layers.Layer): + # CSP Bottleneck with 3 convolutions + def __init__(self, c1, c2, n=1, shortcut=True, g=1, e=0.5, w=None): + # ch_in, ch_out, number, shortcut, groups, expansion + super().__init__() + c_ = int(c2 * e) # hidden channels + self.cv1 = TFConv(c1, c_, 1, 1, w=w.cv1) + self.cv2 = TFConv(c1, c_, 1, 1, w=w.cv2) + self.cv3 = TFConv(2 * c_, c2, 1, 1, w=w.cv3) + self.m = keras.Sequential([TFBottleneck(c_, c_, shortcut, g, e=1.0, w=w.m[j]) for j in range(n)]) + + def call(self, inputs): + return self.cv3(tf.concat((self.m(self.cv1(inputs)), self.cv2(inputs)), axis=3)) + + +class TFSPP(keras.layers.Layer): + # Spatial pyramid pooling layer used in YOLOv3-SPP + def __init__(self, c1, c2, k=(5, 9, 13), w=None): + super().__init__() + c_ = c1 // 2 # hidden channels + self.cv1 = TFConv(c1, c_, 1, 1, w=w.cv1) + self.cv2 = TFConv(c_ * (len(k) + 1), c2, 1, 1, w=w.cv2) + self.m = [keras.layers.MaxPool2D(pool_size=x, strides=1, padding='SAME') for x in k] + + def call(self, inputs): + x = self.cv1(inputs) + return self.cv2(tf.concat([x] + [m(x) for m in self.m], 3)) + + +class TFSPPF(keras.layers.Layer): + # Spatial pyramid pooling-Fast layer + def __init__(self, c1, c2, k=5, w=None): + super().__init__() + c_ = c1 // 2 # hidden channels + self.cv1 = TFConv(c1, c_, 1, 1, w=w.cv1) + self.cv2 = TFConv(c_ * 4, c2, 1, 1, w=w.cv2) + self.m = keras.layers.MaxPool2D(pool_size=k, strides=1, padding='SAME') + + def call(self, inputs): + x = self.cv1(inputs) + y1 = self.m(x) + y2 = self.m(y1) + return self.cv2(tf.concat([x, y1, y2, self.m(y2)], 3)) + + +class TFDetect(keras.layers.Layer): + def __init__(self, nc=80, anchors=(), ch=(), imgsz=(640, 640), w=None): # detection layer + super().__init__() + self.stride = tf.convert_to_tensor(w.stride.numpy(), dtype=tf.float32) + self.nc = nc # number of classes + self.no = nc + 5 # number of outputs per anchor + self.nl = len(anchors) # number of detection layers + self.na = len(anchors[0]) // 2 # number of anchors + self.grid = [tf.zeros(1)] * self.nl # init grid + self.anchors = tf.convert_to_tensor(w.anchors.numpy(), dtype=tf.float32) + self.anchor_grid = tf.reshape(self.anchors * tf.reshape(self.stride, [self.nl, 1, 1]), + [self.nl, 1, -1, 1, 2]) + self.m = [TFConv2d(x, self.no * self.na, 1, w=w.m[i]) for i, x in enumerate(ch)] + self.training = False # set to False after building model + self.imgsz = imgsz + for i in range(self.nl): + ny, nx = self.imgsz[0] // self.stride[i], self.imgsz[1] // self.stride[i] + self.grid[i] = self._make_grid(nx, ny) + + def call(self, inputs): + z = [] # inference output + x = [] + for i in range(self.nl): + 
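+ # level i: run the 1x1 detection conv; TF activations are NHWC, so the raw (bs, ny, nx, na*no)
+ # output is flattened below to (bs, ny*nx, na, no) before decoding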
x.append(self.m[i](inputs[i])) + # x(bs,20,20,255) to x(bs,400,3,85) + ny, nx = self.imgsz[0] // self.stride[i], self.imgsz[1] // self.stride[i] + x[i] = tf.reshape(x[i], [-1, ny * nx, self.na, self.no]) + + if not self.training: # inference + y = tf.sigmoid(x[i]) + grid = tf.transpose(self.grid[i], [0, 2, 1, 3]) - 0.5 + anchor_grid = tf.transpose(self.anchor_grid[i], [0, 2, 1, 3]) * 4 + xy = (y[..., 0:2] * 2 + grid) * self.stride[i] # xy + wh = y[..., 2:4] ** 2 * anchor_grid + # Normalize xywh to 0-1 to reduce calibration error + xy /= tf.constant([[self.imgsz[1], self.imgsz[0]]], dtype=tf.float32) + wh /= tf.constant([[self.imgsz[1], self.imgsz[0]]], dtype=tf.float32) + y = tf.concat([xy, wh, y[..., 4:]], -1) + z.append(tf.reshape(y, [-1, self.na * ny * nx, self.no])) + + return tf.transpose(x, [0, 2, 1, 3]) if self.training else (tf.concat(z, 1), x) + + @staticmethod + def _make_grid(nx=20, ny=20): + # yv, xv = torch.meshgrid([torch.arange(ny), torch.arange(nx)]) + # return torch.stack((xv, yv), 2).view((1, 1, ny, nx, 2)).float() + xv, yv = tf.meshgrid(tf.range(nx), tf.range(ny)) + return tf.cast(tf.reshape(tf.stack([xv, yv], 2), [1, 1, ny * nx, 2]), dtype=tf.float32) + + +class TFUpsample(keras.layers.Layer): + def __init__(self, size, scale_factor, mode, w=None): # warning: all arguments needed including 'w' + super().__init__() + assert scale_factor == 2, "scale_factor must be 2" + self.upsample = lambda x: tf.image.resize(x, (x.shape[1] * 2, x.shape[2] * 2), method=mode) + # self.upsample = keras.layers.UpSampling2D(size=scale_factor, interpolation=mode) + # with default arguments: align_corners=False, half_pixel_centers=False + # self.upsample = lambda x: tf.raw_ops.ResizeNearestNeighbor(images=x, + # size=(x.shape[1] * 2, x.shape[2] * 2)) + + def call(self, inputs): + return self.upsample(inputs) + + +class TFConcat(keras.layers.Layer): + def __init__(self, dimension=1, w=None): + super().__init__() + assert dimension == 1, "convert only NCHW to NHWC concat" + self.d = 3 + + def call(self, inputs): + return tf.concat(inputs, self.d) + + +def parse_model(d, ch, model, imgsz): # model_dict, input_channels(3) + LOGGER.info(f"\n{'':>3}{'from':>18}{'n':>3}{'params':>10} {'module':<40}{'arguments':<30}") + anchors, nc, gd, gw = d['anchors'], d['nc'], d['depth_multiple'], d['width_multiple'] + na = (len(anchors[0]) // 2) if isinstance(anchors, list) else anchors # number of anchors + no = na * (nc + 5) # number of outputs = anchors * (classes + 5) + + layers, save, c2 = [], [], ch[-1] # layers, savelist, ch out + for i, (f, n, m, args) in enumerate(d['backbone'] + d['head']): # from, number, module, args + m_str = m + m = eval(m) if isinstance(m, str) else m # eval strings + for j, a in enumerate(args): + try: + args[j] = eval(a) if isinstance(a, str) else a # eval strings + except NameError: + pass + + n = max(round(n * gd), 1) if n > 1 else n # depth gain + if m in [nn.Conv2d, Conv, Bottleneck, SPP, SPPF, DWConv, MixConv2d, Focus, CrossConv, BottleneckCSP, C3]: + c1, c2 = ch[f], args[0] + c2 = make_divisible(c2 * gw, 8) if c2 != no else c2 + + args = [c1, c2, *args[1:]] + if m in [BottleneckCSP, C3]: + args.insert(2, n) + n = 1 + elif m is nn.BatchNorm2d: + args = [ch[f]] + elif m is Concat: + c2 = sum(ch[-1 if x == -1 else x + 1] for x in f) + elif m is Detect: + args.append([ch[x + 1] for x in f]) + if isinstance(args[1], int): # number of anchors + args[1] = [list(range(args[1] * 2))] * len(f) + args.append(imgsz) + else: + c2 = ch[f] + + tf_m = eval('TF' + m_str.replace('nn.',
'')) + m_ = keras.Sequential([tf_m(*args, w=model.model[i][j]) for j in range(n)]) if n > 1 \ + else tf_m(*args, w=model.model[i]) # module + + torch_m_ = nn.Sequential(*(m(*args) for _ in range(n))) if n > 1 else m(*args) # module + t = str(m)[8:-2].replace('__main__.', '') # module type + np = sum(x.numel() for x in torch_m_.parameters()) # number params + m_.i, m_.f, m_.type, m_.np = i, f, t, np # attach index, 'from' index, type, number params + LOGGER.info(f'{i:>3}{str(f):>18}{str(n):>3}{np:>10} {t:<40}{str(args):<30}') # print + save.extend(x % i for x in ([f] if isinstance(f, int) else f) if x != -1) # append to savelist + layers.append(m_) + ch.append(c2) + return keras.Sequential(layers), sorted(save) + + +class TFModel: + def __init__(self, cfg='yolov5s.yaml', ch=3, nc=None, model=None, imgsz=(640, 640)): # model, channels, classes + super().__init__() + if isinstance(cfg, dict): + self.yaml = cfg # model dict + else: # is *.yaml + import yaml # for torch hub + self.yaml_file = Path(cfg).name + with open(cfg) as f: + self.yaml = yaml.load(f, Loader=yaml.FullLoader) # model dict + + # Define model + if nc and nc != self.yaml['nc']: + LOGGER.info(f"Overriding {cfg} nc={self.yaml['nc']} with nc={nc}") + self.yaml['nc'] = nc # override yaml value + self.model, self.savelist = parse_model(deepcopy(self.yaml), ch=[ch], model=model, imgsz=imgsz) + + def predict(self, inputs, tf_nms=False, agnostic_nms=False, topk_per_class=100, topk_all=100, iou_thres=0.45, + conf_thres=0.25): + y = [] # outputs + x = inputs + for i, m in enumerate(self.model.layers): + if m.f != -1: # if not from previous layer + x = y[m.f] if isinstance(m.f, int) else [x if j == -1 else y[j] for j in m.f] # from earlier layers + + x = m(x) # run + y.append(x if m.i in self.savelist else None) # save output + + # Add TensorFlow NMS + if tf_nms: + boxes = self._xywh2xyxy(x[0][..., :4]) + probs = x[0][:, :, 4:5] + classes = x[0][:, :, 5:] + scores = probs * classes + if agnostic_nms: + nms = AgnosticNMS()((boxes, classes, scores), topk_all, iou_thres, conf_thres) + return nms, x[1] + else: + boxes = tf.expand_dims(boxes, 2) + nms = tf.image.combined_non_max_suppression( + boxes, scores, topk_per_class, topk_all, iou_thres, conf_thres, clip_boxes=False) + return nms, x[1] + + return x[0] # output only first tensor [1,6300,85] = [xywh, conf, class0, class1, ...] + # x = x[0][0] # [x(1,6300,85), ...] 
to x(6300,85) + # xywh = x[..., :4] # x(6300,4) boxes + # conf = x[..., 4:5] # x(6300,1) confidences + # cls = tf.reshape(tf.cast(tf.argmax(x[..., 5:], axis=1), tf.float32), (-1, 1)) # x(6300,1) classes + # return tf.concat([conf, cls, xywh], 1) + + @staticmethod + def _xywh2xyxy(xywh): + # Convert nx4 boxes from [x, y, w, h] to [x1, y1, x2, y2] where xy1=top-left, xy2=bottom-right + x, y, w, h = tf.split(xywh, num_or_size_splits=4, axis=-1) + return tf.concat([x - w / 2, y - h / 2, x + w / 2, y + h / 2], axis=-1) + + +class AgnosticNMS(keras.layers.Layer): + # TF Agnostic NMS + def call(self, input, topk_all, iou_thres, conf_thres): + # wrap map_fn to avoid TypeSpec related error https://stackoverflow.com/a/65809989/3036450 + return tf.map_fn(lambda x: self._nms(x, topk_all, iou_thres, conf_thres), input, + fn_output_signature=(tf.float32, tf.float32, tf.float32, tf.int32), + name='agnostic_nms') + + @staticmethod + def _nms(x, topk_all=100, iou_thres=0.45, conf_thres=0.25): # agnostic NMS + boxes, classes, scores = x + class_inds = tf.cast(tf.argmax(classes, axis=-1), tf.float32) + scores_inp = tf.reduce_max(scores, -1) + selected_inds = tf.image.non_max_suppression( + boxes, scores_inp, max_output_size=topk_all, iou_threshold=iou_thres, score_threshold=conf_thres) + selected_boxes = tf.gather(boxes, selected_inds) + padded_boxes = tf.pad(selected_boxes, + paddings=[[0, topk_all - tf.shape(selected_boxes)[0]], [0, 0]], + mode="CONSTANT", constant_values=0.0) + selected_scores = tf.gather(scores_inp, selected_inds) + padded_scores = tf.pad(selected_scores, + paddings=[[0, topk_all - tf.shape(selected_boxes)[0]]], + mode="CONSTANT", constant_values=-1.0) + selected_classes = tf.gather(class_inds, selected_inds) + padded_classes = tf.pad(selected_classes, + paddings=[[0, topk_all - tf.shape(selected_boxes)[0]]], + mode="CONSTANT", constant_values=-1.0) + valid_detections = tf.shape(selected_inds)[0] + return padded_boxes, padded_scores, padded_classes, valid_detections + + +def representative_dataset_gen(dataset, ncalib=100): + # Representative dataset generator for use with converter.representative_dataset, returns a generator of np arrays + for n, (path, img, im0s, vid_cap, string) in enumerate(dataset): + input = np.transpose(img, [1, 2, 0]) + input = np.expand_dims(input, axis=0).astype(np.float32) + input /= 255 + yield [input] + if n >= ncalib: + break + + +def run(weights=ROOT / 'yolov5s.pt', # weights path + imgsz=(640, 640), # inference size h,w + batch_size=1, # batch size + dynamic=False, # dynamic batch size + ): + # PyTorch model + im = torch.zeros((batch_size, 3, *imgsz)) # BCHW image + model = attempt_load(weights, map_location=torch.device('cpu'), inplace=True, fuse=False) + _ = model(im) # inference + model.info() + + # TensorFlow model + im = tf.zeros((batch_size, *imgsz, 3)) # BHWC image + tf_model = TFModel(cfg=model.yaml, model=model, nc=model.nc, imgsz=imgsz) + _ = tf_model.predict(im) # inference + + # Keras model + im = keras.Input(shape=(*imgsz, 3), batch_size=None if dynamic else batch_size) + keras_model = keras.Model(inputs=im, outputs=tf_model.predict(im)) + keras_model.summary() + + LOGGER.info('PyTorch, TensorFlow and Keras models successfully verified.\nUse export.py for TF model export.') + + +def parse_opt(): + parser = argparse.ArgumentParser() + parser.add_argument('--weights', type=str, default=ROOT / 'yolov5s.pt', help='weights path') + parser.add_argument('--imgsz', '--img', '--img-size', nargs='+', type=int, default=[640], help='inference size h,w') + 
parser.add_argument('--batch-size', type=int, default=1, help='batch size') + parser.add_argument('--dynamic', action='store_true', help='dynamic batch size') + opt = parser.parse_args() + opt.imgsz *= 2 if len(opt.imgsz) == 1 else 1 # expand + print_args(FILE.stem, opt) + return opt + + +def main(opt): + run(**vars(opt)) + + +if __name__ == "__main__": + opt = parse_opt() + main(opt) diff --git a/models/yolo.py b/models/yolo.py index f16e69b17df5..f08d41ce1585 100644 --- a/models/yolo.py +++ b/models/yolo.py @@ -1,24 +1,32 @@ -# YOLOv5 YOLO-specific modules +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license +""" +YOLO-specific modules + +Usage: + $ python path/to/models/yolo.py --cfg yolov5s.yaml +""" import argparse -import logging import sys from copy import deepcopy from pathlib import Path -sys.path.append(Path(__file__).parent.parent.absolute().__str__()) # to run '$ python *.py' files in subdirectories -logger = logging.getLogger(__name__) +FILE = Path(__file__).resolve() +ROOT = FILE.parents[1] # YOLOv5 root directory +if str(ROOT) not in sys.path: + sys.path.append(str(ROOT)) # add ROOT to PATH +# ROOT = ROOT.relative_to(Path.cwd()) # relative from models.common import * from models.experimental import * from utils.activations import replace_activations from utils.autoanchor import check_anchor_order -from utils.general import make_divisible, check_file, set_logging -from utils.torch_utils import time_synchronized, fuse_conv_and_bn, model_info, scale_img, initialize_weights, \ - select_device, copy_attr +from utils.general import LOGGER, check_version, check_yaml, make_divisible, print_args +from utils.plots import feature_visualization +from utils.torch_utils import fuse_conv_and_bn, initialize_weights, model_info, scale_img, select_device, time_sync try: - import thop # for FLOPS computation + import thop # for FLOPs computation except ImportError: thop = None @@ -29,20 +37,18 @@ class Detect(nn.Module): export = True # onnx export def __init__(self, nc=80, anchors=(), ch=(), inplace=True): # detection layer - super(Detect, self).__init__() + super().__init__() self.nc = nc # number of classes self.no = nc + 5 # number of outputs per anchor self.nl = len(anchors) # number of detection layers self.na = len(anchors[0]) // 2 # number of anchors self.grid = [torch.zeros(1)] * self.nl # init grid - a = torch.tensor(anchors).float().view(self.nl, -1, 2) - self.register_buffer('anchors', a) # shape(nl,na,2) - self.register_buffer('anchor_grid', a.clone().view(self.nl, 1, -1, 1, 1, 2)) # shape(nl,1,na,1,1,2) + self.anchor_grid = [torch.zeros(1)] * self.nl # init anchor grid + self.register_buffer('anchors', torch.tensor(anchors).float().view(self.nl, -1, 2)) # shape(nl,na,2) self.m = nn.ModuleList(nn.Conv2d(x, self.no * self.na, 1) for x in ch) # output conv self.inplace = inplace # use in-place ops (e.g. slice assignment) def forward(self, x): - # x = x.copy() # for profiling z = [] # inference output for i in range(self.nl): x[i] = self.m[i](x[i]) # conv @@ -50,50 +56,55 @@ def forward(self, x): x[i] = x[i].view(bs, self.na, self.no, ny, nx).permute(0, 1, 3, 4, 2).contiguous() if not self.training and self.export: # inference - if self.grid[i].shape[2:4] != x[i].shape[2:4] or self.onnx_dynamic: - self.grid[i] = self._make_grid(nx, ny).to(x[i].device) + if self.onnx_dynamic or self.grid[i].shape[2:4] != x[i].shape[2:4]: + self.grid[i], self.anchor_grid[i] = self._make_grid(nx, ny, i) y = x[i].sigmoid() if self.inplace: - y[..., 0:2] = (y[..., 0:2] * 2. 
- 0.5 + self.grid[i]) * self.stride[i] # xy + y[..., 0:2] = (y[..., 0:2] * 2 - 0.5 + self.grid[i]) * self.stride[i] # xy y[..., 2:4] = (y[..., 2:4] * 2) ** 2 * self.anchor_grid[i] # wh else: # for YOLOv5 on AWS Inferentia https://github.com/ultralytics/yolov5/pull/2953 - xy = (y[..., 0:2] * 2. - 0.5 + self.grid[i]) * self.stride[i] # xy - wh = (y[..., 2:4] * 2) ** 2 * self.anchor_grid[i].view(1, self.na, 1, 1, 2) # wh + xy = (y[..., 0:2] * 2 - 0.5 + self.grid[i]) * self.stride[i] # xy + wh = (y[..., 2:4] * 2) ** 2 * self.anchor_grid[i] # wh y = torch.cat((xy, wh, y[..., 4:]), -1) z.append(y.view(bs, -1, self.no)) return x if self.training or not self.export else (torch.cat(z, 1), x) - @staticmethod - def _make_grid(nx=20, ny=20): - yv, xv = torch.meshgrid([torch.arange(ny), torch.arange(nx)]) - return torch.stack((xv, yv), 2).view((1, 1, ny, nx, 2)).float() + def _make_grid(self, nx=20, ny=20, i=0): + d = self.anchors[i].device + shape = 1, self.na, ny, nx, 2 # grid shape + if check_version(torch.__version__, '1.10.0'): # torch>=1.10.0 meshgrid requires explicit indexing='ij' + yv, xv = torch.meshgrid(torch.arange(ny, device=d), torch.arange(nx, device=d), indexing='ij') + else: + yv, xv = torch.meshgrid(torch.arange(ny, device=d), torch.arange(nx, device=d)) + grid = torch.stack((xv, yv), 2).expand(shape).float() + anchor_grid = (self.anchors[i] * self.stride[i]).view((1, self.na, 1, 1, 2)).expand(shape).float() + return grid, anchor_grid class Model(nn.Module): def __init__(self, cfg='yolov5s.yaml', ch=3, nc=None, anchors=None): # model, input channels, number of classes - super(Model, self).__init__() + super().__init__() if isinstance(cfg, dict): self.yaml = cfg # model dict else: # is *.yaml import yaml # for torch hub self.yaml_file = Path(cfg).name - with open(cfg) as f: + with open(cfg, encoding='ascii', errors='ignore') as f: self.yaml = yaml.safe_load(f) # model dict # Define model ch = self.yaml['ch'] = self.yaml.get('ch', ch) # input channels if nc and nc != self.yaml['nc']: - logger.info(f"Overriding model.yaml nc={self.yaml['nc']} with nc={nc}") + LOGGER.info(f"Overriding model.yaml nc={self.yaml['nc']} with nc={nc}") self.yaml['nc'] = nc # override yaml value if anchors: - logger.info(f'Overriding model.yaml anchors with anchors={anchors}') + LOGGER.info(f'Overriding model.yaml anchors with anchors={anchors}') self.yaml['anchors'] = round(anchors) # override yaml value self.model, self.save = parse_model(deepcopy(self.yaml), ch=[ch]) # model, savelist self.names = [str(i) for i in range(self.yaml['nc'])] # default names self.inplace = self.yaml.get('inplace', True) - # logger.info([x.shape for x in self.forward(torch.zeros(1, ch, 64, 64))]) # Build strides, anchors m = self.model[-1] # Detect() @@ -101,57 +112,46 @@ def __init__(self, cfg='yolov5s.yaml', ch=3, nc=None, anchors=None): # model, i s = 256 # 2x min stride m.inplace = self.inplace m.stride = torch.tensor([s / x.shape[-2] for x in self.forward(torch.zeros(1, ch, s, s))]) # forward + check_anchor_order(m) # must be in pixel-space (not grid-space) m.anchors /= m.stride.view(-1, 1, 1) - check_anchor_order(m) self.stride = m.stride self._initialize_biases() # only run once - # logger.info('Strides: %s' % m.stride.tolist()) # Init weights, biases initialize_weights(self) self.info() - logger.info('') + LOGGER.info('') - def forward(self, x, augment=False, profile=False): + def forward(self, x, augment=False, profile=False, visualize=False): if augment: - return self.forward_augment(x) # augmented inference,
None - else: - return self.forward_once(x, profile) # single-scale inference, train + return self._forward_augment(x) # augmented inference, None + return self._forward_once(x, profile, visualize) # single-scale inference, train - def forward_augment(self, x): + def _forward_augment(self, x): img_size = x.shape[-2:] # height, width s = [1, 0.83, 0.67] # scales f = [None, 3, None] # flips (2-ud, 3-lr) y = [] # outputs for si, fi in zip(s, f): xi = scale_img(x.flip(fi) if fi else x, si, gs=int(self.stride.max())) - yi = self.forward_once(xi)[0] # forward + yi = self._forward_once(xi)[0] # forward # cv2.imwrite(f'img_{si}.jpg', 255 * xi[0].cpu().numpy().transpose((1, 2, 0))[:, :, ::-1]) # save yi = self._descale_pred(yi, fi, si, img_size) y.append(yi) + y = self._clip_augmented(y) # clip augmented tails return torch.cat(y, 1), None # augmented inference, train - def forward_once(self, x, profile=False): + def _forward_once(self, x, profile=False, visualize=False): y, dt = [], [] # outputs for m in self.model: if m.f != -1: # if not from previous layer x = y[m.f] if isinstance(m.f, int) else [x if j == -1 else y[j] for j in m.f] # from earlier layers - if profile: - o = thop.profile(m, inputs=(x,), verbose=False)[0] / 1E9 * 2 if thop else 0 # FLOPS - t = time_synchronized() - for _ in range(10): - _ = m(x) - dt.append((time_synchronized() - t) * 100) - if m == self.model[0]: - logger.info(f"{'time (ms)':>10s} {'GFLOPS':>10s} {'params':>10s} {'module'}") - logger.info(f'{dt[-1]:10.2f} {o:10.2f} {m.np:10.0f} {m.type}') - + self._profile_one_layer(m, x, dt) x = m(x) # run y.append(x if m.i in self.save else None) # save output - - if profile: - logger.info('%.1fms total' % sum(dt)) + if visualize: + feature_visualization(x, m.type, m.i, save_dir=visualize) return x def _descale_pred(self, p, flips, scale, img_size): @@ -171,6 +171,30 @@ def _descale_pred(self, p, flips, scale, img_size): p = torch.cat((x, y, wh, p[..., 4:]), -1) return p + def _clip_augmented(self, y): + # Clip YOLOv5 augmented inference tails + nl = self.model[-1].nl # number of detection layers (P3-P5) + g = sum(4 ** x for x in range(nl)) # grid points + e = 1 # exclude layer count + i = (y[0].shape[1] // g) * sum(4 ** x for x in range(e)) # indices + y[0] = y[0][:, :-i] # large + i = (y[-1].shape[1] // g) * sum(4 ** (nl - 1 - x) for x in range(e)) # indices + y[-1] = y[-1][:, i:] # small + return y + + def _profile_one_layer(self, m, x, dt): + c = isinstance(m, Detect) # is final layer, copy input as inplace fix + o = thop.profile(m, inputs=(x.copy() if c else x,), verbose=False)[0] / 1E9 * 2 if thop else 0 # FLOPs + t = time_sync() + for _ in range(10): + m(x.copy() if c else x) + dt.append((time_sync() - t) * 100) + if m == self.model[0]: + LOGGER.info(f"{'time (ms)':>10s} {'GFLOPs':>10s} {'params':>10s} {'module'}") + LOGGER.info(f'{dt[-1]:10.2f} {o:10.2f} {m.np:10.0f} {m.type}') + if c: + LOGGER.info(f"{sum(dt):10.2f} {'-':>10s} {'-':>10s} Total") + def _initialize_biases(self, cf=None): # initialize biases into Detect(), cf is class frequency # https://arxiv.org/abs/1708.02002 section 3.3 # cf = torch.bincount(torch.tensor(np.concatenate(dataset.labels, 0)[:, 0]).long(), minlength=nc) + 1. 
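For reference, the bias priors set in _initialize_biases follow RetinaNet section 3.3 (the arxiv link above): the objectness logit is initialized so that sigmoid(b_obj) corresponds to roughly 8 objects per 640-pixel image at the given stride, and each class logit starts near a 0.6/nc class probability. A standalone sketch of the arithmetic, with assumed illustrative values (nc, s, img are not taken from this diff):

```
import math

nc, s, img = 80, 8, 640  # classes, stride, train image size (assumed values)
b_obj = math.log(8 / (img / s) ** 2)  # sigmoid(b_obj) ~= 8 objects / 6400 grid cells
b_cls = math.log(0.6 / (nc - 0.999999))  # sigmoid(b_cls) ~= 0.6 / nc
print(1 / (1 + math.exp(-b_obj)), 1 / (1 + math.exp(-b_cls)))  # ~0.00125, ~0.0075
```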
@@ -178,57 +202,48 @@ def _initialize_biases(self, cf=None): # initialize biases into Detect(), cf is for mi, s in zip(m.m, m.stride): # from b = mi.bias.view(m.na, -1) # conv.bias(255) to (3,85) b.data[:, 4] += math.log(8 / (640 / s) ** 2) # obj (8 objects per 640 image) - b.data[:, 5:] += math.log(0.6 / (m.nc - 0.99)) if cf is None else torch.log(cf / cf.sum()) # cls + b.data[:, 5:] += math.log(0.6 / (m.nc - 0.999999)) if cf is None else torch.log(cf / cf.sum()) # cls mi.bias = torch.nn.Parameter(b.view(-1), requires_grad=True) def _print_biases(self): m = self.model[-1] # Detect() module for mi in m.m: # from b = mi.bias.detach().view(m.na, -1).T # conv.bias(255) to (3,85) - logger.info( + LOGGER.info( ('%6g Conv2d.bias:' + '%10.3g' * 6) % (mi.weight.shape[1], *b[:5].mean(1).tolist(), b[5:].mean())) # def _print_weights(self): # for m in self.model.modules(): # if type(m) is Bottleneck: - # logger.info('%10.3g' % (m.w.detach().sigmoid() * 2)) # shortcut weights + # LOGGER.info('%10.3g' % (m.w.detach().sigmoid() * 2)) # shortcut weights def fuse(self): # fuse model Conv2d() + BatchNorm2d() layers - logger.info('Fusing layers... ') + LOGGER.info('Fusing layers... ') for m in self.model.modules(): - if type(m) is Conv and hasattr(m, 'bn'): + if isinstance(m, (Conv, DWConv)) and hasattr(m, 'bn'): m.conv = fuse_conv_and_bn(m.conv, m.bn) # update conv delattr(m, 'bn') # remove batchnorm - m.forward = m.fuseforward # update forward + m.forward = m.forward_fuse # update forward self.info() return self - def nms(self, mode=True): # add or remove NMS module - present = type(self.model[-1]) is NMS # last layer is NMS - if mode and not present: - logger.info('Adding NMS... ') - m = NMS() # module - m.f = -1 # from - m.i = self.model[-1].i + 1 # index - self.model.add_module(name='%s' % m.i, module=m) # add - self.eval() - elif not mode and present: - logger.info('Removing NMS... ') - self.model = self.model[:-1] # remove - return self - - def autoshape(self): # add AutoShape module - logger.info('Adding AutoShape... 
') - m = AutoShape(self) # wrap model - copy_attr(m, self, include=('yaml', 'nc', 'hyp', 'names', 'stride'), exclude=()) # copy attributes - return m - def info(self, verbose=False, img_size=640): # print model information model_info(self, verbose, img_size) + def _apply(self, fn): + # Apply to(), cpu(), cuda(), half() to model tensors that are not parameters or registered buffers + self = super()._apply(fn) + m = self.model[-1] # Detect() + if isinstance(m, Detect): + m.stride = fn(m.stride) + m.grid = list(map(fn, m.grid)) + if isinstance(m.anchor_grid, list): + m.anchor_grid = list(map(fn, m.anchor_grid)) + return self + def parse_model(d, ch): # model_dict, input_channels(3) - logger.info('\n%3s%18s%3s%10s %-40s%-30s' % ('', 'from', 'n', 'params', 'module', 'arguments')) + LOGGER.info(f"\n{'':>3}{'from':>18}{'n':>3}{'params':>10} {'module':<40}{'arguments':<30}") anchors, nc, gd, gw = d['anchors'], d['nc'], d['depth_multiple'], d['width_multiple'] na = (len(anchors[0]) // 2) if isinstance(anchors, list) else anchors # number of anchors no = na * (nc + 5) # number of outputs = anchors * (classes + 5) @@ -239,24 +254,24 @@ def parse_model(d, ch): # model_dict, input_channels(3) for j, a in enumerate(args): try: args[j] = eval(a) if isinstance(a, str) else a # eval strings - except: + except NameError: pass - n = max(round(n * gd), 1) if n > 1 else n # depth gain - if m in [Conv, GhostConv, Bottleneck, GhostBottleneck, SPP, DWConv, MixConv2d, Focus, CrossConv, BottleneckCSP, - C3, C3TR]: + n = n_ = max(round(n * gd), 1) if n > 1 else n # depth gain + if m in [Conv, GhostConv, Bottleneck, GhostBottleneck, SPP, SPPF, DWConv, MixConv2d, Focus, CrossConv, + BottleneckCSP, C3, C3TR, C3SPP, C3Ghost]: c1, c2 = ch[f], args[0] if c2 != no: # if not output c2 = make_divisible(c2 * gw, 8) args = [c1, c2, *args[1:]] - if m in [BottleneckCSP, C3, C3TR]: + if m in [BottleneckCSP, C3, C3TR, C3Ghost]: args.insert(2, n) # number of repeats n = 1 elif m is nn.BatchNorm2d: args = [ch[f]] elif m is Concat: - c2 = sum([ch[x] for x in f]) + c2 = sum(ch[x] for x in f) elif m is Detect: args.append([ch[x] for x in f]) if isinstance(args[1], int): # number of anchors @@ -268,11 +283,11 @@ def parse_model(d, ch): # model_dict, input_channels(3) else: c2 = ch[f] - m_ = nn.Sequential(*[m(*args) for _ in range(n)]) if n > 1 else m(*args) # module + m_ = nn.Sequential(*(m(*args) for _ in range(n))) if n > 1 else m(*args) # module t = str(m)[8:-2].replace('__main__.', '') # module type - np = sum([x.numel() for x in m_.parameters()]) # number params + np = sum(x.numel() for x in m_.parameters()) # number params m_.i, m_.f, m_.type, m_.np = i, f, t, np # attach index, 'from' index, type, number params - logger.info('%3s%18s%3s%10.0f %-40s%-30s' % (i, f, n, np, t, args)) # print + LOGGER.info(f'{i:>3}{str(f):>18}{n_:>3}{np:10.0f} {t:<40}{str(args):<30}') # print save.extend(x % i for x in ([f] if isinstance(f, int) else f) if x != -1) # append to savelist layers.append(m_) if i == 0: @@ -283,7 +298,7 @@ def parse_model(d, ch): # model_dict, input_channels(3) # override all activations in model if provided in config if 'act' in d: - logger.info(f'overriding activations in model to {d["act"]}') + LOGGER.info(f'overriding activations in model to {d["act"]}') replace_activations(model, d["act"]) return model, sorted(save) @@ -293,9 +308,11 @@ def parse_model(d, ch): # model_dict, input_channels(3) parser = argparse.ArgumentParser() parser.add_argument('--cfg', type=str, default='yolov5s.yaml', help='model.yaml') 
parser.add_argument('--device', default='', help='cuda device, i.e. 0 or 0,1,2,3 or cpu') + parser.add_argument('--profile', action='store_true', help='profile model speed') + parser.add_argument('--test', action='store_true', help='test all yolo*.yaml') opt = parser.parse_args() - opt.cfg = check_file(opt.cfg) # check file - set_logging() + opt.cfg = check_yaml(opt.cfg) # check YAML + print_args(FILE.stem, opt) device = select_device(opt.device) # Create model @@ -303,12 +320,20 @@ def parse_model(d, ch): # model_dict, input_channels(3) model.train() # Profile - # img = torch.rand(8 if torch.cuda.is_available() else 1, 3, 320, 320).to(device) - # y = model(img, profile=True) + if opt.profile: + img = torch.rand(8 if torch.cuda.is_available() else 1, 3, 640, 640).to(device) + y = model(img, profile=True) + + # Test all models + if opt.test: + for cfg in Path(ROOT / 'models').rglob('yolo*.yaml'): + try: + _ = Model(cfg) + except Exception as e: + print(f'Error in {cfg}: {e}') # Tensorboard (not working https://github.com/ultralytics/yolov5/issues/2898) # from torch.utils.tensorboard import SummaryWriter # tb_writer = SummaryWriter('.') - # logger.info("Run 'tensorboard --logdir=models' to view tensorboard at http://localhost:6006/") + # LOGGER.info("Run 'tensorboard --logdir=models' to view tensorboard at http://localhost:6006/") # tb_writer.add_graph(torch.jit.trace(model, img, strict=False), []) # add model graph - # tb_writer.add_image('test', img[0], dataformats='CWH') # add model to tensorboard diff --git a/models/yolov5l.yaml b/models/yolov5l.yaml index 71ebf86e5791..ce8a5de46a27 100644 --- a/models/yolov5l.yaml +++ b/models/yolov5l.yaml @@ -1,30 +1,30 @@ -# parameters +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license + +# Parameters nc: 80 # number of classes depth_multiple: 1.0 # model depth multiple width_multiple: 1.0 # layer channel multiple - -# anchors anchors: - [10,13, 16,30, 33,23] # P3/8 - [30,61, 62,45, 59,119] # P4/16 - [116,90, 156,198, 373,326] # P5/32 -# YOLOv5 backbone +# YOLOv5 v6.0 backbone backbone: # [from, number, module, args] - [[-1, 1, Focus, [64, 3]], # 0-P1/2 + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 [-1, 3, C3, [128]], [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 - [-1, 9, C3, [256]], + [-1, 6, C3, [256]], [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 [-1, 9, C3, [512]], [-1, 1, Conv, [1024, 3, 2]], # 7-P5/32 - [-1, 1, SPP, [1024, [5, 9, 13]]], - [-1, 3, C3, [1024, False]], # 9 + [-1, 3, C3, [1024]], + [-1, 1, SPPF, [1024, 5]], # 9 ] -# YOLOv5 head +# YOLOv5 v6.0 head head: [[-1, 1, Conv, [512, 1, 1]], [-1, 1, nn.Upsample, [None, 2, 'nearest']], diff --git a/models/yolov5m.yaml b/models/yolov5m.yaml index 3c749c916246..ad13ab370ff6 100644 --- a/models/yolov5m.yaml +++ b/models/yolov5m.yaml @@ -1,30 +1,30 @@ -# parameters +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license + +# Parameters nc: 80 # number of classes depth_multiple: 0.67 # model depth multiple width_multiple: 0.75 # layer channel multiple - -# anchors anchors: - [10,13, 16,30, 33,23] # P3/8 - [30,61, 62,45, 59,119] # P4/16 - [116,90, 156,198, 373,326] # P5/32 -# YOLOv5 backbone +# YOLOv5 v6.0 backbone backbone: # [from, number, module, args] - [[-1, 1, Focus, [64, 3]], # 0-P1/2 + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 [-1, 3, C3, [128]], [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 - [-1, 9, C3, [256]], + [-1, 6, C3, [256]], [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 [-1, 9, C3, [512]], [-1, 1, Conv, [1024, 3, 2]], # 7-P5/32 - [-1, 1, SPP, [1024, [5, 
9, 13]]], - [-1, 3, C3, [1024, False]], # 9 + [-1, 3, C3, [1024]], + [-1, 1, SPPF, [1024, 5]], # 9 ] -# YOLOv5 head +# YOLOv5 v6.0 head head: [[-1, 1, Conv, [512, 1, 1]], [-1, 1, nn.Upsample, [None, 2, 'nearest']], diff --git a/models/yolov5n.yaml b/models/yolov5n.yaml new file mode 100644 index 000000000000..8a28a40d6e20 --- /dev/null +++ b/models/yolov5n.yaml @@ -0,0 +1,48 @@ +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license + +# Parameters +nc: 80 # number of classes +depth_multiple: 0.33 # model depth multiple +width_multiple: 0.25 # layer channel multiple +anchors: + - [10,13, 16,30, 33,23] # P3/8 + - [30,61, 62,45, 59,119] # P4/16 + - [116,90, 156,198, 373,326] # P5/32 + +# YOLOv5 v6.0 backbone +backbone: + # [from, number, module, args] + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, C3, [128]], + [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 + [-1, 6, C3, [256]], + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, C3, [512]], + [-1, 1, Conv, [1024, 3, 2]], # 7-P5/32 + [-1, 3, C3, [1024]], + [-1, 1, SPPF, [1024, 5]], # 9 + ] + +# YOLOv5 v6.0 head +head: + [[-1, 1, Conv, [512, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 3, C3, [512, False]], # 13 + + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 3, C3, [256, False]], # 17 (P3/8-small) + + [-1, 1, Conv, [256, 3, 2]], + [[-1, 14], 1, Concat, [1]], # cat head P4 + [-1, 3, C3, [512, False]], # 20 (P4/16-medium) + + [-1, 1, Conv, [512, 3, 2]], + [[-1, 10], 1, Concat, [1]], # cat head P5 + [-1, 3, C3, [1024, False]], # 23 (P5/32-large) + + [[17, 20, 23], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5) + ] diff --git a/models/yolov5s.yaml b/models/yolov5s.yaml index aca669d60d8b..f35beabb1e1c 100644 --- a/models/yolov5s.yaml +++ b/models/yolov5s.yaml @@ -1,30 +1,30 @@ -# parameters +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license + +# Parameters nc: 80 # number of classes depth_multiple: 0.33 # model depth multiple width_multiple: 0.50 # layer channel multiple - -# anchors anchors: - [10,13, 16,30, 33,23] # P3/8 - [30,61, 62,45, 59,119] # P4/16 - [116,90, 156,198, 373,326] # P5/32 -# YOLOv5 backbone +# YOLOv5 v6.0 backbone backbone: # [from, number, module, args] - [[-1, 1, Focus, [64, 3]], # 0-P1/2 + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 [-1, 3, C3, [128]], [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 - [-1, 9, C3, [256]], + [-1, 6, C3, [256]], [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 [-1, 9, C3, [512]], [-1, 1, Conv, [1024, 3, 2]], # 7-P5/32 - [-1, 1, SPP, [1024, [5, 9, 13]]], - [-1, 3, C3, [1024, False]], # 9 + [-1, 3, C3, [1024]], + [-1, 1, SPPF, [1024, 5]], # 9 ] -# YOLOv5 head +# YOLOv5 v6.0 head head: [[-1, 1, Conv, [512, 1, 1]], [-1, 1, nn.Upsample, [None, 2, 'nearest']], diff --git a/models/yolov5x.yaml b/models/yolov5x.yaml index d3babdf7baf0..f617a027d8a2 100644 --- a/models/yolov5x.yaml +++ b/models/yolov5x.yaml @@ -1,30 +1,30 @@ -# parameters +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license + +# Parameters nc: 80 # number of classes depth_multiple: 1.33 # model depth multiple width_multiple: 1.25 # layer channel multiple - -# anchors anchors: - [10,13, 16,30, 33,23] # P3/8 - [30,61, 62,45, 59,119] # P4/16 - [116,90, 156,198, 373,326] # P5/32 -# YOLOv5 backbone +# YOLOv5 v6.0 backbone backbone: # [from, number, module, args] - [[-1, 1, Focus, [64, 3]], # 0-P1/2 + [[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2 [-1, 1, Conv, [128, 3, 2]], # 
1-P2/4 [-1, 3, C3, [128]], [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 - [-1, 9, C3, [256]], + [-1, 6, C3, [256]], [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 [-1, 9, C3, [512]], [-1, 1, Conv, [1024, 3, 2]], # 7-P5/32 - [-1, 1, SPP, [1024, [5, 9, 13]]], - [-1, 3, C3, [1024, False]], # 9 + [-1, 3, C3, [1024]], + [-1, 1, SPPF, [1024, 5]], # 9 ] -# YOLOv5 head +# YOLOv5 v6.0 head head: [[-1, 1, Conv, [512, 1, 1]], [-1, 1, nn.Upsample, [None, 2, 'nearest']], diff --git a/models_v5.0/yolov5l.yaml b/models_v5.0/yolov5l.yaml new file mode 100644 index 000000000000..71ebf86e5791 --- /dev/null +++ b/models_v5.0/yolov5l.yaml @@ -0,0 +1,48 @@ +# parameters +nc: 80 # number of classes +depth_multiple: 1.0 # model depth multiple +width_multiple: 1.0 # layer channel multiple + +# anchors +anchors: + - [10,13, 16,30, 33,23] # P3/8 + - [30,61, 62,45, 59,119] # P4/16 + - [116,90, 156,198, 373,326] # P5/32 + +# YOLOv5 backbone +backbone: + # [from, number, module, args] + [[-1, 1, Focus, [64, 3]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, C3, [128]], + [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 + [-1, 9, C3, [256]], + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, C3, [512]], + [-1, 1, Conv, [1024, 3, 2]], # 7-P5/32 + [-1, 1, SPP, [1024, [5, 9, 13]]], + [-1, 3, C3, [1024, False]], # 9 + ] + +# YOLOv5 head +head: + [[-1, 1, Conv, [512, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 3, C3, [512, False]], # 13 + + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 3, C3, [256, False]], # 17 (P3/8-small) + + [-1, 1, Conv, [256, 3, 2]], + [[-1, 14], 1, Concat, [1]], # cat head P4 + [-1, 3, C3, [512, False]], # 20 (P4/16-medium) + + [-1, 1, Conv, [512, 3, 2]], + [[-1, 10], 1, Concat, [1]], # cat head P5 + [-1, 3, C3, [1024, False]], # 23 (P5/32-large) + + [[17, 20, 23], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5) + ] diff --git a/models_v5.0/yolov5m.yaml b/models_v5.0/yolov5m.yaml new file mode 100644 index 000000000000..3c749c916246 --- /dev/null +++ b/models_v5.0/yolov5m.yaml @@ -0,0 +1,48 @@ +# parameters +nc: 80 # number of classes +depth_multiple: 0.67 # model depth multiple +width_multiple: 0.75 # layer channel multiple + +# anchors +anchors: + - [10,13, 16,30, 33,23] # P3/8 + - [30,61, 62,45, 59,119] # P4/16 + - [116,90, 156,198, 373,326] # P5/32 + +# YOLOv5 backbone +backbone: + # [from, number, module, args] + [[-1, 1, Focus, [64, 3]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, C3, [128]], + [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 + [-1, 9, C3, [256]], + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, C3, [512]], + [-1, 1, Conv, [1024, 3, 2]], # 7-P5/32 + [-1, 1, SPP, [1024, [5, 9, 13]]], + [-1, 3, C3, [1024, False]], # 9 + ] + +# YOLOv5 head +head: + [[-1, 1, Conv, [512, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 3, C3, [512, False]], # 13 + + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 3, C3, [256, False]], # 17 (P3/8-small) + + [-1, 1, Conv, [256, 3, 2]], + [[-1, 14], 1, Concat, [1]], # cat head P4 + [-1, 3, C3, [512, False]], # 20 (P4/16-medium) + + [-1, 1, Conv, [512, 3, 2]], + [[-1, 10], 1, Concat, [1]], # cat head P5 + [-1, 3, C3, [1024, False]], # 23 (P5/32-large) + + [[17, 20, 23], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5) + ] diff --git a/models_v5.0/yolov5s.yaml b/models_v5.0/yolov5s.yaml new 
file mode 100644 index 000000000000..aca669d60d8b --- /dev/null +++ b/models_v5.0/yolov5s.yaml @@ -0,0 +1,48 @@ +# parameters +nc: 80 # number of classes +depth_multiple: 0.33 # model depth multiple +width_multiple: 0.50 # layer channel multiple + +# anchors +anchors: + - [10,13, 16,30, 33,23] # P3/8 + - [30,61, 62,45, 59,119] # P4/16 + - [116,90, 156,198, 373,326] # P5/32 + +# YOLOv5 backbone +backbone: + # [from, number, module, args] + [[-1, 1, Focus, [64, 3]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, C3, [128]], + [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 + [-1, 9, C3, [256]], + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, C3, [512]], + [-1, 1, Conv, [1024, 3, 2]], # 7-P5/32 + [-1, 1, SPP, [1024, [5, 9, 13]]], + [-1, 3, C3, [1024, False]], # 9 + ] + +# YOLOv5 head +head: + [[-1, 1, Conv, [512, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 3, C3, [512, False]], # 13 + + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 3, C3, [256, False]], # 17 (P3/8-small) + + [-1, 1, Conv, [256, 3, 2]], + [[-1, 14], 1, Concat, [1]], # cat head P4 + [-1, 3, C3, [512, False]], # 20 (P4/16-medium) + + [-1, 1, Conv, [512, 3, 2]], + [[-1, 10], 1, Concat, [1]], # cat head P5 + [-1, 3, C3, [1024, False]], # 23 (P5/32-large) + + [[17, 20, 23], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5) + ] diff --git a/models_v5.0/yolov5x.yaml b/models_v5.0/yolov5x.yaml new file mode 100644 index 000000000000..d3babdf7baf0 --- /dev/null +++ b/models_v5.0/yolov5x.yaml @@ -0,0 +1,48 @@ +# parameters +nc: 80 # number of classes +depth_multiple: 1.33 # model depth multiple +width_multiple: 1.25 # layer channel multiple + +# anchors +anchors: + - [10,13, 16,30, 33,23] # P3/8 + - [30,61, 62,45, 59,119] # P4/16 + - [116,90, 156,198, 373,326] # P5/32 + +# YOLOv5 backbone +backbone: + # [from, number, module, args] + [[-1, 1, Focus, [64, 3]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, C3, [128]], + [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 + [-1, 9, C3, [256]], + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, C3, [512]], + [-1, 1, Conv, [1024, 3, 2]], # 7-P5/32 + [-1, 1, SPP, [1024, [5, 9, 13]]], + [-1, 3, C3, [1024, False]], # 9 + ] + +# YOLOv5 head +head: + [[-1, 1, Conv, [512, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 3, C3, [512, False]], # 13 + + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 3, C3, [256, False]], # 17 (P3/8-small) + + [-1, 1, Conv, [256, 3, 2]], + [[-1, 14], 1, Concat, [1]], # cat head P4 + [-1, 3, C3, [512, False]], # 20 (P4/16-medium) + + [-1, 1, Conv, [512, 3, 2]], + [[-1, 10], 1, Concat, [1]], # cat head P5 + [-1, 3, C3, [1024, False]], # 23 (P5/32-large) + + [[17, 20, 23], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5) + ] diff --git a/requirements.txt b/requirements.txt index 2a1d702cad90..36f39017d6af 100755 --- a/requirements.txt +++ b/requirements.txt @@ -1,31 +1,38 @@ # pip install -r requirements.txt -# base ---------------------------------------- +# Base ---------------------------------------- matplotlib>=3.2.2 numpy>=1.18.5 opencv-python>=4.1.2 -Pillow +Pillow>=7.1.2 PyYAML>=5.3.1 +requests>=2.23.0 scipy>=1.4.1 -torch~=1.7.0 # sparseml requires 1.7 right now for quantization +torch>=1.7.0 torchvision>=0.8.1 tqdm>=4.41.0 -# logging ------------------------------------- +# Logging 
------------------------------------- tensorboard>=2.4.1 # wandb -# plotting ------------------------------------ +# Plotting ------------------------------------ +pandas>=1.1.4 seaborn>=0.11.0 -pandas -# export -------------------------------------- -# coremltools>=4.1 -# onnx>=1.9.0 -# scikit-learn==0.19.2 # for coreml quantization +# Export -------------------------------------- +# coremltools>=4.1 # CoreML export +# onnx>=1.9.0 # ONNX export +# onnx-simplifier>=0.3.6 # ONNX simplifier +# scikit-learn==0.19.2 # CoreML quantization +# tensorflow>=2.4.1 # TFLite export +# tensorflowjs>=3.9.0 # TF.js export +# openvino-dev # OpenVINO export -# extras -------------------------------------- +# Extras -------------------------------------- +# albumentations>=1.0.3 # Cython # for pycocotools https://github.com/cocodataset/cocoapi/issues/172 -pycocotools>=2.0 # COCO mAP -sparseml[torch,torchvision]>=0.5 # Pruning and Quantization -thop # FLOPS computation +# pycocotools>=2.0 # COCO mAP +# roboflow +thop # FLOPs computation +sparseml[torch,torchvision]>=0.12 \ No newline at end of file diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 000000000000..20ea49a8b4d6 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,44 @@ +# Project-wide configuration file, can be used for package metadata and other tool configurations +# Example usage: global configuration for PEP8 (via flake8) settings or default pytest arguments + +[metadata] +license_file = LICENSE +description-file = README.md + + +[tool:pytest] +norecursedirs = + .git + dist + build +addopts = + --doctest-modules + --durations=25 + --color=yes + + +[flake8] +max-line-length = 120 +exclude = .tox,*.egg,build,temp +select = E,W,F +doctests = True +verbose = 2 +# https://pep8.readthedocs.io/en/latest/intro.html#error-codes +format = pylint +# see: https://www.flake8rules.com/ +ignore = + E731 # Do not assign a lambda expression, use a def + F405 # name may be undefined, or defined from star imports: module + E402 # module level import not at top of file + F401 # module imported but unused + W504 # line break after binary operator + E127 # continuation line over-indented for visual indent + E231 # missing whitespace after ',', ';', or ':' + E501 # line too long + F403 # 'from module import *' used; unable to detect undefined names + + +[isort] +# https://pycqa.github.io/isort/docs/configuration/options.html +line_length = 120 +multi_line_output = 0 diff --git a/test.py b/test.py deleted file mode 100644 index 4196dfe1ef40..000000000000 --- a/test.py +++ /dev/null @@ -1,349 +0,0 @@ -import argparse -import json -import os -from pathlib import Path -from threading import Thread - -import numpy as np -import torch -import yaml -from tqdm import tqdm - -from models.export import load_checkpoint -from utils.datasets import create_dataloader -from utils.general import coco80_to_coco91_class, check_dataset, check_file, check_img_size, check_requirements, \ - box_iou, non_max_suppression, scale_coords, xyxy2xywh, xywh2xyxy, set_logging, increment_path, colorstr -from utils.metrics import ap_per_class, ConfusionMatrix -from utils.plots import plot_images, output_to_target, plot_study_txt -from utils.torch_utils import select_device, time_synchronized - - -def test(data, - weights=None, - batch_size=32, - imgsz=640, - conf_thres=0.001, - iou_thres=0.6, # for NMS - save_json=False, - single_cls=False, - augment=False, - verbose=False, - model=None, - dataloader=None, - save_dir=Path(''), # for saving
images - save_txt=False, # for auto-labelling - save_hybrid=False, # for hybrid auto-labelling - save_conf=False, # save auto-label confidences - plots=True, - wandb_logger=None, - compute_loss=None, - half_precision=True, - is_coco=False, - opt=None): - # Initialize/load model and set device - training = model is not None - if training: # called by train.py - device = next(model.parameters()).device # get model device - - else: # called directly - set_logging() - device = select_device(opt.device, batch_size=batch_size) - - # Directories - save_dir = increment_path(Path(opt.project) / opt.name, exist_ok=opt.exist_ok) # increment run - (save_dir / 'labels' if save_txt else save_dir).mkdir(parents=True, exist_ok=True) # make dir - - # Load model - model, extras = load_checkpoint('ensemble', weights, device) # load FP32 model - gs = max(int(model.stride.max()), 32) # grid size (max stride) - imgsz = check_img_size(imgsz, s=gs) # check img_size - - # Multi-GPU disabled, incompatible with .half() https://github.com/ultralytics/yolov5/issues/99 - # if device.type != 'cpu' and torch.cuda.device_count() > 1: - # model = nn.DataParallel(model) - - # Half - half = device.type != 'cpu' and half_precision # half precision only supported on CUDA - if half: - model.half() - - # Configure - model.eval() - if isinstance(data, str): - is_coco = data.endswith('coco.yaml') - with open(data) as f: - data = yaml.safe_load(f) - check_dataset(data) # check - nc = 1 if single_cls else int(data['nc']) # number of classes - iouv = torch.linspace(0.5, 0.95, 10).to(device) # iou vector for mAP@0.5:0.95 - niou = iouv.numel() - - # Logging - log_imgs = 0 - if wandb_logger and wandb_logger.wandb: - log_imgs = min(wandb_logger.log_imgs, 100) - # Dataloader - if not training: - if device.type != 'cpu': - model(torch.zeros(1, 3, imgsz, imgsz).to(device).type_as(next(model.parameters()))) # run once - task = opt.task if opt.task in ('train', 'val', 'test') else 'val' # path to train/val/test images - dataloader = create_dataloader(data[task], imgsz, batch_size, gs, opt, pad=0.5, rect=True, - prefix=colorstr(f'{task}: '))[0] - - seen = 0 - confusion_matrix = ConfusionMatrix(nc=nc) - names = {k: v for k, v in enumerate(model.names if hasattr(model, 'names') else model.module.names)} - coco91class = coco80_to_coco91_class() - s = ('%20s' + '%12s' * 6) % ('Class', 'Images', 'Labels', 'P', 'R', 'mAP@.5', 'mAP@.5:.95') - p, r, f1, mp, mr, map50, map, t0, t1 = 0., 0., 0., 0., 0., 0., 0., 0., 0. 
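For context on the metric bookkeeping above: `iouv = torch.linspace(0.5, 0.95, 10)` defines the ten IoU thresholds behind the COCO-style mAP@0.5:0.95 reported in the header string. A minimal sketch of how the two headline numbers relate, using placeholder per-threshold AP values rather than the repo's real `ap_per_class` output:

```
import torch

iouv = torch.linspace(0.5, 0.95, 10)  # IoU thresholds 0.50, 0.55, ..., 0.95
ap = torch.rand(len(iouv))            # placeholder: one AP value per IoU threshold
map50 = float(ap[0])                  # mAP@0.5 uses only the 0.50-threshold entry
map50_95 = float(ap.mean())           # mAP@0.5:0.95 averages over all ten thresholds
```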
- loss = torch.zeros(3, device=device) - jdict, stats, ap, ap_class, wandb_images = [], [], [], [], [] - for batch_i, (img, targets, paths, shapes) in enumerate(tqdm(dataloader, desc=s)): - img = img.to(device, non_blocking=True) - img = img.half() if half else img.float() # uint8 to fp16/32 - img /= 255.0 # 0 - 255 to 0.0 - 1.0 - targets = targets.to(device) - nb, _, height, width = img.shape # batch size, channels, height, width - - with torch.no_grad(): - # Run model - t = time_synchronized() - out, train_out = model(img, augment=augment) # inference and training outputs - t0 += time_synchronized() - t - - # Compute loss - if compute_loss: - loss += compute_loss([x.float() for x in train_out], targets)[1][:3] # box, obj, cls - - # Run NMS - targets[:, 2:] *= torch.Tensor([width, height, width, height]).to(device) # to pixels - lb = [targets[targets[:, 0] == i, 1:] for i in range(nb)] if save_hybrid else [] # for autolabelling - t = time_synchronized() - out = non_max_suppression(out, conf_thres, iou_thres, labels=lb, multi_label=True, agnostic=single_cls) - t1 += time_synchronized() - t - - # Statistics per image - for si, pred in enumerate(out): - labels = targets[targets[:, 0] == si, 1:] - nl = len(labels) - tcls = labels[:, 0].tolist() if nl else [] # target class - path = Path(paths[si]) - seen += 1 - - if len(pred) == 0: - if nl: - stats.append((torch.zeros(0, niou, dtype=torch.bool), torch.Tensor(), torch.Tensor(), tcls)) - continue - - # Predictions - if single_cls: - pred[:, 5] = 0 - predn = pred.clone() - scale_coords(img[si].shape[1:], predn[:, :4], shapes[si][0], shapes[si][1]) # native-space pred - - # Append to text file - if save_txt: - gn = torch.tensor(shapes[si][0])[[1, 0, 1, 0]] # normalization gain whwh - for *xyxy, conf, cls in predn.tolist(): - xywh = (xyxy2xywh(torch.tensor(xyxy).view(1, 4)) / gn).view(-1).tolist() # normalized xywh - line = (cls, *xywh, conf) if save_conf else (cls, *xywh) # label format - with open(save_dir / 'labels' / (path.stem + '.txt'), 'a') as f: - f.write(('%g ' * len(line)).rstrip() % line + '\n') - - # W&B logging - Media Panel Plots - if len(wandb_images) < log_imgs and wandb_logger.current_epoch > 0: # Check for test operation - if wandb_logger.current_epoch % wandb_logger.bbox_interval == 0: - box_data = [{"position": {"minX": xyxy[0], "minY": xyxy[1], "maxX": xyxy[2], "maxY": xyxy[3]}, - "class_id": int(cls), - "box_caption": "%s %.3f" % (names[cls], conf), - "scores": {"class_score": conf}, - "domain": "pixel"} for *xyxy, conf, cls in pred.tolist()] - boxes = {"predictions": {"box_data": box_data, "class_labels": names}} # inference-space - wandb_images.append(wandb_logger.wandb.Image(img[si], boxes=boxes, caption=path.name)) - wandb_logger.log_training_progress(predn, path, names) if wandb_logger and wandb_logger.wandb_run else None - - # Append to pycocotools JSON dictionary - if save_json: - # [{"image_id": 42, "category_id": 18, "bbox": [258.15, 41.29, 348.26, 243.78], "score": 0.236}, ... 
- image_id = int(path.stem) if path.stem.isnumeric() else path.stem - box = xyxy2xywh(predn[:, :4]) # xywh - box[:, :2] -= box[:, 2:] / 2 # xy center to top-left corner - for p, b in zip(pred.tolist(), box.tolist()): - jdict.append({'image_id': image_id, - 'category_id': coco91class[int(p[5])] if is_coco else int(p[5]), - 'bbox': [round(x, 3) for x in b], - 'score': round(p[4], 5)}) - - # Assign all predictions as incorrect - correct = torch.zeros(pred.shape[0], niou, dtype=torch.bool, device=device) - if nl: - detected = [] # target indices - tcls_tensor = labels[:, 0] - - # target boxes - tbox = xywh2xyxy(labels[:, 1:5]) - scale_coords(img[si].shape[1:], tbox, shapes[si][0], shapes[si][1]) # native-space labels - if plots: - confusion_matrix.process_batch(predn, torch.cat((labels[:, 0:1], tbox), 1)) - - # Per target class - for cls in torch.unique(tcls_tensor): - ti = (cls == tcls_tensor).nonzero(as_tuple=False).view(-1) # target indices - pi = (cls == pred[:, 5]).nonzero(as_tuple=False).view(-1) # prediction indices - - # Search for detections - if pi.shape[0]: - # Prediction to target ious - ious, i = box_iou(predn[pi, :4], tbox[ti]).max(1) # best ious, indices - - # Append detections - detected_set = set() - for j in (ious > iouv[0]).nonzero(as_tuple=False): - d = ti[i[j]] # detected target - if d.item() not in detected_set: - detected_set.add(d.item()) - detected.append(d) - correct[pi[j]] = ious[j] > iouv # iou_thres is 1xn - if len(detected) == nl: # all targets already located in image - break - - # Append statistics (correct, conf, pcls, tcls) - stats.append((correct.cpu(), pred[:, 4].cpu(), pred[:, 5].cpu(), tcls)) - - # Plot images - if plots and batch_i < 3: - f = save_dir / f'test_batch{batch_i}_labels.jpg' # labels - Thread(target=plot_images, args=(img, targets, paths, f, names), daemon=True).start() - f = save_dir / f'test_batch{batch_i}_pred.jpg' # predictions - Thread(target=plot_images, args=(img, output_to_target(out), paths, f, names), daemon=True).start() - - # Compute statistics - stats = [np.concatenate(x, 0) for x in zip(*stats)] # to numpy - if len(stats) and stats[0].any(): - p, r, ap, f1, ap_class = ap_per_class(*stats, plot=plots, save_dir=save_dir, names=names) - ap50, ap = ap[:, 0], ap.mean(1) # AP@0.5, AP@0.5:0.95 - mp, mr, map50, map = p.mean(), r.mean(), ap50.mean(), ap.mean() - nt = np.bincount(stats[3].astype(np.int64), minlength=nc) # number of targets per class - else: - nt = torch.zeros(1) - - # Print results - pf = '%20s' + '%12i' * 2 + '%12.3g' * 4 # print format - print(pf % ('all', seen, nt.sum(), mp, mr, map50, map)) - - # Print results per class - if (verbose or (nc < 50 and not training)) and nc > 1 and len(stats): - for i, c in enumerate(ap_class): - print(pf % (names[c], seen, nt[c], p[i], r[i], ap50[i], ap[i])) - - # Print speeds - t = tuple(x / seen * 1E3 for x in (t0, t1, t0 + t1)) + (imgsz, imgsz, batch_size) # tuple - if not training: - print('Speed: %.1f/%.1f/%.1f ms inference/NMS/total per %gx%g image at batch-size %g' % t) - - # Plots - if plots: - confusion_matrix.plot(save_dir=save_dir, names=list(names.values())) - if wandb_logger and wandb_logger.wandb: - val_batches = [wandb_logger.wandb.Image(str(f), caption=f.name) for f in sorted(save_dir.glob('test*.jpg'))] - wandb_logger.log({"Validation": val_batches}) - if wandb_images: - wandb_logger.log({"Bounding Box Debugger/Images": wandb_images}) - - # Save JSON - if save_json and len(jdict): - w = Path(weights[0] if isinstance(weights, list) else weights).stem if weights is not 
None else '' # weights - anno_json = '../coco/annotations/instances_val2017.json' # annotations json - pred_json = str(save_dir / f"{w}_predictions.json") # predictions json - print('\nEvaluating pycocotools mAP... saving %s...' % pred_json) - with open(pred_json, 'w') as f: - json.dump(jdict, f) - - try: # https://github.com/cocodataset/cocoapi/blob/master/PythonAPI/pycocoEvalDemo.ipynb - from pycocotools.coco import COCO - from pycocotools.cocoeval import COCOeval - - anno = COCO(anno_json) # init annotations api - pred = anno.loadRes(pred_json) # init predictions api - eval = COCOeval(anno, pred, 'bbox') - if is_coco: - eval.params.imgIds = [int(Path(x).stem) for x in dataloader.dataset.img_files] # image IDs to evaluate - eval.evaluate() - eval.accumulate() - eval.summarize() - map, map50 = eval.stats[:2] # update results (mAP@0.5:0.95, mAP@0.5) - except Exception as e: - print(f'pycocotools unable to run: {e}') - - # Return results - model.float() # for training - if not training: - s = f"\n{len(list(save_dir.glob('labels/*.txt')))} labels saved to {save_dir / 'labels'}" if save_txt else '' - print(f"Results saved to {save_dir}{s}") - maps = np.zeros(nc) + map - for i, c in enumerate(ap_class): - maps[c] = ap[i] - return (mp, mr, map50, map, *(loss.cpu() / len(dataloader)).tolist()), maps, t - - -if __name__ == '__main__': - parser = argparse.ArgumentParser(prog='test.py') - parser.add_argument('--weights', nargs='+', type=str, default='yolov5s.pt', help='model.pt path(s)') - parser.add_argument('--data', type=str, default='data/coco128.yaml', help='*.data path') - parser.add_argument('--batch-size', type=int, default=32, help='size of each image batch') - parser.add_argument('--img-size', type=int, default=640, help='inference size (pixels)') - parser.add_argument('--conf-thres', type=float, default=0.001, help='object confidence threshold') - parser.add_argument('--iou-thres', type=float, default=0.6, help='IOU threshold for NMS') - parser.add_argument('--task', default='val', help='train, val, test, speed or study') - parser.add_argument('--device', default='', help='cuda device, i.e. 
0 or 0,1,2,3 or cpu') - parser.add_argument('--single-cls', action='store_true', help='treat as single-class dataset') - parser.add_argument('--augment', action='store_true', help='augmented inference') - parser.add_argument('--verbose', action='store_true', help='report mAP by class') - parser.add_argument('--save-txt', action='store_true', help='save results to *.txt') - parser.add_argument('--save-hybrid', action='store_true', help='save label+prediction hybrid results to *.txt') - parser.add_argument('--save-conf', action='store_true', help='save confidences in --save-txt labels') - parser.add_argument('--save-json', action='store_true', help='save a cocoapi-compatible JSON results file') - parser.add_argument('--project', default='runs/test', help='save to project/name') - parser.add_argument('--name', default='exp', help='save to project/name') - parser.add_argument('--exist-ok', action='store_true', help='existing project/name ok, do not increment') - opt = parser.parse_args() - opt.save_json |= opt.data.endswith('coco.yaml') - opt.data = check_file(opt.data) # check file - print(opt) - check_requirements(exclude=('tensorboard', 'pycocotools', 'thop')) - - if opt.task in ('train', 'val', 'test'): # run normally - test(opt.data, - opt.weights, - opt.batch_size, - opt.img_size, - opt.conf_thres, - opt.iou_thres, - opt.save_json, - opt.single_cls, - opt.augment, - opt.verbose, - save_txt=opt.save_txt | opt.save_hybrid, - save_hybrid=opt.save_hybrid, - save_conf=opt.save_conf, - opt=opt - ) - - elif opt.task == 'speed': # speed benchmarks - for w in opt.weights: - test(opt.data, w, opt.batch_size, opt.img_size, 0.25, 0.45, save_json=False, plots=False, opt=opt) - - elif opt.task == 'study': # run over a range of settings and save/plot - # python test.py --task study --data coco.yaml --iou 0.7 --weights yolov5s.pt yolov5m.pt yolov5l.pt yolov5x.pt - x = list(range(256, 1536 + 128, 128)) # x axis (image sizes) - for w in opt.weights: - f = f'study_{Path(opt.data).stem}_{Path(w).stem}.txt' # filename to save to - y = [] # y axis - for i in x: # img-size - print(f'\nRunning {f} point {i}...') - r, _, t = test(opt.data, w, opt.batch_size, i, opt.conf_thres, opt.iou_thres, opt.save_json, - plots=False, opt=opt) - y.append(r + t) # results and times - np.savetxt(f, y, fmt='%10.4g') # save - os.system('zip -r study.zip study_*.txt') - plot_study_txt(x=x) # plot diff --git a/train.py b/train.py index b8dff33dc176..738155ad1f77 100644 --- a/train.py +++ b/train.py @@ -1,157 +1,210 @@ +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license +""" +Train a YOLOv5 model on a custom dataset. + +Models and datasets download automatically from the latest YOLOv5 release. 
+Models: https://github.com/ultralytics/yolov5/tree/master/models +Datasets: https://github.com/ultralytics/yolov5/tree/master/data +Tutorial: https://github.com/ultralytics/yolov5/wiki/Train-Custom-Data + +Usage: + $ python path/to/train.py --data coco128.yaml --weights yolov5s.pt --img 640 # from pretrained (RECOMMENDED) + $ python path/to/train.py --data coco128.yaml --weights '' --cfg yolov5s.yaml --img 640 # from scratch +""" + import argparse -import logging import math import os import random +import sys import time +from copy import deepcopy +from datetime import datetime from pathlib import Path -from threading import Thread import numpy as np +import torch import torch.distributed as dist import torch.nn as nn -import torch.nn.functional as F -import torch.optim as optim -import torch.optim.lr_scheduler as lr_scheduler -import torch.utils.data import yaml from torch.cuda import amp from torch.nn.parallel import DistributedDataParallel as DDP -from torch.utils.tensorboard import SummaryWriter +from torch.optim import SGD, Adam, AdamW, lr_scheduler from tqdm import tqdm -import test # import test.py to get mAP after each epoch -from models.export import load_checkpoint, create_checkpoint +FILE = Path(__file__).resolve() +ROOT = FILE.parents[0] # YOLOv5 root directory +if str(ROOT) not in sys.path: + sys.path.append(str(ROOT)) # add ROOT to PATH +ROOT = Path(os.path.relpath(ROOT, Path.cwd())) # relative + +import val # for end-of-epoch mAP +from models.experimental import attempt_load +from export import load_checkpoint, create_checkpoint from models.yolo import Model from utils.autoanchor import check_anchors +from utils.autobatch import check_train_batch_size +from utils.callbacks import Callbacks from utils.datasets import create_dataloader -from utils.general import labels_to_class_weights, increment_path, labels_to_image_weights, init_seeds, \ - fitness, strip_optimizer, get_latest_run, check_dataset, check_file, check_git_status, check_img_size, \ - check_requirements, print_mutation, set_logging, one_cycle, colorstr +from utils.downloads import attempt_download +from utils.general import (LOGGER, check_dataset, check_file, check_git_status, check_img_size, check_requirements, + check_suffix, check_yaml, colorstr, get_latest_run, increment_path, init_seeds, + intersect_dicts, labels_to_class_weights, labels_to_image_weights, methods, one_cycle, + print_args, print_mutation, strip_optimizer) +from utils.loggers import Loggers +from utils.loggers.wandb.wandb_utils import check_wandb_resume from utils.loss import ComputeLoss -from utils.plots import plot_images, plot_labels, plot_results, plot_evolution +from utils.metrics import fitness +from utils.plots import plot_evolve, plot_labels +from utils.torch_utils import EarlyStopping, ModelEMA, de_parallel, select_device, torch_distributed_zero_first from utils.sparse import SparseMLWrapper -from utils.torch_utils import ModelEMA, select_device, torch_distributed_zero_first -from utils.wandb_logging.wandb_utils import WandbLogger, check_wandb_resume -logger = logging.getLogger(__name__) +LOCAL_RANK = int(os.getenv('LOCAL_RANK', -1)) # https://pytorch.org/docs/stable/elastic/run.html +RANK = int(os.getenv('RANK', -1)) +WORLD_SIZE = int(os.getenv('WORLD_SIZE', 1)) -def train(hyp, opt, device, tb_writer=None): - logger.info(colorstr('hyperparameters: ') + ', '.join(f'{k}={v}' for k, v in hyp.items())) - save_dir, epochs, batch_size, total_batch_size, weights, rank = \ - Path(opt.save_dir), opt.epochs, opt.batch_size, 
opt.total_batch_size, opt.weights, opt.global_rank +def train(hyp, # path/to/hyp.yaml or hyp dictionary + opt, + device, + callbacks + ): + save_dir, epochs, batch_size, weights, single_cls, evolve, data, cfg, resume, noval, nosave, workers, freeze = \ + Path(opt.save_dir), opt.epochs, opt.batch_size, opt.weights, opt.single_cls, opt.evolve, opt.data, opt.cfg, \ + opt.resume, opt.noval, opt.nosave, opt.workers, opt.freeze # Directories - wdir = save_dir / 'weights' - wdir.mkdir(parents=True, exist_ok=True) # make dir - last = wdir / 'last.pt' - best = wdir / 'best.pt' - results_file = save_dir / 'results.txt' + w = save_dir / 'weights' # weights dir + (w.parent if evolve else w).mkdir(parents=True, exist_ok=True) # make dir + last, best = w / 'last.pt', w / 'best.pt' - # Save run settings - with open(save_dir / 'hyp.yaml', 'w') as f: - yaml.dump(hyp, f, sort_keys=False) - with open(save_dir / 'opt.yaml', 'w') as f: - yaml.dump(vars(opt), f, sort_keys=False) + # Hyperparameters + if isinstance(hyp, str): + with open(hyp, errors='ignore') as f: + hyp = yaml.safe_load(f) # load hyps dict + LOGGER.info(colorstr('hyperparameters: ') + ', '.join(f'{k}={v}' for k, v in hyp.items())) - # Configure - plots = not opt.evolve # create plots + # Save run settings + if not evolve: + with open(save_dir / 'hyp.yaml', 'w') as f: + yaml.dump(hyp, f, sort_keys=False) + with open(save_dir / 'opt.yaml', 'w') as f: + yaml.dump(vars(opt), f, sort_keys=False) + + # Loggers + data_dict = None + if RANK in [-1, 0]: + loggers = Loggers(save_dir, weights, opt, hyp, LOGGER) # loggers instance + if loggers.wandb: + data_dict = loggers.wandb.data_dict + if resume: + weights, epochs, hyp, batch_size = opt.weights, opt.epochs, opt.hyp, opt.batch_size + + # Register actions + for k in methods(loggers): + callbacks.register_action(k, callback=getattr(loggers, k)) + + # Config + plots = not evolve # create plots cuda = device.type != 'cpu' half_precision = cuda - init_seeds(2 + rank) - with open(opt.data) as f: - data_dict = yaml.load(f, Loader=yaml.SafeLoader) # data dict - is_coco = opt.data.endswith('coco.yaml') - - # Logging- Doing this before checking the dataset. 
Might update data_dict - loggers = {'wandb': None} # loggers dict - wandb_logger = None - if rank in [-1, 0]: - opt.hyp = hyp # add hyperparameters - run_id = torch.load(weights).get('wandb_id') if weights.endswith('.pt') and os.path.isfile(weights) else None - wandb_logger = WandbLogger(opt, Path(opt.save_dir).stem, run_id, data_dict) - loggers['wandb'] = wandb_logger.wandb - data_dict = wandb_logger.data_dict - if wandb_logger.wandb: - weights, epochs, hyp = opt.weights, opt.epochs, opt.hyp # WandbLogger might update weights, epochs if resuming - - nc = 1 if opt.single_cls else int(data_dict['nc']) # number of classes - names = ['item'] if opt.single_cls and len(data_dict['names']) != 1 else data_dict['names'] # class names - assert len(names) == nc, '%g names found for nc=%g dataset in %s' % (len(names), nc, opt.data) # check + init_seeds(1 + RANK) + with torch_distributed_zero_first(LOCAL_RANK): + data_dict = data_dict or check_dataset(data) # check if None + train_path, val_path = data_dict['train'], data_dict['val'] + nc = 1 if single_cls else int(data_dict['nc']) # number of classes + names = ['item'] if single_cls and len(data_dict['names']) != 1 else data_dict['names'] # class names + assert len(names) == nc, f'{len(names)} names found for nc={nc} dataset in {data}' # check + is_coco = isinstance(val_path, str) and val_path.endswith('coco/val2017.txt') # COCO dataset # Model + check_suffix(weights, ['.pt', '.pth']) # check weights pretrained = weights.endswith('.pt') or weights.endswith('.pth') or weights.startswith('zoo:') if pretrained: - model, extras = load_checkpoint('train', weights, device, opt.cfg, hyp, nc, opt.recipe, opt.resume, rank) + model, extras = load_checkpoint( + type_ = 'train', + weights=weights, + device=device, + cfg=opt.cfg, + hyp=hyp, + nc=nc, + recipe=opt.recipe, + resume=opt.resume, + rank=LOCAL_RANK + ) ckpt, state_dict, sparseml_wrapper = extras['ckpt'], extras['state_dict'], extras['sparseml_wrapper'] - logger.info(extras['report']) # report + LOGGER.info(extras['report']) else: - model = Model(opt.cfg, ch=3, nc=nc, anchors=hyp.get('anchors')).to(device) # create - sparseml_wrapper = SparseMLWrapper(model, opt.recipe) - sparseml_wrapper.initialize(start_epoch=0.0) + model = Model(cfg, ch=3, nc=nc, anchors=hyp.get('anchors')).to(device) # create + sparseml_wrapper = SparseMLWrapper(model, None, opt.recipe) + sparseml_wrapper.initialize(start_epoch=0) ckpt = None - with torch_distributed_zero_first(rank): - check_dataset(data_dict) # check - train_path = data_dict['train'] - test_path = data_dict['val'] # Freeze - freeze = [] # parameter names to freeze (full or partial) + freeze = [f'model.{x}.' 
for x in (freeze if len(freeze) > 1 else range(freeze[0]))] # layers to freeze for k, v in model.named_parameters(): v.requires_grad = True # train all layers if any(x in k for x in freeze): - print('freezing %s' % k) + LOGGER.info(f'freezing {k}') v.requires_grad = False + # Image size + gs = max(int(model.stride.max()), 32) # grid size (max stride) + imgsz = check_img_size(opt.imgsz, gs, floor=gs * 2) # verify imgsz is gs-multiple + + # Batch size + if RANK == -1 and batch_size == -1: # single-GPU only, estimate best batch size + batch_size = check_train_batch_size(model, imgsz) + loggers.on_params_update({"batch_size": batch_size}) + # Optimizer nbs = 64 # nominal batch size - accumulate = max(round(nbs / total_batch_size), 1) # accumulate loss before optimizing - hyp['weight_decay'] *= total_batch_size * accumulate / nbs # scale weight_decay - logger.info(f"Scaled weight_decay = {hyp['weight_decay']}") - - pg0, pg1, pg2 = [], [], [] # optimizer parameter groups - for k, v in model.named_modules(): - if hasattr(v, 'bias') and isinstance(v.bias, nn.Parameter): - pg2.append(v.bias) # biases - if isinstance(v, nn.BatchNorm2d): - pg0.append(v.weight) # no decay - elif hasattr(v, 'weight') and isinstance(v.weight, nn.Parameter): - pg1.append(v.weight) # apply decay - - if opt.adam: - optimizer = optim.Adam(pg0, lr=hyp['lr0'], betas=(hyp['momentum'], 0.999)) # adjust beta1 to momentum + accumulate = max(round(nbs / batch_size), 1) # accumulate loss before optimizing + hyp['weight_decay'] *= batch_size * accumulate / nbs # scale weight_decay + LOGGER.info(f"Scaled weight_decay = {hyp['weight_decay']}") + + g0, g1, g2 = [], [], [] # optimizer parameter groups + for v in model.modules(): + if hasattr(v, 'bias') and isinstance(v.bias, nn.Parameter): # bias + g2.append(v.bias) + if isinstance(v, nn.BatchNorm2d): # weight (no decay) + g0.append(v.weight) + elif hasattr(v, 'weight') and isinstance(v.weight, nn.Parameter): # weight (with decay) + g1.append(v.weight) + + if opt.optimizer == 'Adam': + optimizer = Adam(g0, lr=hyp['lr0'], betas=(hyp['momentum'], 0.999)) # adjust beta1 to momentum + elif opt.optimizer == 'AdamW': + optimizer = AdamW(g0, lr=hyp['lr0'], betas=(hyp['momentum'], 0.999)) # adjust beta1 to momentum else: - optimizer = optim.SGD(pg0, lr=hyp['lr0'], momentum=hyp['momentum'], nesterov=True) + optimizer = SGD(g0, lr=hyp['lr0'], momentum=hyp['momentum'], nesterov=True) - optimizer.add_param_group({'params': pg1, 'weight_decay': hyp['weight_decay']}) # add pg1 with weight_decay - optimizer.add_param_group({'params': pg2}) # add pg2 (biases) - logger.info('Optimizer groups: %g .bias, %g conv.weight, %g other' % (len(pg2), len(pg1), len(pg0))) - del pg0, pg1, pg2 + optimizer.add_param_group({'params': g1, 'weight_decay': hyp['weight_decay']}) # add g1 with weight_decay + optimizer.add_param_group({'params': g2}) # add g2 (biases) + LOGGER.info(f"{colorstr('optimizer:')} {type(optimizer).__name__} with parameter groups " + f"{len(g0)} weight (no decay), {len(g1)} weight, {len(g2)} bias") + del g0, g1, g2 - # Scheduler https://arxiv.org/pdf/1812.01187.pdf - # https://pytorch.org/docs/stable/_modules/torch/optim/lr_scheduler.html#OneCycleLR - if opt.linear_lr: - lf = lambda x: (1 - x / (epochs - 1)) * (1.0 - hyp['lrf']) + hyp['lrf'] # linear - else: + # Scheduler + if opt.cos_lr: lf = one_cycle(1, hyp['lrf'], epochs) # cosine 1->hyp['lrf'] - scheduler = lr_scheduler.LambdaLR(optimizer, lr_lambda=lf) - # plot_lr_scheduler(optimizer, scheduler, epochs) + else: + lf = lambda x: (1 - x / 
epochs) * (1.0 - hyp['lrf']) + hyp['lrf'] # linear + scheduler = lr_scheduler.LambdaLR(optimizer, lr_lambda=lf) # plot_lr_scheduler(optimizer, scheduler, epochs) # EMA - ema = ModelEMA(model, enabled=not opt.disable_ema) if rank in [-1, 0] else None + ema = ModelEMA(model, enabled=not opt.disable_ema) if RANK in [-1, 0] else None # Resume - start_epoch, best_fitness = 0, 0.0 + start_epoch = sparseml_wrapper.start_epoch or 0 + best_fitness = 0.0 if pretrained: - # Epochs - start_epoch = ckpt['epoch'] + 1 if opt.resume: assert start_epoch > 0, '%s training to %g epochs is finished, nothing to resume.' % (weights, epochs) if epochs < start_epoch: - logger.info('%s has been trained for %g epochs. Fine-tuning for %g additional epochs.' % - (weights, ckpt['epoch'], epochs)) - epochs += ckpt['epoch'] # finetune additional epochs + LOGGER.info('%s has been trained for %g epochs. Fine-tuning for %g additional epochs.' % + (weights, start_epoch-1, epochs)) + epochs += start_epoch # finetune additional epochs if sparseml_wrapper.qat_active(start_epoch): ema.enabled = False @@ -162,140 +215,127 @@ def train(hyp, opt, device, tb_writer=None): # EMA if ema and ckpt.get('ema'): - ema.load_state_dict(ckpt) - - # Results - if ckpt.get('training_results') is not None: - results_file.write_text(ckpt['training_results']) # write results.txt - - del ckpt, state_dict + ema.ema.load_state_dict(ckpt['ema'].float().state_dict()) + ema.updates = ckpt['updates'] - # Image sizes - gs = max(int(model.stride.max()), 32) # grid size (max stride) - nl = model.model[-1].nl # number of detection layers (used for scaling hyp['obj']) - imgsz, imgsz_test = [check_img_size(x, gs) for x in opt.img_size] # verify imgsz are gs-multiples + del ckpt # DP mode - if cuda and rank == -1 and torch.cuda.device_count() > 1: + if cuda and RANK == -1 and torch.cuda.device_count() > 1: + LOGGER.warning('WARNING: DP not recommended, use torch.distributed.run for best DDP Multi-GPU results.\n' + 'See Multi-GPU Tutorial at https://github.com/ultralytics/yolov5/issues/475 to get started.') model = torch.nn.DataParallel(model) # SyncBatchNorm - if opt.sync_bn and cuda and rank != -1: + if opt.sync_bn and cuda and RANK != -1: model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(model).to(device) - logger.info('Using SyncBatchNorm()') + LOGGER.info('Using SyncBatchNorm()') # Trainloader - dataloader, dataset = create_dataloader(train_path, imgsz, batch_size, gs, opt, - hyp=hyp, augment=True, cache=opt.cache_images, rect=opt.rect, rank=rank, - world_size=opt.world_size, workers=opt.workers, - image_weights=opt.image_weights, quad=opt.quad, prefix=colorstr('train: ')) - mlc = np.concatenate(dataset.labels, 0)[:, 0].max() # max label class - nb = len(dataloader) # number of batches - assert mlc < nc, 'Label class %g exceeds nc=%g in %s. Possible class labels are 0-%g' % (mlc, nc, opt.data, nc - 1) + train_loader, dataset = create_dataloader(train_path, imgsz, batch_size // WORLD_SIZE, gs, single_cls, + hyp=hyp, augment=True, cache=None if opt.cache == 'val' else opt.cache, + rect=opt.rect, rank=LOCAL_RANK, workers=workers, + image_weights=opt.image_weights, quad=opt.quad, + prefix=colorstr('train: '), shuffle=True) + mlc = int(np.concatenate(dataset.labels, 0)[:, 0].max()) # max label class + nb = len(train_loader) # number of batches + assert mlc < nc, f'Label class {mlc} exceeds nc={nc} in {data}. 
Possible class labels are 0-{nc - 1}' # Process 0 - if rank in [-1, 0]: - testloader = create_dataloader(test_path, imgsz_test, batch_size * 2, gs, opt, # testloader - hyp=hyp, cache=opt.cache_images and not opt.notest, rect=True, rank=-1, - world_size=opt.world_size, workers=opt.workers, - pad=0.5, prefix=colorstr('val: '))[0] + if RANK in [-1, 0]: + val_loader = create_dataloader(val_path, imgsz, batch_size // WORLD_SIZE * 2, gs, single_cls, + hyp=hyp, cache=None if noval else opt.cache, + rect=True, rank=-1, workers=workers * 2, pad=0.5, + prefix=colorstr('val: '))[0] - if not opt.resume: + if not resume: labels = np.concatenate(dataset.labels, 0) - c = torch.tensor(labels[:, 0]) # classes + # c = torch.tensor(labels[:, 0]) # classes # cf = torch.bincount(c.long(), minlength=nc) + 1. # frequency # model._initialize_biases(cf.to(device)) if plots: - plot_labels(labels, names, save_dir, loggers) - if tb_writer: - tb_writer.add_histogram('classes', c, 0) + plot_labels(labels, names, save_dir) # Anchors if not opt.noautoanchor: check_anchors(dataset, model=model, thr=hyp['anchor_t'], imgsz=imgsz) + callbacks.run('on_pretrain_routine_end') + # DDP mode - if cuda and rank != -1: - model = DDP(model, device_ids=[opt.local_rank], output_device=opt.local_rank, - # nn.MultiheadAttention incompatibility with DDP https://github.com/pytorch/pytorch/issues/26698 - find_unused_parameters=any(isinstance(layer, nn.MultiheadAttention) for layer in model.modules())) - - # Model parameters - hyp['box'] *= 3. / nl # scale to layers - hyp['cls'] *= nc / 80. * 3. / nl # scale to classes and layers - hyp['obj'] *= (imgsz / 640) ** 2 * 3. / nl # scale to image size and layers + if cuda and RANK != -1: + model = DDP(model, device_ids=[LOCAL_RANK], output_device=LOCAL_RANK) + + # Model attributes + nl = de_parallel(model).model[-1].nl # number of detection layers (to scale hyps) + hyp['box'] *= 3 / nl # scale to layers + hyp['cls'] *= nc / 80 * 3 / nl # scale to classes and layers + hyp['obj'] *= (imgsz / 640) ** 2 * 3 / nl # scale to image size and layers hyp['label_smoothing'] = opt.label_smoothing model.nc = nc # attach number of classes to model model.hyp = hyp # attach hyperparameters to model - model.gr = 1.0 # iou loss ratio (obj_loss = 1.0 or iou) model.class_weights = labels_to_class_weights(dataset.labels, nc).to(device) * nc # attach class weights model.names = names # Start training t0 = time.time() - nw = max(round(hyp['warmup_epochs'] * nb), 1000) # number of warmup iterations, max(3 epochs, 1k iterations) + nw = max(round(hyp['warmup_epochs'] * nb), 100) # number of warmup iterations, max(3 epochs, 100 iterations) # nw = min(nw, (epochs - start_epoch) / 2 * nb) # limit warmup to < 1/2 of training + last_opt_step = -1 maps = np.zeros(nc) # mAP per class results = (0, 0, 0, 0, 0, 0, 0) # P, R, mAP@.5, mAP@.5-.95, val_loss(box, obj, cls) if scheduler: - scheduler.last_epoch = start_epoch - 1 # do not move + scheduler.last_epoch = start_epoch - 1 # do not move scaler = amp.GradScaler(enabled=half_precision) + stopper = EarlyStopping(patience=opt.patience) compute_loss = ComputeLoss(model) # init loss class - logger.info(f'Image sizes {imgsz} train, {imgsz_test} test\n' - f'Using {dataloader.num_workers} dataloader workers\n' - f'Logging results to {save_dir}\n' + LOGGER.info(f'Image sizes {imgsz} train, {imgsz} val\n' + f'Using {train_loader.num_workers * WORLD_SIZE} dataloader workers\n' + f"Logging results to {colorstr('bold', save_dir)}\n" f'Starting training for {epochs} epochs...') - + # 
SparseML Integration - sparseml_wrapper.initialize_loggers(logger, tb_writer, wandb_logger, rank) - scaler = sparseml_wrapper.modify(scaler, optimizer, model, dataloader) - scheduler = sparseml_wrapper.check_lr_override(scheduler) - epochs = sparseml_wrapper.check_epoch_override(epochs) + if RANK in [-1, 0]: + sparseml_wrapper.initialize_loggers(loggers.logger, loggers.tb, loggers.wandb) + scaler = sparseml_wrapper.modify(scaler, optimizer, model, train_loader) + scheduler = sparseml_wrapper.check_lr_override(scheduler, RANK) + epochs = sparseml_wrapper.check_epoch_override(epochs, RANK) for epoch in range(start_epoch, epochs): # epoch ------------------------------------------------------------------ if sparseml_wrapper.qat_active(epoch): - logger.info('Disabling half precision and EMA, QAT scheduled to run') + LOGGER.info('Disabling half precision and EMA, QAT scheduled to run') half_precision = False scaler._enabled = False ema.enabled = False - model.train() - # Update image weights (optional) + # Update image weights (optional, single-GPU only) if opt.image_weights: - # Generate indices - if rank in [-1, 0]: - cw = model.class_weights.cpu().numpy() * (1 - maps) ** 2 / nc # class weights - iw = labels_to_image_weights(dataset.labels, nc=nc, class_weights=cw) # image weights - dataset.indices = random.choices(range(dataset.n), weights=iw, k=dataset.n) # rand weighted idx - # Broadcast if DDP - if rank != -1: - indices = (torch.tensor(dataset.indices) if rank == 0 else torch.zeros(dataset.n)).int() - dist.broadcast(indices, 0) - if rank != 0: - dataset.indices = indices.cpu().numpy() - - # Update mosaic border + cw = model.class_weights.cpu().numpy() * (1 - maps) ** 2 / nc # class weights + iw = labels_to_image_weights(dataset.labels, nc=nc, class_weights=cw) # image weights + dataset.indices = random.choices(range(dataset.n), weights=iw, k=dataset.n) # rand weighted idx + + # Update mosaic border (optional) # b = int(random.uniform(0.25 * imgsz, 0.75 * imgsz + gs) // gs * gs) # dataset.mosaic_border = [b - imgsz, -b] # height, width borders - mloss = torch.zeros(4, device=device) # mean losses - if rank != -1: - dataloader.sampler.set_epoch(epoch) - pbar = enumerate(dataloader) - logger.info(('\n' + '%10s' * 8) % ('Epoch', 'gpu_mem', 'box', 'obj', 'cls', 'total', 'labels', 'img_size')) - if rank in [-1, 0]: - pbar = tqdm(pbar, total=nb) # progress bar + mloss = torch.zeros(3, device=device) # mean losses + if RANK != -1: + train_loader.sampler.set_epoch(epoch) + pbar = enumerate(train_loader) + LOGGER.info(('\n' + '%10s' * 7) % ('Epoch', 'gpu_mem', 'box', 'obj', 'cls', 'labels', 'img_size')) + if RANK in [-1, 0]: + pbar = tqdm(pbar, total=nb, bar_format='{l_bar}{bar:10}{r_bar}{bar:-10b}') # progress bar optimizer.zero_grad() for i, (imgs, targets, paths, _) in pbar: # batch ------------------------------------------------------------- ni = i + nb * epoch # number integrated batches (since train start) - imgs = imgs.to(device, non_blocking=True).float() / 255.0 # uint8 to float32, 0-255 to 0.0-1.0 + imgs = imgs.to(device, non_blocking=True).float() / 255 # uint8 to float32, 0-255 to 0.0-1.0 # Warmup if ni <= nw: xi = [0, nw] # x interp - # model.gr = np.interp(ni, xi, [0.0, 1.0]) # iou loss ratio (obj_loss = 1.0 or iou) - accumulate = max(1, np.interp(ni, xi, [1, nbs / total_batch_size]).round()) + # compute_loss.gr = np.interp(ni, xi, [0.0, 1.0]) # iou loss ratio (obj_loss = 1.0 or iou) + accumulate = max(1, np.interp(ni, xi, [1, nbs / batch_size]).round()) for j, x in 
enumerate(optimizer.param_groups): # bias lr falls from 0.1 to lr0, all other lrs rise from 0.0 to lr0 if scheduler: @@ -309,14 +349,14 @@ def train(hyp, opt, device, tb_writer=None): sf = sz / max(imgs.shape[2:]) # scale factor if sf != 1: ns = [math.ceil(x * sf / gs) * gs for x in imgs.shape[2:]] # new shape (stretched to gs-multiple) - imgs = F.interpolate(imgs, size=ns, mode='bilinear', align_corners=False) + imgs = nn.functional.interpolate(imgs, size=ns, mode='bilinear', align_corners=False) # Forward with amp.autocast(enabled=half_precision): pred = model(imgs) # forward loss, loss_items = compute_loss(pred, targets.to(device)) # loss scaled by batch_size - if rank != -1: - loss *= opt.world_size # gradient averaged between devices in DDP mode + if RANK != -1: + loss *= WORLD_SIZE # gradient averaged between devices in DDP mode if opt.quad: loss *= 4. @@ -324,244 +364,216 @@ def train(hyp, opt, device, tb_writer=None): scaler.scale(loss).backward() # Optimize - if ni % accumulate == 0: + if ni - last_opt_step >= accumulate: scaler.step(optimizer) # optimizer.step scaler.update() optimizer.zero_grad() if ema: ema.update(model) + last_opt_step = ni elif hasattr(scaler, "emulated_step"): # Call for SparseML integration since the number of steps per epoch can vary # This keeps the number of steps per epoch equivalent to the number of batches per epoch # Does not step the scaler or the optimizer scaler.emulated_step() - # Print - if rank in [-1, 0]: + # Log + if RANK in [-1, 0]: mloss = (mloss * i + loss_items) / (i + 1) # update mean losses - mem = '%.3gG' % (torch.cuda.memory_reserved() / 1E9 if torch.cuda.is_available() else 0) # (GB) - s = ('%10s' * 2 + '%10.4g' * 6) % ( - '%g/%g' % (epoch, epochs - 1), mem, *mloss, targets.shape[0], imgs.shape[-1]) - pbar.set_description(s) - - # Plot - if plots and ni < 3: - f = save_dir / f'train_batch{ni}.jpg' # filename - Thread(target=plot_images, args=(imgs, targets, paths, f), daemon=True).start() - # if tb_writer: - # tb_writer.add_graph(torch.jit.trace(model, imgs, strict=False), []) # add model graph - # tb_writer.add_image(f, result, dataformats='HWC', global_step=epoch) - elif plots and ni == 10 and wandb_logger.wandb: - wandb_logger.log({"Mosaics": [wandb_logger.wandb.Image(str(x), caption=x.name) for x in - save_dir.glob('train*.jpg') if x.exists()]}) - + mem = f'{torch.cuda.memory_reserved() / 1E9 if torch.cuda.is_available() else 0:.3g}G' # (GB) + pbar.set_description(('%10s' * 2 + '%10.4g' * 5) % ( + f'{epoch}/{epochs - 1}', mem, *mloss, targets.shape[0], imgs.shape[-1])) + callbacks.run('on_train_batch_end', ni, model, imgs, targets, paths, plots, opt.sync_bn) + if callbacks.stop_training: + return # end batch ------------------------------------------------------------------------------------------------ - # end epoch ---------------------------------------------------------------------------------------------------- # Scheduler - lr = [x['lr'] for x in optimizer.param_groups] # for tensorboard + lr = [x['lr'] for x in optimizer.param_groups] # for loggers if scheduler: scheduler.step() - # DDP process 0 or single-GPU - if rank in [-1, 0]: + if RANK in [-1, 0]: # mAP - ema.update_attr(model, include=['yaml', 'nc', 'hyp', 'gr', 'names', 'stride', 'class_weights']) - final_epoch = epoch + 1 == epochs - if not opt.notest or final_epoch: # Calculate mAP - wandb_logger.current_epoch = epoch + 1 - results, maps, times = test.test(data_dict, - batch_size=batch_size * 2, - imgsz=imgsz_test, - model=ema.ema, - single_cls=opt.single_cls, - 
dataloader=testloader, - save_dir=save_dir, - verbose=nc < 50 and final_epoch, - plots=plots and final_epoch, - wandb_logger=wandb_logger, - compute_loss=compute_loss, - is_coco=is_coco, - half_precision=half_precision) - - # Write - with open(results_file, 'a') as f: - f.write(s + '%10.4g' * 7 % results + '\n') # append metrics, val_loss - - # Log - tags = ['train/box_loss', 'train/obj_loss', 'train/cls_loss', # train loss - 'metrics/precision', 'metrics/recall', 'metrics/mAP_0.5', 'metrics/mAP_0.5:0.95', - 'val/box_loss', 'val/obj_loss', 'val/cls_loss', # val loss - 'x/lr0', 'x/lr1', 'x/lr2'] # params - for x, tag in zip(list(mloss[:-1]) + list(results) + lr, tags): - if tb_writer: - tb_writer.add_scalar(tag, x, epoch) # tensorboard - if wandb_logger.wandb: - wandb_logger.log({tag: x}) # W&B + callbacks.run('on_train_epoch_end', epoch=epoch) + ema.update_attr(model, include=['yaml', 'nc', 'hyp', 'names', 'stride', 'class_weights']) + final_epoch = (epoch + 1 == epochs) or stopper.possible_stop + if not noval or final_epoch: # Calculate mAP + results, maps, _ = val.run(data_dict, + batch_size=batch_size // WORLD_SIZE * 2, + imgsz=imgsz, + model=ema.ema, + single_cls=single_cls, + dataloader=val_loader, + save_dir=save_dir, + plots=False, + callbacks=callbacks, + compute_loss=compute_loss, + half=half_precision) # Update best mAP fi = fitness(np.array(results).reshape(1, -1)) # weighted combination of [P, R, mAP@.5, mAP@.5-.95] if fi > best_fitness or sparseml_wrapper.reset_best(epoch): best_fitness = fi - wandb_logger.end_epoch(best_result=best_fitness == fi) + log_vals = list(mloss) + list(results) + lr + callbacks.run('on_fit_epoch_end', log_vals, epoch, best_fitness, fi) # Save model if (not opt.nosave) or (final_epoch and not opt.evolve): # if save ckpt_extras = {'nc': nc, 'best_fitness': best_fitness, - 'training_results': results_file.read_text(), - 'wandb_id': wandb_logger.wandb_run.id if wandb_logger.wandb else None} + 'wandb_id': loggers.wandb.wandb_run.id if loggers.wandb else None, + 'date': datetime.now().isoformat()} ckpt = create_checkpoint(epoch, model, optimizer, ema, sparseml_wrapper, **ckpt_extras) # Save last, best and delete torch.save(ckpt, last) if best_fitness == fi: torch.save(ckpt, best) - if wandb_logger.wandb: - if ((epoch + 1) % opt.save_period == 0 and not final_epoch) and opt.save_period != -1: - wandb_logger.log_model( - last.parent, opt, epoch, fi, best_model=best_fitness == fi) + if (epoch > 0) and (opt.save_period > 0) and (epoch % opt.save_period == 0): + torch.save(ckpt, w / f'epoch{epoch}.pt') del ckpt + callbacks.run('on_model_save', last, epoch, final_epoch, best_fitness, fi) + + # Stop Single-GPU + if RANK == -1 and stopper(epoch=epoch, fitness=fi): + break + + # Stop DDP TODO: known issues https://github.com/ultralytics/yolov5/pull/4576 + # stop = stopper(epoch=epoch, fitness=fi) + # if RANK == 0: + # dist.broadcast_object_list([stop], 0) # broadcast 'stop' to all ranks + + # Stop DDP + # with torch_distributed_zero_first(RANK): + # if stop: + # break # must break all DDP ranks # end epoch ---------------------------------------------------------------------------------------------------- - # end training - if rank in [-1, 0]: - # Plots - if plots: - plot_results(save_dir=save_dir) # save as results.png - if wandb_logger.wandb: - files = ['results.png', 'confusion_matrix.png', *[f'{x}_curve.png' for x in ('F1', 'PR', 'P', 'R')]] - wandb_logger.log({"Results": [wandb_logger.wandb.Image(str(save_dir / f), caption=f) for f in files - if (save_dir
/ f).exists()]}) - # Test best.pt - logger.info('%g epochs completed in %.3f hours.\n' % (epochs - start_epoch + 1, (time.time() - t0) / 3600)) - if opt.data.endswith('coco.yaml') and nc == 80: # if COCO - for m in [last, best] if best.exists() else [last]: # speed, mAP tests - test_model, _ = load_checkpoint('ensemble', m, device) - results, _, _ = test.test(opt.data, - batch_size=batch_size * 2, - imgsz=imgsz_test, - conf_thres=0.001, - iou_thres=0.7, - model=test_model, - single_cls=opt.single_cls, - dataloader=testloader, - save_dir=save_dir, - save_json=True, - plots=False, - is_coco=is_coco, - half_precision=half_precision) - - # Strip optimizers - final = best if best.exists() else last # final model + # end training ----------------------------------------------------------------------------------------------------- + if RANK in [-1, 0]: + LOGGER.info(f'\n{epochs - start_epoch + 1} epochs completed in {(time.time() - t0) / 3600:.3f} hours.') for f in last, best: if f.exists(): strip_optimizer(f) # strip optimizers - if opt.bucket: - os.system(f'gsutil cp {final} gs://{opt.bucket}/weights') # upload - if wandb_logger.wandb and not opt.evolve: # Log the stripped model - wandb_logger.wandb.log_artifact(str(final), type='model', - name='run_' + wandb_logger.wandb_run.id + '_model', - aliases=['latest', 'best', 'stripped']) - wandb_logger.finish_run() - else: - dist.destroy_process_group() + if f is best: + LOGGER.info(f'\nValidating {f}...') + results, _, _ = val.run(data_dict, + batch_size=batch_size // WORLD_SIZE * 2, + imgsz=imgsz, + model=load_checkpoint(type_='ensemble', weights=best, device=device)[0], + iou_thres=0.65 if is_coco else 0.60, # best pycocotools results at 0.65 + single_cls=single_cls, + dataloader=val_loader, + save_dir=save_dir, + save_json=is_coco, + verbose=True, + plots=True, + callbacks=callbacks, + compute_loss=compute_loss, # val best model with plots + half=half_precision) + if is_coco: + callbacks.run('on_fit_epoch_end', list(mloss) + list(results) + lr, epoch, best_fitness, fi) + + callbacks.run('on_train_end', last, best, plots, epoch, results) + LOGGER.info(f"Results saved to {colorstr('bold', save_dir)}") + torch.cuda.empty_cache() return results -if __name__ == '__main__': +def parse_opt(known=False): parser = argparse.ArgumentParser() - parser.add_argument('--weights', type=str, default='yolov3.pt', help='initial weights path') + parser.add_argument('--weights', type=str, default=ROOT / 'yolov5s.pt', help='initial weights path') parser.add_argument('--cfg', type=str, default='', help='model.yaml path') - parser.add_argument('--data', type=str, default='data/coco128.yaml', help='data.yaml path') - parser.add_argument('--hyp', type=str, default='data/hyp.scratch.yaml', help='hyperparameters path') + parser.add_argument('--data', type=str, default=ROOT / 'data/coco128.yaml', help='dataset.yaml path') + parser.add_argument('--hyp', type=str, default=ROOT / 'data/hyps/hyp.scratch-low.yaml', help='hyperparameters path') parser.add_argument('--epochs', type=int, default=300) - parser.add_argument('--batch-size', type=int, default=16, help='total batch size for all GPUs') - parser.add_argument('--img-size', nargs='+', type=int, default=[640, 640], help='[train, test] image sizes') + parser.add_argument('--batch-size', type=int, default=16, help='total batch size for all GPUs, -1 for autobatch') + parser.add_argument('--imgsz', '--img', '--img-size', type=int, default=640, help='train, val image size (pixels)') parser.add_argument('--rect', 
action='store_true', help='rectangular training') parser.add_argument('--resume', nargs='?', const=True, default=False, help='resume most recent training') parser.add_argument('--nosave', action='store_true', help='only save final checkpoint') - parser.add_argument('--notest', action='store_true', help='only test final epoch') - parser.add_argument('--noautoanchor', action='store_true', help='disable autoanchor check') - parser.add_argument('--evolve', action='store_true', help='evolve hyperparameters') + parser.add_argument('--noval', action='store_true', help='only validate final epoch') + parser.add_argument('--noautoanchor', action='store_true', help='disable AutoAnchor') + parser.add_argument('--evolve', type=int, nargs='?', const=300, help='evolve hyperparameters for x generations') parser.add_argument('--bucket', type=str, default='', help='gsutil bucket') - parser.add_argument('--cache-images', action='store_true', help='cache images for faster training') + parser.add_argument('--cache', type=str, nargs='?', const='ram', help='--cache images in "ram" (default) or "disk"') parser.add_argument('--image-weights', action='store_true', help='use weighted image selection for training') parser.add_argument('--device', default='', help='cuda device, i.e. 0 or 0,1,2,3 or cpu') parser.add_argument('--multi-scale', action='store_true', help='vary img-size +/- 50%%') parser.add_argument('--single-cls', action='store_true', help='train multi-class data as single-class') - parser.add_argument('--adam', action='store_true', help='use torch.optim.Adam() optimizer') + parser.add_argument('--optimizer', type=str, choices=['SGD', 'Adam', 'AdamW'], default='SGD', help='optimizer') parser.add_argument('--sync-bn', action='store_true', help='use SyncBatchNorm, only available in DDP mode') - parser.add_argument('--local_rank', type=int, default=-1, help='DDP parameter, do not modify') - parser.add_argument('--workers', type=int, default=8, help='maximum number of dataloader workers') - parser.add_argument('--project', default='runs/train', help='save to project/name') - parser.add_argument('--entity', default=None, help='W&B entity') + parser.add_argument('--workers', type=int, default=8, help='max dataloader workers (per RANK in DDP mode)') + parser.add_argument('--project', default=ROOT / 'runs/train', help='save to project/name') parser.add_argument('--name', default='exp', help='save to project/name') parser.add_argument('--exist-ok', action='store_true', help='existing project/name ok, do not increment') parser.add_argument('--quad', action='store_true', help='quad dataloader') - parser.add_argument('--linear-lr', action='store_true', help='linear LR') + parser.add_argument('--cos-lr', action='store_true', help='cosine LR scheduler') parser.add_argument('--label-smoothing', type=float, default=0.0, help='Label smoothing epsilon') - parser.add_argument('--upload_dataset', action='store_true', help='Upload dataset as W&B artifact table') - parser.add_argument('--bbox_interval', type=int, default=-1, help='Set bounding-box image logging interval for W&B') - parser.add_argument('--save_period', type=int, default=-1, help='Log model after every "save_period" epoch') - parser.add_argument('--artifact_alias', type=str, default="latest", help='version of dataset artifact to be used') + parser.add_argument('--patience', type=int, default=100, help='EarlyStopping patience (epochs without improvement)') + parser.add_argument('--freeze', nargs='+', type=int, default=[0], help='Freeze layers: backbone=10, 
first3=0 1 2') + parser.add_argument('--save-period', type=int, default=-1, help='Save checkpoint every x epochs (disabled if < 1)') + parser.add_argument('--local_rank', type=int, default=-1, help='DDP parameter, do not modify') + + # Weights & Biases arguments + parser.add_argument('--entity', default=None, help='W&B: Entity') + parser.add_argument('--upload_dataset', nargs='?', const=True, default=False, help='W&B: Upload data, "val" option') + parser.add_argument('--bbox_interval', type=int, default=-1, help='W&B: Set bounding-box image logging interval') + parser.add_argument('--artifact_alias', type=str, default='latest', help='W&B: Version of dataset artifact to use') parser.add_argument('--recipe', type=str, default=None, help='Path to a sparsification recipe, ' 'see https://github.com/neuralmagic/sparseml for more information') parser.add_argument('--disable-ema', action='store_true', help='Disable EMA model updates (enabled by default)') - opt = parser.parse_args() - # Set DDP variables - opt.world_size = int(os.environ['WORLD_SIZE']) if 'WORLD_SIZE' in os.environ else 1 - opt.global_rank = int(os.environ['RANK']) if 'RANK' in os.environ else -1 - set_logging(opt.global_rank) - if opt.global_rank in [-1, 0]: + opt = parser.parse_known_args()[0] if known else parser.parse_args() + return opt + + +def main(opt, callbacks=Callbacks()): + # Checks + if RANK in [-1, 0]: + print_args(FILE.stem, opt) check_git_status() - check_requirements(exclude=('pycocotools', 'thop')) + check_requirements(exclude=['thop']) # Resume - wandb_run = check_wandb_resume(opt) - if opt.resume and not wandb_run: # resume an interrupted run + if opt.resume and not check_wandb_resume(opt) and not opt.evolve: # resume an interrupted run ckpt = opt.resume if isinstance(opt.resume, str) else get_latest_run() # specified or most recent path assert os.path.isfile(ckpt), 'ERROR: --resume checkpoint does not exist' - apriori = opt.global_rank, opt.local_rank - with open(Path(ckpt).parent.parent / 'opt.yaml') as f: + with open(Path(ckpt).parent.parent / 'opt.yaml', errors='ignore') as f: opt = argparse.Namespace(**yaml.load(f, Loader=yaml.SafeLoader)) # replace - opt.cfg, opt.weights, opt.resume, opt.batch_size, opt.global_rank, opt.local_rank = '', ckpt, True, opt.total_batch_size, *apriori # reinstate - logger.info('Resuming training from %s' % ckpt) + opt.cfg, opt.weights, opt.resume = '', ckpt, True # reinstate + LOGGER.info(f'Resuming training from {ckpt}') else: - # opt.hyp = opt.hyp or ('hyp.finetune.yaml' if opt.weights else 'hyp.scratch.yaml') - opt.data, opt.cfg, opt.hyp = check_file(opt.data), check_file(opt.cfg), check_file(opt.hyp) # check files + opt.data, opt.cfg, opt.hyp, opt.weights, opt.project = \ + check_file(opt.data), check_yaml(opt.cfg), check_yaml(opt.hyp), str(opt.weights), str(opt.project) # checks assert len(opt.cfg) or len(opt.weights), 'either --cfg or --weights must be specified' - opt.img_size.extend([opt.img_size[-1]] * (2 - len(opt.img_size))) # extend to 2 sizes (train, test) - opt.name = 'evolve' if opt.evolve else opt.name - opt.save_dir = increment_path(Path(opt.project) / opt.name, exist_ok=opt.exist_ok | opt.evolve) # increment run + if opt.evolve: + if opt.project == str(ROOT / 'runs/train'): # if default project name, rename to runs/evolve + opt.project = str(ROOT / 'runs/evolve') + opt.exist_ok, opt.resume = opt.resume, False # pass resume to exist_ok and disable resume + opt.save_dir = increment_path(Path(opt.project) / opt.name, exist_ok=opt.exist_ok) # increment run # 
    # DDP mode
-   opt.total_batch_size = opt.batch_size
    device = select_device(opt.device, batch_size=opt.batch_size)
-   if opt.local_rank != -1:
-       assert torch.cuda.device_count() > opt.local_rank
-       torch.cuda.set_device(opt.local_rank)
-       device = torch.device('cuda', opt.local_rank)
-       dist.init_process_group(backend='nccl', init_method='env://')  # distributed backend
-       assert opt.batch_size % opt.world_size == 0, '--batch-size must be multiple of CUDA device count'
-       assert not opt.image_weights, '--image-weights argument is not compatible with DDP training'
-       opt.batch_size = opt.total_batch_size // opt.world_size
-
-   # Hyperparameters
-   with open(opt.hyp) as f:
-       hyp = yaml.load(f, Loader=yaml.SafeLoader)  # load hyps
+   if LOCAL_RANK != -1:
+       msg = 'is not compatible with YOLOv5 Multi-GPU DDP training'
+       assert not opt.image_weights, f'--image-weights {msg}'
+       assert not opt.evolve, f'--evolve {msg}'
+       assert opt.batch_size != -1, f'AutoBatch with --batch-size -1 {msg}, please pass a valid --batch-size'
+       assert opt.batch_size % WORLD_SIZE == 0, f'--batch-size {opt.batch_size} must be multiple of WORLD_SIZE'
+       assert torch.cuda.device_count() > LOCAL_RANK, 'insufficient CUDA devices for DDP command'
+       torch.cuda.set_device(LOCAL_RANK)
+       device = torch.device('cuda', LOCAL_RANK)
+       dist.init_process_group(backend="nccl" if dist.is_nccl_available() else "gloo")

    # Train
-   logger.info(opt)
    if not opt.evolve:
-       tb_writer = None  # init loggers
-       if opt.global_rank in [-1, 0]:
-           prefix = colorstr('tensorboard: ')
-           logger.info(f"{prefix}Start with 'tensorboard --logdir {opt.project}', view at http://localhost:6006/")
-           tb_writer = SummaryWriter(opt.save_dir)  # Tensorboard
-       train(hyp, opt, device, tb_writer)
+       train(opt.hyp, opt, device, callbacks)
+       if WORLD_SIZE > 1 and RANK == 0:
+           LOGGER.info('Destroying process group... ')
+           dist.destroy_process_group()

    # Evolve hyperparameters (optional)
    else:
@@ -593,23 +605,27 @@ def train(hyp, opt, device, tb_writer=None):
                'flipud': (1, 0.0, 1.0),  # image flip up-down (probability)
                'fliplr': (0, 0.0, 1.0),  # image flip left-right (probability)
                'mosaic': (1, 0.0, 1.0),  # image mixup (probability)
-               'mixup': (1, 0.0, 1.0)}  # image mixup (probability)
-
-       assert opt.local_rank == -1, 'DDP mode not implemented for --evolve'
-       opt.notest, opt.nosave = True, True  # only test/save final epoch
+               'mixup': (1, 0.0, 1.0),  # image mixup (probability)
+               'copy_paste': (1, 0.0, 1.0)}  # segment copy-paste (probability)
+
+       with open(opt.hyp, errors='ignore') as f:
+           hyp = yaml.load(f, Loader=yaml.SafeLoader)  # load hyps dict
+           if 'anchors' not in hyp:  # anchors commented in hyp.yaml
+               hyp['anchors'] = 3
+       opt.noval, opt.nosave, save_dir = True, True, Path(opt.save_dir)  # only val/save final epoch
        # ei = [isinstance(x, (int, float)) for x in hyp.values()]  # evolvable indices
-       yaml_file = Path(opt.save_dir) / 'hyp_evolved.yaml'  # save best result here
+       evolve_yaml, evolve_csv = save_dir / 'hyp_evolve.yaml', save_dir / 'evolve.csv'
        if opt.bucket:
-           os.system('gsutil cp gs://%s/evolve.txt .' % opt.bucket)  # download evolve.txt if exists
+           os.system(f'gsutil cp gs://{opt.bucket}/evolve.csv {evolve_csv}')  # download evolve.csv if exists

-       for _ in range(300):  # generations to evolve
-           if Path('evolve.txt').exists():  # if evolve.txt exists: select best hyps and mutate
+       for _ in range(opt.evolve):  # generations to evolve
+           if evolve_csv.exists():  # if evolve.csv exists: select best hyps and mutate
                # Select parent(s)
                parent = 'single'  # parent selection method: 'single' or 'weighted'
-               x = np.loadtxt('evolve.txt', ndmin=2)
+               x = np.loadtxt(evolve_csv, ndmin=2, delimiter=',', skiprows=1)
                n = min(5, len(x))  # number of previous results to consider
                x = x[np.argsort(-fitness(x))][:n]  # top n mutations
-               w = fitness(x) - fitness(x).min()  # weights
+               w = fitness(x) - fitness(x).min() + 1E-6  # weights (sum > 0)
                if parent == 'single' or len(x) == 1:
                    # x = x[random.randint(0, n - 1)]  # random selection
                    x = x[random.choices(range(n), weights=w)[0]]  # weighted selection
@@ -620,7 +636,7 @@ def train(hyp, opt, device, tb_writer=None):
                mp, s = 0.8, 0.2  # mutation probability, sigma
                npr = np.random
                npr.seed(int(time.time()))
-               g = np.array([x[0] for x in meta.values()])  # gains 0-1
+               g = np.array([meta[k][0] for k in hyp.keys()])  # gains 0-1
                ng = len(meta)
                v = np.ones(ng)
                while all(v == 1):  # mutate until a change occurs (prevent duplicates)
@@ -635,12 +651,27 @@ def train(hyp, opt, device, tb_writer=None):
                    hyp[k] = round(hyp[k], 5)  # significant digits
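Reviewer aside (not part of the patch): the two hunks above show the parent-selection and mutation bookkeeping but elide the mutation line itself, which sits between them. The following self-contained sketch, with made-up fitness values and gains, reconstructs the scheme from the variables the hunks do show (`w`, `mp`, `s`, `g`, `v`); treat the exact mutation expression as an assumption.

```python
# Illustrative sketch of one evolve step: weighted parent selection followed by a
# multiplicative Gaussian mutation, gated per-hyp by probability mp and scaled by sigma s.
import random
import numpy as np

fit = np.array([0.30, 0.28, 0.25])       # stand-in for fitness(x) over the top-n results
w = fit - fit.min() + 1E-6               # weights (sum > 0), exactly as in the hunk
parent = random.choices(range(len(w)), weights=w)[0]  # weighted parent selection

mp, s = 0.8, 0.2                         # mutation probability, sigma (values from the hunk)
g = np.array([1.0, 1.0, 0.3, 1.0])       # per-hyp gains (meta[k][0]); values here invented
npr = np.random
ng = len(g)
v = np.ones(ng)
while all(v == 1):                       # mutate until a change occurs (prevent duplicates)
    v = (g * (npr.random(ng) < mp) * npr.randn(ng) * npr.random() * s + 1).clip(0.3, 3.0)
print(parent, v)                         # chosen parent index, per-hyp multipliers in [0.3, 3.0]
```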
                # Train mutation
-               results = train(hyp.copy(), opt, device)
-
+               results = train(hyp.copy(), opt, device, callbacks)
+               callbacks = Callbacks()

                # Write mutation results
-               print_mutation(hyp.copy(), results, yaml_file, opt.bucket)
+               print_mutation(results, hyp.copy(), save_dir, opt.bucket)

        # Plot results
-       plot_evolution(yaml_file)
-       print(f'Hyperparameter evolution complete. Best results saved as: {yaml_file}\n'
-             f'Command to train a new model with these hyperparameters: $ python train.py --hyp {yaml_file}')
\ No newline at end of file
+       plot_evolve(evolve_csv)
+       LOGGER.info(f'Hyperparameter evolution finished {opt.evolve} generations\n'
+                   f"Results saved to {colorstr('bold', save_dir)}\n"
+                   f'Usage example: $ python train.py --hyp {evolve_yaml}')
+
+
+def run(**kwargs):
+   # Usage: import train; train.run(data='coco128.yaml', imgsz=320, weights='yolov5m.pt')
+   opt = parse_opt(True)
+   for k, v in kwargs.items():
+       setattr(opt, k, v)
+   main(opt)
+   return opt
+
+
+if __name__ == "__main__":
+   opt = parse_opt()
+   main(opt)
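The new `run()` helper above gives train.py a programmatic entry point alongside the CLI. Based on its own usage comment, it can be exercised as below; the `epochs` override is an assumed extra flag from `parse_opt`, and any attribute the parser defines can be set the same way.

```python
# Programmatic training via the new entry point; mirrors the usage comment in run().
import train

opt = train.run(data='coco128.yaml', imgsz=320, weights='yolov5m.pt', epochs=3)
print(opt.save_dir)  # run() returns the resolved options namespace after main() completes
```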
"layout": "IPY_MODEL_9dd0f182db5d45378ceafb855e486eb8" + "layout": "IPY_MODEL_c4c4593c10904cb5b8a5724d60c7e181" } }, - "8abfdd8778e44b7ca0d29881cb1ada05": { + "5296d28be75740b2892ae421bbec3657": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", + "model_module_version": "1.5.0", "state": { "_view_name": "HTMLView", - "style": "IPY_MODEL_a3dab28b45c247089a3d1b8b09f327de", + "style": "IPY_MODEL_473371611126476c88d5d42ec7031ed6", "_dom_classes": [], "description": "", "_model_name": "HTMLModel", "placeholder": "​", "_view_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", - "value": " 781M/781M [08:43<00:00, 1.56MB/s]", + "value": " 780M/780M [00:11<00:00, 91.9MB/s]", "_view_count": null, "_view_module_version": "1.5.0", "description_tooltip": null, "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_32451332b7a94ba9aacddeaa6ac94d50" + "layout": "IPY_MODEL_65efdfd0d26c46e79c8c5ff3b77126cc" } }, - "78c6c3d97c484916b8ee167c63556800": { - "model_module": "@jupyter-widgets/controls", - "model_name": "ProgressStyleModel", - "state": { - "_view_name": "StyleView", - "_model_name": "ProgressStyleModel", - "description_width": "initial", - "_view_module": "@jupyter-widgets/base", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.2.0", - "bar_color": null, - "_model_module": "@jupyter-widgets/controls" - } - }, - "9dd0f182db5d45378ceafb855e486eb8": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - "a3dab28b45c247089a3d1b8b09f327de": { + "9f09facb2a6c4a7096810d327c8b551c": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", + "model_module_version": "1.5.0", "state": { "_view_name": "StyleView", "_model_name": "DescriptionStyleModel", @@ -210,80 +170,10 @@ "_model_module": "@jupyter-widgets/controls" } }, - "32451332b7a94ba9aacddeaa6ac94d50": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, 
- "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - "0fffa335322b41658508e06aed0acbf0": { - "model_module": "@jupyter-widgets/controls", - "model_name": "HBoxModel", - "state": { - "_view_name": "HBoxView", - "_dom_classes": [], - "_model_name": "HBoxModel", - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.5.0", - "box_style": "", - "layout": "IPY_MODEL_a354c6f80ce347e5a3ef64af87c0eccb", - "_model_module": "@jupyter-widgets/controls", - "children": [ - "IPY_MODEL_85823e71fea54c39bd11e2e972348836", - "IPY_MODEL_fb11acd663fa4e71b041d67310d045fd" - ] - } - }, - "a354c6f80ce347e5a3ef64af87c0eccb": { + "25621cff5d16448cb7260e839fd0f543": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", + "model_module_version": "1.2.0", "state": { "_view_name": "LayoutView", "grid_template_rows": null, @@ -332,56 +222,14 @@ "left": null } }, - "85823e71fea54c39bd11e2e972348836": { - "model_module": "@jupyter-widgets/controls", - "model_name": "FloatProgressModel", - "state": { - "_view_name": "ProgressView", - "style": "IPY_MODEL_8a919053b780449aae5523658ad611fa", - "_dom_classes": [], - "description": "100%", - "_model_name": "FloatProgressModel", - "bar_style": "success", - "max": 22091032, - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "value": 22091032, - "_view_count": null, - "_view_module_version": "1.5.0", - "orientation": "horizontal", - "min": 0, - "description_tooltip": null, - "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_5bae9393a58b44f7b69fb04816f94f6f" - } - }, - "fb11acd663fa4e71b041d67310d045fd": { - "model_module": "@jupyter-widgets/controls", - "model_name": "HTMLModel", - "state": { - "_view_name": "HTMLView", - "style": "IPY_MODEL_d26c6d16c7f24030ab2da5285bf198ee", - "_dom_classes": [], - "description": "", - "_model_name": "HTMLModel", - "placeholder": "​", - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "value": " 21.1M/21.1M [00:02<00:00, 9.36MB/s]", - "_view_count": null, - "_view_module_version": "1.5.0", - "description_tooltip": null, - "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_f7767886b2364c8d9efdc79e175ad8eb" - } - }, - "8a919053b780449aae5523658ad611fa": { + "0ce7164fc0c74bb9a2b5c7037375a727": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", + "model_module_version": "1.5.0", "state": { "_view_name": "StyleView", "_model_name": "ProgressStyleModel", - "description_width": "initial", + "description_width": "", "_view_module": "@jupyter-widgets/base", "_model_module_version": "1.5.0", "_view_count": null, @@ -390,9 +238,10 @@ "_model_module": "@jupyter-widgets/controls" } }, - "5bae9393a58b44f7b69fb04816f94f6f": { + "c4c4593c10904cb5b8a5724d60c7e181": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", + "model_module_version": 
"1.2.0", "state": { "_view_name": "LayoutView", "grid_template_rows": null, @@ -441,9 +290,10 @@ "left": null } }, - "d26c6d16c7f24030ab2da5285bf198ee": { + "473371611126476c88d5d42ec7031ed6": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", + "model_module_version": "1.5.0", "state": { "_view_name": "StyleView", "_model_name": "DescriptionStyleModel", @@ -455,9 +305,10 @@ "_model_module": "@jupyter-widgets/controls" } }, - "f7767886b2364c8d9efdc79e175ad8eb": { + "65efdfd0d26c46e79c8c5ff3b77126cc": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", + "model_module_version": "1.2.0", "state": { "_view_name": "LayoutView", "grid_template_rows": null, @@ -523,13 +374,14 @@ { "cell_type": "markdown", "metadata": { - "id": "HvhYZrIZCEyo" + "id": "t6MPjfT5NrKQ" }, "source": [ - "\n", + "\n", + "\n", "\n", - "This is the **official YOLOv5 πŸš€ notebook** authored by **Ultralytics**, and is freely available for redistribution under the [GPL-3.0 license](https://choosealicense.com/licenses/gpl-3.0/). \n", - "For more information please visit https://github.com/ultralytics/yolov5 and https://www.ultralytics.com. Thank you!" + "This is the **official YOLOv5 πŸš€ notebook** by **Ultralytics**, and is freely available for redistribution under the [GPL-3.0 license](https://choosealicense.com/licenses/gpl-3.0/). \n", + "For more information please visit https://github.com/ultralytics/yolov5 and https://ultralytics.com. Thank you!" ] }, { @@ -550,27 +402,26 @@ "colab": { "base_uri": "https://localhost:8080/" }, - "outputId": "9b022435-4197-41fc-abea-81f86ce857d0" + "outputId": "3809e5a9-dd41-4577-fe62-5531abf7cca2" }, "source": [ - "!git clone https://github.com/ultralytics/yolov5 # clone repo\n", + "!git clone https://github.com/ultralytics/yolov5 # clone\n", "%cd yolov5\n", - "%pip install -qr requirements.txt # install dependencies\n", + "%pip install -qr requirements.txt # install\n", "\n", "import torch\n", - "from IPython.display import Image, clear_output # to display images\n", - "\n", - "clear_output()\n", - "print(f\"Setup complete. Using torch {torch.__version__} ({torch.cuda.get_device_properties(0).name if torch.cuda.is_available() else 'CPU'})\")" + "from yolov5 import utils\n", + "display = utils.notebook_init() # checks" ], "execution_count": null, "outputs": [ { "output_type": "stream", + "name": "stdout", "text": [ - "Setup complete. Using torch 1.8.1+cu101 (Tesla V100-SXM2-16GB)\n" - ], - "name": "stdout" + "YOLOv5 πŸš€ v6.0-48-g84a8099 torch 1.10.0+cu102 CUDA:0 (Tesla V100-SXM2-16GB, 16160MiB)\n", + "Setup complete βœ… (2 CPUs, 12.7 GB RAM, 42.2/166.8 GB disk)\n" + ] } ] }, @@ -584,7 +435,15 @@ "\n", "`detect.py` runs YOLOv5 inference on a variety of sources, downloading models automatically from the [latest YOLOv5 release](https://github.com/ultralytics/yolov5/releases), and saving results to `runs/detect`. 
  {
    "cell_type": "code",
    "metadata": {
      "id": "zR9ZbuQCH7FX",
      "colab": {
-        "base_uri": "https://localhost:8080/",
-        "height": 534
+        "base_uri": "https://localhost:8080/"
      },
-      "outputId": "c9a308f7-2216-4805-8003-eca8dd0dc30d"
+      "outputId": "8f7e6588-215d-4ebd-93af-88b871e770a7"
    },
    "source": [
-      "!python detect.py --weights yolov5s.pt --img 640 --conf 0.25 --source data/images/\n",
-      "Image(filename='runs/detect/exp/zidane.jpg', width=600)"
+      "!python detect.py --weights yolov5s.pt --img 640 --conf 0.25 --source data/images\n",
+      "display.Image(filename='runs/detect/exp/zidane.jpg', width=600)"
    ],
    "execution_count": null,
    "outputs": [
      {
        "output_type": "stream",
+        "name": "stdout",
        "text": [
-          "Namespace(agnostic_nms=False, augment=False, classes=None, conf_thres=0.25, device='', exist_ok=False, img_size=640, iou_thres=0.45, name='exp', project='runs/detect', save_conf=False, save_txt=False, source='data/images/', update=False, view_img=False, weights=['yolov5s.pt'])\n",
-          "YOLOv5 πŸš€ v5.0-1-g0f395b3 torch 1.8.1+cu101 CUDA:0 (Tesla V100-SXM2-16GB, 16160.5MB)\n",
+          "\u001b[34m\u001b[1mdetect: \u001b[0mweights=['yolov5s.pt'], source=data/images, imgsz=[640, 640], conf_thres=0.25, iou_thres=0.45, max_det=1000, device=, view_img=False, save_txt=False, save_conf=False, save_crop=False, nosave=False, classes=None, agnostic_nms=False, augment=False, visualize=False, update=False, project=runs/detect, name=exp, exist_ok=False, line_thickness=3, hide_labels=False, hide_conf=False, half=False, dnn=False\n",
+          "YOLOv5 πŸš€ v6.0-48-g84a8099 torch 1.10.0+cu102 CUDA:0 (Tesla V100-SXM2-16GB, 16160MiB)\n",
          "\n",
          "Fusing layers... \n",
-          "Model Summary: 224 layers, 7266973 parameters, 0 gradients, 17.0 GFLOPS\n",
-          "image 1/2 /content/yolov5/data/images/bus.jpg: 640x480 4 persons, 1 bus, Done. (0.008s)\n",
-          "image 2/2 /content/yolov5/data/images/zidane.jpg: 384x640 2 persons, 2 ties, Done. (0.008s)\n",
-          "Results saved to runs/detect/exp\n",
(0.087)\n" - ], - "name": "stdout" - }, - { - "output_type": "execute_result", - "data": { - "image/jpeg": "/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAIBAQEBAQIBAQECAgICAgQDAgICAgUEBAMEBgUGBgYFBgYGBwkIBgcJBwYGCAsICQoKCgoKBggLDAsKDAkKCgr/2wBDAQICAgICAgUDAwUKBwYHCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgr/wAARCALQBQADASIAAhEBAxEB/8QAHwAAAQUBAQEBAQEAAAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUFBAQAAAF9AQIDAAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0NTY3ODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztLW2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi4+Tl5ufo6erx8vP09fb3+Pn6/8QAHwEAAwEBAQEBAQEBAQAAAAAAAAECAwQFBgcICQoL/8QAtREAAgECBAQDBAcFBAQAAQJ3AAECAxEEBSExBhJBUQdhcRMiMoEIFEKRobHBCSMzUvAVYnLRChYkNOEl8RcYGRomJygpKjU2Nzg5OkNERUZHSElKU1RVVldYWVpjZGVmZ2hpanN0dXZ3eHl6goOEhYaHiImKkpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4uPk5ebn6Onq8vP09fb3+Pn6/9oADAMBAAIRAxEAPwD8347F5pkSP5t38P3ttaFjZzR2rzOMjfs+/wDNVi10+5kh877Gqv8AwfP96tOz0+2b99sw0e1drfxV87HY+wjHm94z4bOZ2WZ4dgV9vzN81Tx6a8jHvu+bd/DV+HT51uHd0Up95Pl21bhtfIkH2ncqfN8q/e21NS0dUbU4/ZMf7Oi52OzMu1UVU+an/wBjlW3w7l2t8y/3q3pNPRl2I+1tn/AqZZ280cXk3Nrub+7v+6tefKtLl5onZGm48qMqbQ3k/wBJeb5lb5PMf5l/2aZcaW6tshhyzffZn3ba3biHzI5USFfmX7tQyWc3zTXltuWPb+8jT+LbXJWxVWO534XDxkchrmm/KZt+d3yvurBm0maHLvu2su1G/vV3OsWsMe5xyWTd5bVh3VikkLJ5Pyqu7b/easaNacX7x6nsYyicrJYws3nom1m/vf3qWC3uYW32zr8v95v/AEGtK6s5I9iJuDMu51aq62827502Nt3Jur6zAylKUTlqREj+0wsiI7OzNuRW/wBr+7ViSPy4/wBzud9+1vm+Wq0aurIJtxdf4qtLayeX8nyusu5mb+KvqMPSlKJ58qnvco65uHaNpvlTdt2fJ8y0kjSbER3Vtq7tzJtqbyPtDLDNtx96nTKjR/Ii7t38X3a9D2fKebUkoy5SHyXjnP75l/i/3amSSVm+0v5joqbfv/Ky/wB6i3/fRrv+9911j+6rUsMMuxvJufu/fXZXPKXLE4OaUuaxPBv3b9n+r/hjl3LVqH9zJ/qV2t823/eqtbwpHGkP+qVn+dY/l/4FVuzZLqRI5plV13b12fdX+GvLxHvF04825p2cm1Ucopdvl+V9taVvDcSSK6fd+ZXrN0+GGS637F+V1aXd/d/hq7b75mX51Db9zMr/AC/7Py14WIqSNadHuaVjNLJCsP2pmTfuddvzNU8jO3yQ7X2/e/iaq8IeGNPLRW+bbu2fdq95n2OZXhhV2b5V3V4dap7+h6VOnHqWob792yI6o6orfLVCZJpPnudrBf4v97+KpmuIWmDzTKsrfdXft+7VCS5dpmR5o3/vq392uJSjztQOlx928hzbIZXSFFLs7fMqf6yopmubzY63jIVb7qrU32OGSP8AhRPveXHSyKluy/J975VXf/FWkqnNqLk5fdEntdy/3vl2eZs/76pU3yQyJsYeX8if3lqwsE0iy2zzfuvl/d/7VVr6O6WTf8yfe/d7/u1n71TRSMK0R8d1cxwrvRQv3dzfdWoprp75hNc3cjtHtSLzG+61OaGaS3RJnV1+88bVVkkRlKWtthlf+GspRhKRjH3Y8rKuoXtvHteN8qy7X/vVga9cXisrpcthkVfm/u1pXk00zAu+R/d/utWDq14+5n342/6rav3a78PFRj8JyVqhj6lM/wC8+8f/AB3dXManN82/fjd/CtdBqW+4bM0/Gzc1Yd48Pls/Vm+Xb/FXsUYy5NDxsVLmiYF9avt+07F21QVXmuNmzb/utW9cWbyR56hVqnHp7rMJvJ8xK9CnKMeU82T5hljlWZE3fN9//ZrodI3x7ntn+Rk2srfM1V9N03bGOdu7/wAdrVhs4I5BGiMk0f8ADJ8tEqhrToz+I1NLtUinR9+fLf5F/wDsa7bQZnjwibU2/N+7X5VrjdH/AHKxBE3f367TRZE+x7E2/wB1dv3mqo1PfOj2fuWOu0W4k+ziF5sOzfxfw11ui6uNyu6Mrqu1/Mfb8v8As1wWk3KOuy28xVVvnb+7W/puqQxsU3/eiVmj+9XZGpzmMoyj8R3Wn6kQN8Myh1f/AEfb93/eatXT9am8ve+1vvbmrgrHWd0iXOcFfl3L/F/wGtCHxB5K+d8wSR9qKq/M3/Aa6OYw9+J2q69C3zpZttX5Ub+9/vUybV4IYd+//WbtzL/CtcqutbYf3fmHc+1/mqvcawk3ybJCu/b9/wC9U/DAfunT/wBtusCv0/2d/wDDWbqGuosbO8jEt91tvystYN9q226ldH2xtt8qNX3f8B3VVvtUm2l3TLsnzLu/i/hqJRjI25vslPxRNDdZm85iv3fLb+GuMvJ3dXR/uK23/erW1PVHuomQXLFpJfkZvur/ALNZGqQ/aFb5G+V/3sa1x1I8x0UeaOjOa1SG2ml85Pv/AMO5vlWqtvbupYOmPLf5d3yturcbTkjdt6Mxb/lm38NQXWnpJcM8iSO38Un8K1nKn7p2RqQ5tTPWFJpD5czIn97726mTWVzIHfez+Z/yz/vVZa1eSTZDCqqqNu+fbSLYwzRuXhxufd9/71cNSnI0lUM2SN1CwpMuyT5tv/stJbxurI/nL+8ba0cn92tXybaOSHyYfuxbtrN8v3qq3Eltu+0+T86tt+VK5q1P3tCoVOXWRbtWdcoltv2tu2t8u6uj01na3TZuAVt27+61YNu7s0jzbWlb5U/hrQ0+aGObzo3bzl+X7/y7q+Ox1GXNKTPewtT4ZI7LT2T/AFM03mt8q7v4a0WuvLUI+6H5v9Wvzbv+BVzVnfTeSH/55q25d/3m/wBmp/7UdpI+Nqt8rbWr5DEYeUqp9DRrfDzG5cXySsN9zuVot6qybvu1m3mpRrD5iO0KSRbvlf5aqSal8zbNuPm2/J8q1Uk1QSM
thEmkfc+3dIm2vWpx+yefKPtPeIL7eqvC9su6P8Ai31kXCwlPJ+YO332rS1CNPM3v8u1d21WrI1KYSK7p8iq+12V67IxMpUylNJMsyRnyz+9b5m/i2/3aa9x5jK+zYG/8dptwqHe77lDfKsjN97/AHabCsNwy2021gq7ttacsTllHl90s6XsYMjyL5sku7ar/wANaDLCsmyENlfvsy1ShVIdjujMy/c8ur9vlo3d02tu3IrVjUkZez94fZs/+0W2srKyf+PU+ON5pPOd/kb5dv8AtVHn7KzjfJ++2/x1chVJlLxjZu+Xcqfdry8RI7KcZdCKxsfJkeb+98yL97bV+yt3uJvJhmYts37VSoYYXVRCjthV+eSRPvf7VXIZlhxD5ys23a21az9/4hyl/KQrbo67N/Kvu+WrMaQ27ffk+Xb838TVHJcQqwTzGJ2feVKRri22p+5kLr825d1ddOPMeZX/AHnMy9atDHGZt+6P+P8AvVs6fNZtG3nWef8ApoyfdX+GsGzkc3n2b5odybt2z5Wrc0uZFZXkO3a+395/E392vWoRueJW5onY6KqLDA7zNsZPu7du5a7jw7HD5MUycfdZGX5dtcDolx9lWPfDtWP5k+b5t1dj4bvEkhQJZ7mZ/uxtXqU480bnjYjl5jvtEukjKI8yyuzt+7j/AIv96uw02TdYq/ygbVb723/vmuC0O6+0YmQMzfdVfu7a6zT77y2+eZXEaruVl3fNWhzfEdla3VzMq73Xy9m/b/EtXproLMXR1Kr/AOPVh2OpPDuS2udn2j/XtJF8rf7tW4byBgyJMoiVWbdJ8tTzFRo9CzI3kyfJD/Budd9Zt80Mjec6cf3W/hqaS6tm33MO4LGu7zG/iX+9WbcapbTX0aOi7GRnRl+bdXPKsddPD9yDUm8yPzoYfNGxlVWfbXO6hZpHD5PkwvtTc3zfMv8Au/7Na11cedC+P4XbYrVh3U0CxlE5RflRd/3axqVDqjRlEytQg86P7RNNxvXarL93/gNY2qfLcb5nyyqz7VrbvpH8tvJuViK/f+T7tc1rV1YW6vv5bf8AdV655VOY6Y05dSvdb1K+dCzI33o2T7rf3qx7yGG6UlHZ1Z9u2b+GrGsa88kLzJN8y/LukrmtS8Q+VG8N1Mrhm3Iv8K1yVJc3wnRCPSQXzOuxI3XdGu/cv/xVZGtahbW5l8mZkb+7tqjrXiia3tWe2O5938L/AC1yXiDxg8d0qQvHtVG+Xf8Aeaufm+ydtOUuU27rxAscgebdsX7n+0396qC+IEaTYu53ZvkZq4+68UQs3nJ8jr833/l3VUt/FE8lwD5zKy/3v4q7sPEwrS5j17Rtc+yqr/aY2VV+RfvV3/hDWJpoxEjqs396vEPCfiIbh5M3zb/n+SvR/CuoJtCTXO12Tc7f3a9KlLm+I4an909o0XVraHYj+XCzIrJJ96uz0HXHkaK6e5Ybfk2q/wB7/aryPQb52mhmtplO5Nv+9XcaLqSTR7/s0aFvl3M+2vUhJHnVn05T0bTfEG24W5G0Ddt2t81dLa6k8ca/PGiN92vPdI1CH7KmbmR2VlV/k+8tdPoMnzbJvnDI3zfe2/xVvGUebyOGpGJ1Ed5eNC0LzZ3ffZUp8cN7HEu94X+Xay/7VVbWPdCh+07UVVZ9rK3zVZh2TXHybUZvl/eU+WJEoy90ydajh85EmtlIX+JX+Vq5nxFJ+7W2Ta7bWZ5G/wA/drrdU8wzM7pt2pu2/wAVchrUcMcO3eyp93zPustc1T3TaG5xPiKZLhXdywDRMqx7P/Qa4jVFRVW2e5bY27Yuz7tdv4kaDzD9j2qyvtfdXF+JJLaNltvs+59+5W/iZf71ckpWkejCWhxviq1tri3ZLYs6LFtSTf8AM1cPr1pM1vHNZTZ8ncvzfw13mqLCsmxEaINuXav8X+7XKaxaosO9JmQyP91k+XbXNKJ0U9jzTxBaveQuiOzQs25o64/UtK27/k2D7u1q9R1SxhhD73XMnyoy/wANcl4g0ZGuvLm+fb92T+9Xn1PeO6n/ADHEzWqLGZl/vr/s/LSNC8f+jOkeV+ZW3VsTWLyTb0+6v3l27lZaij0uGWR5nh2/3/8A4muOU/sm/LKWxkR2czM1zN5iL/Bt/ipi2+6bcjs21v8Avmte60/gukMi/P8Ae/2aotaeXMN6Nlpdyt/erb4oaClDl5R1q3kzjZDvVX3eW1Wobh/O+xony7NyR0+1t0WPe8m9v9lf/HasWLwyHzkTcrMyt5iVjU/lOmnzRiT28dy2zyU3KyfearkK+XIIUT5lX52X+FqdZ2sxjML7lC/6pd9W5NNSFVd3Ywsv8X3laoj8Oh1KU+YrsyWKs87tnZ8+5d1XbVbaKaOCZIdq7fm/+Jp0MM6yF3dlZv4l/hpq27qv2abgt83mbfu/3aPi+EJR5SeG4hjk+ROVdtm5/u/7VeweEl8r4UqpPAsZ+T3GX5ryFrf7PNDG+1wyr8395q9d8Ks7fCf5mJIsrkHcMHhnFftfgnJvOcf/ANgtT/0qB9Bwwn9crX/59v8ANHBR6pC1v9pLtvV9z+X8zbahvdRRp5Eh8x1+6qyf+hNVHdPbt5P+r3Ju3L91lqvJqXmQt5Xmb/7v3a/F5Vj5uOH7C3135PzpDHub79YGvSO2+ZJvk27kVv71XbzVvJVnhfHzrurF1jUPtELom5t33FX7tRGtLmuZywsTKvrh2k+fhf42/u1zuqTec/z3Klf7v96tK+utirv+dtn9/wCVWrIvo5mVd+3d952WtI1uaXumX1flmZ2pR7rht6Y/hVmf71ZV5sZvuKo/gZkrQul8zbs2v/d+SqF8r/K8z8L/AA1rGXtCJUyncO6/Ojqyf7S1BJqCbd+9d27/AL5qO8ZNron8Pzbaz5pvLYQvHubq1ax7mMvdmadvqG2TZs2/7Va1jNC0yTbPmXdtWuUjuE/g+Y/x/P8AerR028dWaabhm/i30qkZ/ZNqMpc51kd4jRD73zL8/wAladldvIu/5v8AYrlrW8dlx9p5Vv4q37G+eRdgTKbP93bXBOPNI9ej8Bu2Nx+5SF9ys331X+KtNV3fvN+xv46wtPkSOH/XMzKn8Va6s7bZim9pPv8Az/w1xVI8tU76ceaER+o7/McI/wAzf8tKozQpJh5nUt/Gy1oyR+XGHROVX5/7u2mW9ik2fu/3vm+WrjGPLzGVaPNLlKNjpdzcSeXs2v8Ad3L/AA19HfsX/BPxz428UQab4VsNQlu9Qf7PYWtv96Zv4mZv4Vrz34DfD3SvEXii2tvEnFvJdKl15MW6SNd38P8Aeav1t/Y9+GmlfBfxF4a03wToOzxTq37+1s1iVm0+zZvlaRv4Wb722uylpHmkfOZpWlH3EfQf7Fv7HE3hpbLRfiEjJLawxyfZbG12bW/i3O1fXGpeAdH0icaroWg6VHKu1P342Ksa1bub6Hw9o1os2u6bazKkf2yW7dV3f3q+SP2wNO8YweNTrnhv4reIdYhvnaKLSLCz3QW7MvzL95d1TVqez+D3jx404QXvnu3jJ9burpNP8N+PNLh
eblo7V/P+X+L5f/Ha+Y/iV+0J4t1rXLzwB4WezsVt7hotSvLho5ZY4938Kru27q8Z1j/htXRdaSGb4PyWMMcSwJqGpaktqs0e75VWOP5q9c+FPwtT4aabL8Zf2h5vD/hXTdPeS6XT4Z1iW8Zf4mZv30zVyzS1lNGnxQjyHrXwQ1fwH8APCr/ET4l6xpelK0TLbzanKz31x/d8iD+L/gK1l/Ff9tr4kaxo93beHvhvNoOix2Uk/wDb3jLUo7F75f4fJi+9t/8AHq8W8eftKab4km1T45eAPAOjldPX7Ra+KvGTyeRGv3VWDzP/AB1Y1r5r8J/C/wCJ37fnx5uvFXxC+Md5r0G7zdUuriLyoreFV/1ca/dgWoVaOIjy/ZNYw5IEXxE/au/aN/aS8ZWHw6+CejR3UC3TJOuk7vIt933ppp/vN/31U3xA+CPhf4a6Dcw2dnpOseIbW183xDrl5KzQW67fmt4F3fNJu/iavqjRdS/Za+Gvhu1/Za+APifw7pVu2lyXvjXxFNdKktjCvzNuk/hXbu+9XwB+1x/wUy+BviLWdc+Fv7LXgxb/AMOaDLJbp4qvIt0epTN8rNHH96Tc3/LRqvD1MNT92GpnKnX+KZm+H7fw34s1h7nWPEP2izsUjW1tbN1T7VNI3lxwx/xN81fQ2h2fwf0HXNU8B+Lfijo+gQ+GdNZ/GV5ay+e1j8u77LD/AAtcMvy/7NfEX7JXhP4nax4/tvjB45SbTfDug+dq95faha+XH5yxt5Kq33du7+Fa89t/Elh4H+0eJ/H/AIwW+k1jV5tRvJrjd5V5IzM3/AqVatyRLp04y1Pr7xN+0xc3nhhrn4G/C6Pwx4Gs5WR/EmtQLJqWqNu+983yqu2qtr8cPj9ps1t42sPiLrmrWkL/APINt5Y/L27flVo1+6teReHf20dB+I11Z6N4nnsYNKhg2263Vv8AuF/7Z1778MdN8T3Ghr4q+A8PhXUWk2xXWnwxL/pTbd23b977tc8cVRnK8jX2M/hien/AT9szxD4+1XT/AAl8WvBmkvZ3Hy+TfWCu8m5tv3tvy19Q+KvhP8OvAfwz1HR/gb9l8I61rE6y+I9U8P8Altc2q/eW1X/nnu/5aba+SP2Vf2tvgtpPxkvfDf7VPwht/DF7oqSOt1b7mit1X7vyt975v/Qa7rwj4hf4f/EzxJ4q+EvxLutc8PeKriS9lutUVd7eZ95W3fd/2a562MdKTUZfeaRwrqbxPN/26Pgbf6potr8QhbWaap4fZU1TyUXzLxZF/dyM22sz4O/EDXtL0uz0TU55prZv3kULP8sfy/xVteMNU8SX2g6rba3qTP8AatyfvpdytGrfKv8AwGvM7q617SfD8iTSQxMq/wCsX+7XFWzLnnHQ7KeWyjTlc+mfhT8dLDTPESWdzZ7HmuN3nLKqqsf8W3+9/wACr3b9oz9iXwL+0F8C7q/1XwBDcOsTXFlq1rdbZV3L/s1+YeqePPFWh61bX9hfw3Lx2qxRRtF8u3duavtX9hX9uLVbJYfDfjy/aIr8zTMjLEqt/Dt/u16VHHOjGM90eVWwvf3T8yv23P2Fde+BMlxrdhqtrcvCypFHbuy7l/4FXyheXyfMiQ43P/49/tV/Qt+2Z+yzoP7SWgzeIfCv2W+tNY01kuI7ODzPssi/N53+zX8/nxu8G6l8OfilrngO8hk83Tb9leSbcrMtemksRHnid2V4ypH91Pc5TUZE3M8219vy/L/FWLqChV8x/My33v8AarTvbibkbNyfd3L/AA1lyrhSmGf+63+zXRT5uU6qkjn9QYKxm8liW+X5W2/LWLfKjN8/Cr81bl4vms6Qp83+9WPfRPJuf73y130pRPNqGPfTfP8Acb+6q1U8vbtd/lXf92rtwvyrlmX/AGarTJD8rp83+9W0eaRzczKskO5h5gbDVZtU8tvk+6v3KZt2bn2Z/wBn+7U0LOXZN9WLl6Fyz+7v35/h+avsj/gmGyPovjBkP/LzZcDp92avjm1jk2qn8K19kf8ABMdI00TxcIhwJ7Ef+OzV+l+Eji+P8I12qf8ApuZ9LwcpLiClf+9/6Szxv44s3/C+fF5EPK6/dbW/7aGqGns6qif886s/H+WcfHbxa0UmB/wkF0u3b/00NZGn3yXGCm4fJ/FXxHEH/I8xX/Xyf/pTPHx+uOq/4pfmzrtLkdoVd3+Va27W8+yyfu3/AN+uUsZH2BEfaF+9/tVs2N07ZhdFcf71eNLm2OWXwnV6XdQqvCfIy7d2+tKG8fzB5Kb1ZPlVv4a5iGT91shdV3Vdt2vFhCb9rf8Ajy1XwmXxHTR61CsZs3hZ/wC/8+1agm1SGNnbyWwq/dX5lrIa48vZ94nbtdart+5ZpoXkBVPu/wB6iMfcHzGhdX22AQ71+6v3k3VlaheOtw6QzRpudmVf4abdXTy/vn3bvvI396su4kmVnhmdWXZ/49RGMio7heTTrGqPc7T/ALPzVUa7dWaZPnLbqb5kM0n+sbCoy/8AAqptM+0JBN838W2lKJ00+5JJM8a+S7/e/u0yS4RZhGjt9z5t1R3E3mbXeZS/8H96qF9Ik3yb9yt/drnlHlPWwuI5eWJZuLh4V2O6urfMm6su8uklk2O6pu/i/houpkUh9i7l/h31VuJvMjZHRVC/3fu1j73xHvwxEZadi5Nbo3+rT5qrbXmZU2ba0riPy/n8n738K1Tkh2x+ciYdfmTdXHKXuHoUcKRLC+fkTcf7q1YXzpY/v4Zf4lpLdUKs7zbdv91alt4/l+Tayqm/a3y1zSly7Hs4fB80S3Z2z3A2I8ibvmrX0+ZNrQyblDPs3f3ao2qutuod1X5PurV2zhhkhXem3/Zaufmi37xljMHyrSJqaeP9IML7WRfm8tW+9W1DJbKqzJbbX+6/92sfTVmhjCTOvzVoWbIkj/JsXZ8m35qrlifNzjOmX7KZJJDbGHZu/wCea1q2N15WbaRlVN21Gk+8tYcM/wBnZ/ulf4drfNVyO3+Xfv2fP/e/irpo6SvI4pbnR299t/chN/ly/djqW6vNqy3ifJt/hj+ZvmrLsYX2k+czbf7v8VW7dnbdsRvlf72+vUo8nKcMoz+0RXM1tIrWzOz/AD/7rVRvgkn+pHzbtyRr8q1oXFucfaHTc0b7kVfm8yodQt3muEfYqrvX5VrsjU9y5lKMpFLznY5k2o3+9/FRDZodsiPs2/Kq1O1vtma58lnDfNupLO2SH9y77X+8rUpS7mPxTJJIXjZYZm2FkXZtp7W/lje6+Vu+XbJ/ep63DzBt6Nt/8eq59nSRU/fLcMyb9rP93/ergxFblOmjHmKMNrhgltJ5X+0q1oaesKqXv3+dn27v9qkjtXa8TyUZj/Gqv8talrazKx3zcyP+6XbXzmIqTlLlPoMHT5YXJLOxZVR32vKz79s1XLhZl3P5Kuy/c8v+GkjtrlVhh87zNv3/AJfu1dsrW8Wz+fn+8u3btrglKX2T1Kclze8UIYX8wu9srq3DU6OFIz51yW3fdVf4anhhMU3k23Lsm5FZ6uRab51u+xPKRn3Nu+83+7V0qc
akia1b2cNSnZx3MkiTO6rF91l/iWrccKXLNCkOxlZvmVfvf71XrO1MkgeF1CRrtlWRPmarENvNubY8bLs/ufMtelTw84zseXWxEOTmMy4t7mG3CImGj+/u+ZdtWdPtfOVEfllbc+1NrVpLaz/Pv3bWVfKbZ8tW7bR3um8+2sPNaZP3sm/bXU8L/Mcf1rrEz1t7lmlS2dY0WXft3/Kq1Yj02/aPi2Zoo3+Rv9nburetdGe7tfscPG2Jdv8Ae/3a118NzW9uiJDgMv3mTcy10rCx3UTP61zfEcBdabNMr7E+WP5Xj+4y/wDxVZF9oqTXSzpCoZom/dt/DXp83hu2b906R/KnzeX96sy80FJC7JCqhXX5m+X71bxw5vTrcsOWR5y2mzMy/aYY0K/N5i/NUUmkvdeTIj4ffvZV/vV1114fvGjZJoMxfNvWP7zVD/YvlKHeFT8m1Y2rojT5ZaGsbSicRq2nu6+Xv4+6396sa6861j3wwr8r/vVau11bRbmS4T7Sm35Pmkj/AIa53Ure2aR4Zk3Dcv3l+ZmrTl5hfDexzl5LDueb7m3/AJZ/e21TC2yzM803yfxsr/d+WtXWrBLWMu8yjdKq/L95qw5LidmNtZpjd8yNs3Lt/ipc3MYVI8xorsaYIjsFZdrtG3yrU00z2tqyfaWX5v3W5d1ZNjdTmRfnUnd86/3quLI6xs/ktuZvvbvu/wCzWEo9zm92MS3JebVjmdFEv3X/ANr/AOJq/b6xNCws0ucSL/D/AHv+BVhPqW7Y7puXfuZVT7v+zVmG8haNnRG+V9u3+Fa86pTjz6mkanWJuyXm23DzTfPJF97722j+0ZvLO9F3/wDPNX/9mrMa68pfLhTMSov3vm2rSLeTzSM6IrtJ99v4ainGZMpcppfbXklGdvy7t7L/AOg1oafNuhW5eb5f9p6xLG68xCmFT/gf3WrR0+JJvK+7v2ttX/ar0KK/vHl1pcpu2quskkl48ez5dm3/AHau6XG/nM7w5C/J833Y/wDarGt5vLjCefnd9+P+Ld/erZtd8yGZ5mf51X5U27q9bDxPHxEoy3Oo03YzJHv+ZvlTd8tdXo9wlrIiQzf6xPvN8tcdpPlyXaQyP86/LFu/irT0++QYSdI9u/b83zV6cY+4eLWlKMrnouj37rbiaN9u35dzfxV2Wg380avc78JJt/h3f8Bry/Q9WT7Lvk8tRG6q/wDe/wC+a6rRdcghZ03yBWT55FolH3bGdryuegw608d1Eny7V++v/s1aS61CqjY8czfe8vd96uBsfEFndMLO5eRmWJmiZU+X/gVW7XUkaNN77GX5JdtctSX2Tto05HXf2h5cfkzSMvy/dWsy8uIbiEJsYtH96NX+as2TVkjjE1s7Mv3dzJ8tZ0+sQrIEbdvkTf5zfw/7VcVSod9OmbGpXUMLI77vlTdtVKwtW1iaTNtC+xN/3o1+Vay7jXZpF3vcs6qzeU3/AMVWbfeIPJhR0fcy/Knzbf8AvquWpUlE6o0eYtahrX2iF7VHxu3K3y/Nu/vVzOua1D5Zs/lYfd/2vlX71VtW1vddPC8yp/F81cfr2pI29IZo3Kv+9/e/NWHtDo9jIta14ihWPyd8m7duf/arivEHiqVXKbIx/wBNN/3v9ml8QagmnxqNjZ2fIrP8u6uO1rWAzfJtHyfNTlIcYjda8YXOZUR2H8Kt/E1cdr3ip2kaaa5+Zf7r7ttO8Qaoke3ekn8W9t/y1y958zPs+7s+b+LdWVOMzb4S3ceKImUpC7Nul3fvP/Zav6Lqj3F0yXKZMf8ADJXISfNdrP5Ku0L7U3fwrXQaHbzRKibFI3bt0iV1RlEiVPm949O8L3k0cKPC/wA33vu16R4U1KZduz5nbbvVq8r8KiZZIUd2zJ8z7Ur0jw3Hc28bTeWo2/KrM/8ArN1dlGXu2OGpH7R674dmSFrdneMBvlVY5Vb/AIFXW6XeJHvR037Zf4mrz3QVtljt9luuN/3V+Vt1dp4daOZY5k/1v8a13xlM46kTvdHuoZreF3Rv7rqvzNXXaTcJGqpvYM3/AH1XCaLNDNGsPmqpX5k3LXXaTcbriJ3hzt/ih/vV2xqezOCpGUjtNNvE8scrC2zajf8APTbVyONI/kn2rLI+7zP4masTSdSht08mZ94bcyf3l/3lq1Lqt40f7mbeWTc25fmrpj7sOZHH74/WJvs8yedNsKtu3fxf7VcTrmqItw8PnL5bSt8zfMzVta1qmGXzHzJH81cXrWqTJIzPZttVfvL/AAtWMvjNo+6c54hvIZP3MMPl+XKqtJt+9XJatcfbJEuRMrLs2qy/e21taxdPfTGZ/kXft3Mtc1qVxMzN8i+XIjN5jP8Adb+7XHU5JTO2n2MbVr52bY7yM8aqu1fu/wDAa5nWozHvvJkjLx/8tPvV019F82+a5j3yfLE33Wrm9ctxGyXNy67pEZUrmqHXF9DndSt0S1R3hUL/AHvvN/u1z2sRJ8kyQs8jN/C/ytXRag32qGTziqDavzR/3v7u2sbWP3jh04/iVvuqteVW907aPvSOZuLVJ2aH7N80f3mVflqC1hSOQ+T97/vqrs2+aR32Kqf3qbHC8bZdG2fdZv8AZrzZR9/mZ6NOXL7pk31qFtWm+bc38VV4LfyYUR4WIV/4q3LiNI1MKfN/EjVRukSZVd/LzGrLt/u1pGfKaS94qx/vF+dI12tU9nZ+XcbyisG/hb5lqPynj2TTQ7W+9t/vVoWdxvuE+SNUV9v+1upSlM1pxhyly3X5XhSFi396rstm7f6Sj8LtVlk2t/47VbT1eZWTzm++zfvE/hq5GyXqh3fbIyfwp96lGnLm90rm9wRo/Kk+eFnT5v8Aap8dok0iTBGYr/Du+apPs0wRHe1ZQ3/LRn+ZlqXdNMweF1C7m+VV+Zlol7r90IxnL4ih5L+Z/pNtIyt825vvLXrvhg7/AIVAlA2dPn+XGB/HxXmVxZv9oZ38wL/H/srXp/hoxv8AC0eW+VNhPhiev3+a/a/BF3zrMH/1C1P/AEqB9BwtGSxNW/8AI/zR5JeXDrC0Lp86/cVm3Mq1k6k3nW7F33q33m/vVueTsmfzoWH95f4mrH1SHdI8UPyD+7/er8MrS5fhPFp+9E5vVDcyRtDE+WV/urL8rVl3X2xd801sqL951jfdWnqln5Nw8rxsDGu35azLizDM1y82Pl2uq/LUxl3kWZepSBlaHZGv8O3/AGapyRurbN+4MtackKXUap8qL93d/s1Wmsfs8LoX37fubkrSnWjEzqYfmlzIxNSbdCkKeXjZ8se3/a+9WdeL5jNCm3O/5619SjdV3w/7K7WrJuFmhjlTO12fdu/2a9CnKMYHn1qfNMxLxbaOTf53z/x/JWPqEzw5SFPm/jZq2L6ZIyZvl/4F95qxNTkhaY/J/vV2UzjqR98heaCFv3jyFmq/aXF1JLvd9p+7t2VlyfNIzoy/7tW9PjdptiO2KuUeYuJ0mnzHycI+5lT5Grb0ubzI96PJu+981YWhxmRm2bd38O6un0bT5mj2TPgM/wB5a5alPlPVo/Cauj28O
3f97b83+Vrct7e8upopkZvKZP3X7rbVPS7SH/Voiq38Lfdrf0mHyz88zRj7qL97bXHL+8ejTjL3RbWBJGELuxVX+fctbHhvw7DqmqfY4bbzXZf3q7/4f71JbWMy5Tdnbt+XZ/FXT+EdFe41iJEh3Sfwbf4d1Y+zjLQ3qRlGHMfS37Enwj0Oz14eM9bh85LNldI9m7cyr8tfpL+yP4BufDN5e/GbxJNavqWpP/oG394/l7flZl/hWvlj/gnX8HbnWNF0rw09neL9uumkv5JIv9XH/F8392vv/wATa54Y+DfhWbUrDR45/wCz7fytIs2TatxI3yx7qcvd91nwGMrSrYiR0Pw70ebWlvr/AOKmq6fqEzStLbxzWv8Ax6x/eXd/8VXIftS/tQfDX4e6Omm6V42stUmk2/Z45tO85V/veWy/+hV5n4r+L3xRm8L33hKw8KxWmpas0b65q00v3o2X/UxrXkPxK+HusapHN4qubKTVbyx05lSOR1RI1/8AQVWvOlKvKMuTQdKjScrSPG/2kP8Agol8VLPUH1+58VedbWNwy6No9qu6RWb/AJaMzfNXz54u/ay8Q+Jr+2+Inxgvbq5VZd0VjfXTMsn+ztZvu/7tQ/GzWNW/4SLUbDwveQski+VdXEMW5d38Sxs1eBeM9D1LxV4kgbUryS4trOJVt45v4m/3a5rU49fePXoYWUtIns+vftYfG/8Aao8Zab4Amv7qz8P28X2eK3ht12WNr/0zX7qs395q9T+PH7V/iT4Z+BbX4Cfs06xNpkaxQrex28Stc3ky/wCsmnn/ALv+zXgvge11D4feH4vCvh7y01LUtz3V591o4/4VovNDTTbP+xNH3TXV1K0t/fbtzM275VVqa5doy9fM0+q81W3Kc98RPEHxO+IHh2X4XaU9xHp91debr1xZu32nWJm/56t95o1+7tr0H4U/AfwN+zX4a0bx5+0Civb3l15uk+G43XzLpY/m+b+7H/tV3/h3xh4V/Zt+C8/ja80HRbTV1WP7Leax8z3E38McC/xf7VfFXxe+J3xR+P8A4uk8f/EjxldaxeTK0Vv/AMsoo4/+ecca/Kq1sqsFpCI44OrWn/dieu/Hb9u74kfFTXNZv9S1jTRbSW7W+jeF9Li2abp8O75dyr/rG21826hq2q+JtS+3+M9Vjd1+XzFT5Y1/uqv8K1u6L8O9Vb/Q0tlQSfNt2bdtaVj8EdVa68l0Z/n+6q0pVoSleTO2GVy25R/gzQ5ry1TUvD3i23dFXakPlfxV6p8J5viXBrEF54P1JtL1K1iZPOsZWXzG/hZv7tP+CP7Ob3l5aXOpWEkcLSruXft+X/dr9FfgD+yn4G/4R2K5vbONE2b/AN4iqzf7Tf7NeNi8RQcowZ6uDyOpKLkeAfCv4O+KviN+5+IUP9pX/wB77ZNLvdmb7y/7tfYHwp+BNnb+GYba5T7yfJHs+VlX5fu/3a6/4f8Awj0Twz4ia80rSo1ibb8qrXunh2x0m3+z6VNo8Jjj+XcsW1trf7VcVSpGU79D06eUQoxPmfxZ8Bb/AFazazttE+Vf4lT5f++a8g8ffCubwixe80yaVWl2I0cFfpBP4Y0ewgFzCAu5e1ea/GL4F+HviJor2cNv5LK7O3lv8zf8CrGpGMpB/Z/NGXKflj468B39ncfbNNtmeLeqt5zfNt3V9KfsE2/wv1jxNFZ+LfFrWbzbV+ztFv2rVX9oT4G3Pgu6EMMMjKu5/lTctaf7DdroM3xOsdH1m2hilupVW3kaL5pv9n/ZruwVb3vZ3PkMywfs+Zo+if2lLPxV8DdJj+IXwcmmitI4JEvNNhfZHeRt95vmr8qf+CvGg+FfjBryfF3RPD1vomqrp0fn2tvFt+1L/e3f3q/bv9qf9m7V/G/wb8iC8UfYV8+KaNs7o9v3Wr8av+ClHgebSfhfqXid9sU1jdLFLHMnzMv+zX0sYzjKMo6RPm8NU5a/LL4j8zVjdt6O7M+7a+6qklvMYTCXx5f92ta4ZJG/hDt/DUE0aLJwinb99q9mnKHIezUj7hzlxZ+Xu2Jjb/Ft+9WJfWrs+xONtdlcQ+c2zYuW/vf3ax9S03y8v5PLfwrW1OWvMjllT7HH38e19rj+CqfkeYT8nH96uhvNJSSQfuV+Ws64i2syZ+St4ynI5pR5TJj2LtTvTo4/LX/aX+JaszQ/x7NrbvvU37Om75+q10c5lGJLbq/y73bc1fY//BMbaNF8YKvQXNkB/wB8zV8bQt82w8/JX2L/AMEv33aN4xX+7c2P/oM9fpPhD/yX2E9Kn/puZ9Pwhb/WClb+9/6SzxD493Aj+P3i9C/3vEV1/wCjGrF09naQINwMjfeWtH9oVGPx+8YTRnD/APCSXSgev7w1kafN5TK7v91K+Lz6PNnmK/6+T/8ASmeNj/8Afqv+KX5s6bT1haQp823+Kt+1kSONETajb/kZfvVy2nq82x/OZT97atbFjdPGzP1Lf+PV5HwnD9g6Ozut032VIWwv+z81aEM0LQvMiYP3dsnytWTZt5cio+7fH99av28kPyud25n3f71HxESNC1j8tV3ncWTdu/8AiqWRZvLdJXw33kqKO587emViZXX93t/honk/dGFH3Ns+dmetYx+0Z/EUbyJ1489lVk2/L/DWZdKnlSwujP8AJ8jbttad5saQu+7d/wAtdtUrqH5vkTKf3v4qv7IRlyyMeSRPMZJht/hqtLD8xCblX+LdWlcQ7mCJHtZaoTske95tq/N/31WMoyN6ZWX5mWHzNoX7lMmCRt5Lv833ljalVngLJ5e4b/u1FJJuY5T+L5maolTOyjU9n7xnXkj/ADb/AJfk3fcqqZLZmKPuP8NT3gPlnejZZ93zNVC4k6h//wBqo5fsnpxxR1t0s32hdibW3/PVObZHJG8yMx3fw1p31vMq733FlfaG21VuC7N8iM25Pk2vXzvtI/Cfq2Hw3LHUrR72kd3Rfm+5tqeGONYx5xwP9+o2VI8og2v/AAL/AHafbzP52/5nf7vy1lKUz0qdGMTSs98kgR4cf7X+zV+OFJFHmdPuturNhvI42Ub95b5fuVfhk8uTe+512fw1yS5+fmMsVR5ocpfhaSWEbE2hU/v1fjaGGPztjbf49397+7WP9pcf6S7/APfNTWtwm37TsyzP8ys23/gVddKMn8R8Rjqfs5SubMciK3yR/N/B8n3q1LW4e4jjhdJP3nzq2z5a56yukb/l5y2/atb1nM8McTzJ91tybq6eY8LlNWG83xqjv833XWN60LVYW274G8qNNzqz/wDfNY1jJ5jMlsiru+/JHVqOaRm86Z2Qqv8Ayz+bd/stXVRqSlHlM5Rj8Rfkmf5P3LbG+Z/nqOa+gZShhVWbdvbNU5tSmZfJmdRt+Xb/APE1C3zNs87/ALZtXTzGUo83vE8cjxsI9mE2fLTFZFkG/n+Fv7tV1mjm3XO1vufd+792kW+2qsyIy/xbZPu/NSqVOUiNPlNSxjtrj53fYW+by2qzGvnbhtVdr/Nu/irLhuoVkRPP3qqfM2/+L+Kr6yQzRo8j4Mfzf7VeXiKk
pyR10aMYxNWys5p5k2DYnzfdT+KtfT7cxr99mEf9771Zul6gjZmmmaTc3y7n2t92r0OpQ7ljR1H99fvMv/Aq8upze15T1adSnGlozUt4/Lbzkl3M33/3vy028muYV8mD5k3Lv/iZqitby5bcmxhJNEy/Kn8NL5jySIHTZIq7Umaop0+Wr7x1+0pypE1pI67blEjzu+9/Cq1q2do91JjfvZn/AIv4VqlYr9skRFRokX5vLX5d1dDZw2ki/c/e/Lv8t9rV6eHw8fiieXiMVy+6vhGWel+djZD5e2X+H/lpVqHSfLkZJpmUL83l7Pu1fs9P+0Rl5hwvzbf7tX7WxxMk1tbZjkf72/7terRw549TERMr+ybmeEO4+99yt3TdJaG3jm+xyeVv2o0bbq0tN0dIVeF33orfe3bttb3h3wy9vDjZuDS/e37a7Y4ePVHDUrS5vdM/S/Du3DpMzpJLtX/Z/wB6ty10FZIVtnfy93+qb+81dFpvhm2jt4dj7/4nWtjTdF87L7IQ/wDyy2/N5dbSokrESi+U4G+8LpD5UyWuDu/1i1i6v4ZdrgwmHzV/iZkr1abQdtw6OnmFvv7m+WsjXvDfl3Ucy/IzL93+9RGjynYsV2PJbzQX8xvs1tC6Rqu5W3bo2rn77SXjtTNNbMu1/u/e3fNXst54deO3/c221vm83/pov+1XMXnhuFoTNs2ltzPHtp+yidtPEc0eU8t1rTYZpNidF3bmX+Ff4a4zxFpvkys78q3y/wC7XrWqeHbPyXfY0P8AdVkrhPFWmpGux32eS+2Ld/Eq1nKMTf2zPN9at90Z2bfvbtzLXM3E3+lCHZs2t8y766zxRDsk3wzyB/4P7tcTqy/uzIXXK/M7VzezFKpAat5FZtvSH54327l+bdTl1SZXaOZ/m/g3P95axmvodzJ53H/fPy1A2rJc3Hyfw/fas+XmOaVTlOjhv4fMVE+7/E1TQ6hNtZPlTd9/5q5i21xI92/c7b/4asrqUca/675GT52+9uWseWXNzEe05Ycp0P25I41tndgdm7ctPbUPld3nZlX52/hrnv7aST7j4f70Xy1F/bDyL9/e7fM+2qjTjLY5vbe4dZHqUNvb/aXdpEkdflVK0YdQeb5E8wbV+Zo/urXE2usIoEKPiL7y7v71XrPVHW5SF33pJ8rLv2100aPL8JwVq3N8J6DpmpfKHeRiV/2vu10VjqUO6H5/kb+GvP8ARryHy97vHvV/vb61rHWE875Jm3N8y7q9alGMYnmVJHcprE0cT73V12bYtv3t26tFdWhtYX2XMcrr9zan8W6uGi14WapYJu+X5t396rMeseWqJG+4M235n3NurrPMlGXNzHp2n+IHvMP5ytt++zfL/wCO1rWuvW3ls8c2GZtzq275WrzLS/ESSAOm7zV+X7v3a2LHXEkZN/Lxvv8AmespS+0a0T0yHXJmh8yHcNz7drfNu+WtK18Rw7j5k2122713/e/2ttedaTrjlgkMzKrf3m+7V2bXEhkd9m7bFt3N/FXFUqcvxHp06J3c3iTzGCJNJEy/61Wb5Vasq68RXNrI0M253/3vl21yS+JN0aJGkm2P/wBBqG41aZo2Tzo9iru2s/zMteXWrRiz06OHlI6S58UPt2fKfM+4v+zWLqHirdbyokKj5/lZvmauduNWma33u6na7M3zfw/3ao3F8F+dLlkVvvRtXDLEc0viPQjh5RldGjeapNcR+dC+0SPt3fe+7XPatqyW9vJM9s2z7ryR/eb/AHf9moWvJo7hntpsIu75Y/utWFrl5NIqQzOyDd8m1/8Ax2s+b+U3jR+1Ipa9rEzffm3/ADbd1cpqWrPLHLsRl/8AHt1a2rSPcfOiYCvXPX3nW+XeXfu+Zo1raMuaVmYex5feMrULj7VGEd2dv7tZsOm3EjPsdW/2v7v+zWncbJpMbNu35vvfep9rbu0Ox0Yru+8qfeq/acsAjT98y4dLdpD2f/Zra0HSpmkZPmc/dqW1052ydijd/eWtvT9Ptlii2Q/7O5qKdaHOaVMOb/hez3NsRGRliVdv96vRNBt3t7XZvjCQ/MrRp91q4zwzp6Mo8yRt7ffk3/xV3Ogwo21E+4vy7lf5t1ejRkeZWp8sDtvDawtGkP29V3fPuZK7DR1NncQon3WT+FK4zQXht49gfcuz7y/xba6izuiY1+eRFb5ty/w/8Br0KcpcpwVKZ22kXyBURPLDN/D/ABV0mj6lA8Y2PIjfw/Pt3V5/pGrQTW4m2fJ83zNF826tvS9WSNUtYUZvL+4y/wAK12wjzHnVNj0W1vHtWzH+6E3y7l/hantq3kLLcw3MKvH8v+181crb6wkioUuZGDfws392pV1xPM+e5XbJ821q7Y/AcHL73umjq14j25R9v/Af4q5TWLqG2Evzq/mfN5dXLq9+0K009zkLuZmZ/l//AGa5jXNWRpPO2xoPK/iXc26spbGsYyMrUJ3ib59qjd8sa7vu1g6jJ/f8v5mZ5Vq/qF9ukWFJmf5/vb/u/wC9WPcfZlXfNI0Uyvt8xW+9XFU/vHZGU+XQyNQkkmZZrx2wsX3o2/1bVzusalDJHveZvlTZLuRl+atfWr547zZczK+35fMjb5V/u7q5DxJrDx24hL+aVRv3LP8Ad3Vyeh00utyvPKjRyeQ+Nv3Gb7u6sDVr6FpCJpmRm+VFX5lam3msbm2b5HX7zsq7ttZ91qEM87Rl4xt/irzcRKNP4juo8nwiRzTSMyJB5qKvz7vl+Wp5QmwP8qfxbV+7WbHcI8jb3UK38P8AtVpWrJNMEd9+2LbtV/lX/arzanve8ehGMRNqSfuYfmfZVL940Zd4413P8u1dzbaszSQsk32aRnSN/wCH5d1QNHDGxzcL/Ez7fvVnT68x0R94bH5LQna+9F/vL96kjtnWQedDIis27dJ/DVnTbVGZs/L/AL1TxokNx8j4WT7+7/lo1axlLm0NuXmhqTWipDYv5KM6K2591TWcabtjvsVl3J5a/wDjtR28e6HZ1Zf4d/y1citYWVIX+RVT5tv3qPacoRpyY638ldPjtkdS33vLkb5ttTx/vLxIUs8Nt2+Yrfw0yzW2tvnmRVZf+eifeVqsx+Yqw/Zk2+Z8vnVEZfFYrl+0QTNDHux5m/7v+y1el+HUB+FuxYgAbCfCYx/frzq4t/MYeSGD/e+V/lb/AGq9I8PxhPhlsxtH2CbjrjO6v2vwOd86zD/sFqf+lQPe4a5vrdb/AAP80eWR2vlxpZzP/H/F8u2s3WIdrfOi5/8AZa1mb5lm/eNKv3tq7lZf9qsy+t5Y5d/zMi/L93atfhk583MmeRRjGPunL6lp7x7k+VfMbbub5ttZs1ukzHY+1f8Ano33d1dHq1nHH1T/AHKz5rWHzGjtoWXd/Cv96uf2sY6nZHDykYclrtVke2Xa33938VUbousghd9jN9xa2ZmtlkXEPmfwsv8AEv8AvVj6lLbRx74XYFfmT+KroztU+EmVHliZOpLDIz/ufnXn5f4a57UXdZHQn5V+bbW9qU3y70K7m+bcv/s1c9q9wNr84Zk+Vm+7Xq4
fY4K1M5+88xpN7purJuGZdybPvfxN/DWlqCOqPMj71X+Hf/FWZdM8e55h95K9Sn72h49Sn7xXX95cDL42/L/wGtXSYx5gOxvl+XbWbD500ibEUsv39tdFo9pj6Sf7FdPL7gUYzlI3NJsfM5+6yt86766rQ9NS4w/k5VWxtasXR7GGVfn/AIm2o1dloenooZ0C72XbXJU7M9mjTlze8WtLsEkZ9k0Y+fau7+9XUabo728afJ95Nybl3baqeHbcQqj3McbBfl3NXV2FmkjPJDcq4Xb96uOpGXxHq06dKUYpEOk6X5ylILZstF95f71emfAXwimreLre21W5YLuXfcfd2rXKabZfZ7h3jmZG+4vyfLur3b9kP4U63488VWc3h7TY79VlXzY2bb827/x6lHlcRY6MY4WTP1u/ZK+FOm/DP4Z6LDbbrq81KwVrJZF+6rfM25q1vGEj+OvHiTWFst5pfhtdlrb26/Leag38Un+zHXGfAP41eJ9e8SS/DeGzaKbSdNaJmX/l3Xbtbb/tNX0V8K/CPhfS/DMNhYRR+YsrS3En3mZvvM1cMv3kryPzapzJyPOND/Zr1KGFdV8SXLXVxskuNXmb/VLIzfLHHu/hWvnX9rjwb4vvvM+HulJbvaK6tcafp6N5cO5vl86Rf9Yzf3fu19SftCfFC+1vTIPA/gIXn2mSfY5tU+VV+7ub+81eaftlfEzwv+zP8K7fTdHS1PiprJVih3+Z9jkZW3TMv8Un93+7UuVKNKXY68LRk6sbbn5i/Gj4av4T1ObR9YS3udXbd5sMO1fssf8AtKvyq3+zXg+gfC3VY9Yge53QrJP/AKU0nzMq/wCzX0BY69rGrLI+sbUnvLhpJ2kXczbv9qsK6h+2X0eiWz7ZvN3TyMn3f92vmamKjf4T7/C5VOlh+aRw3iLwe+patPqttZRwQw7Yk/vNHt+Zqy4/Elh4Z1LzrnSrd0t/me3k+Xc235a9Pvo7DQfDGuGaFmnjt22M38Tf3Vrx3VtB8YeMNHm1u20qOB5k/wBW0u5qKdT23vIwo4flZ4r8ZvE/xI+Nnj648YeJ5ldY38rTbVflgs41/hjX+Hd/E1ZOk+DfEcbxubbaFf8AiT5a9T0P4J+P7qF5byzjTyZdreZL91q6qx/Z0+JbWsVzYaUt5u3b1t7jdt2/w12VqyjGNmdeFwspu7OS8A+A9Y3R6k9hJcS7/kWNl+b/AHq7nxNZ2ek2qasmiTQyKu6Xcn3f+BVZ8N+EfiLoOqeTdeErqFI0/wBX5W7/AL5rtvFmueHpPC+zXka3favm29wu3buryK9aPMe9Rw0eXmRjfDvx/olnshunVE3q21v/AGWvvb9lXXE8SWdto9nIyJIqq8lxtb5f4a+Crz4d+Etb0m21XR7xVbfuT7P91v8Adr65/Yt16aOxSO1n3vHt/wBcm1v92uLESheMkejh4ycHBo+0ZdP8K+FbX+1tYufnX5XZV3eZVvwT4i0r4heJjo+m20zJDtVty7f92l15/wC3PBdnc6rNa/uUVpdrbWZqT4O3GiaP4gj1j+1rWPajOkbS/wANdcKlKP8AhPPrRnGlJxjqe4W/wlbULBJgm0bflWue8Y/DK+0O3juAjKD8r7a73wF4+s9ejCJqNuyBtqqtanjHyrq0XO1l/ir2ZYfAV8LzwPi6ea5nhsdyTPi79pb4dvq2gyv9m+aGJmST/wCKr5n+Bf8AYnh/4sWb6x+7aO82xTL/AAtur9AvGXhWw8WCWxvIcbd33fvV4P4T/ZB0q18YX0NzDcT2011vguNu3y23fKteVhYxjV0OnPeWpSjM+x5YLZvhqNE169WeK5sNsdwv3WXbX4x/8FY/hzo+tf8ACTaDePNHpWl6XNcJJGzfvLr70Kt/s1+vXg+K6+Hvga58H+LTJdRQt5dq6/Mvl7a/O7/gsN8LZ7z4J+J/EPhW5ke3jtfPfy23P975vlr6aMvejA+AnKPt7n4GrcGTyxc7fOZdsrf7VM2J5flvt37/AOGtW+0/7LuhdMP8zfd+bduqs2mtIV2bhEzfPuT5q9eMoQ91nurmqQMy48mdtnl7f93+Gsy6skWOR0RifvJ89b0lqnmL5e1f9plqrdWqFmR3VQ3/AC02U4ygOVPl3ObmtfOV1dFx/svWVqGmw2+R975/4v7tdXNpvkwtDs+Zl+8q1mXGmzLG2+Fm+fbW1OpKWpyVI/ZOXuLeCNy/k/xfdqpcRw7nfZW9dafubY7/AHfl21m3MCRs2/cq11xlzHNyozWj+benT+OvsL/gl1uOi+MmOMG5sduP92evkSZE3b0Rifu7a+uv+CXAxo3jMbcf6VY/+gz1+n+EOvH+F9Kn/puZ9Fwj/wAj+l/29/6SzwP9oyZ/+GgvGK7sgeIrvj/tqaxNNuPL2JN0krZ/aOwfj94yOSCviW6/9GGub0+R2XYzt/srXxmff8jzFf8AXyf/AKUzyMf/AL9V/wAUvzZ1Oj3SNuRHw38DfxVu29w6yLDs27vv1yelzG3l3p81b1nN5jK78bvvN/drxvfOTl5pnSWl4nl7HT5l+V/n+8taVjcfJsSFflf+KsG3mh2+WduWb5P9qtXS5nkYxv5itu+9VRlH4jGUZmxbt+73zR/IvyrJ/FUMlxM0jOkfH3tzJ95qYskm9Rsbfs/75pZLh/nhO5F3fw/N81aR96Bj8I6WaaSMom7aq/eVP/QqryxI20P/AMCqeN5mZk+Vxt3eX/E1OjXd8kPyVUZcxPL9ozJraG3XeifKz/w/xVnXUMO95Htox/drcvLdFtVfYo3LuTbWXdL8q7AzL96plI1jyGJcQozF0P8A31WddSeYp4kEMbrvb+9W7dW7qGfO0N83+zWPdW/mK/f/AHanm5jojLlMy8m3/P5mTsrIuroN877Xdf7v92tPUI0jU+TuG35X+Tbt/wBmsS+XbJshfaf71Ryx5jXmker3lr9nk3pueJfl3VmzW6TM2zaF/grcuLdFHnHcR8qbW/hqo1rDJH/CN391a+PlL7R/QcY83wmTHYuzLM77gv36lW1SSRnhhZSvzfLVo2b+cSnzfd+an+RNC0SeSzOzsrt/DWHtOY648kYFdWeFUR9uG/8AHqkt5nikOxNoX+9822nL5xjy6Kf8/epm5GzD/H/epx973ZHBipR5fdJ4bi5aP9zNHs2Ns/vNVuHZJbNbI7P/ABf7W2qEbfZ2xN827/x2prW+e0be78L/AMtP7td0fhsj4fMOXn9407WSGFfJRGb+H7ta+nq8kHzpn+78+2se1mfcr71/3lq5b6hN5j7JsCRNqMq/LWrj7uh4EvdmbemyJ8qbFRt3zMv8VTLNNJHI/nLuX+L/AJ6L/s1nW/7uz++2/ZuRtv3quBPLg8ya5/g+ZWStIS5dSOX7I+GRI4XSGNgmz7rPVeS4+ZPOYsd/yU6Tfuf7M6xFv4WqnL50kjuky7fK2vC1dPP9oxkWbjUElk8l32t/s/dWoFvILyQJsYfNtX978tVmbbI6Kioqr8zN/FTI7xN2+Ty97L8jfdWueUuccTY+1Jb/ALlJF/dv8isv3m
q/p9x84+eNh/sp8y1z6373EiI6Kr7fmaN6sW00KyK6bi2zduauOfNy+Z0x930Or02b95++mX5f71acepTKzBEXfs3Purn9LbanD7y339yfdrUhZP8Alt137t1cXNzTvI6YxlGBuWt8hhUIJF3fL5i/w/7tWVmSaNfJRti/Ju8373+1WXayJH8+xTtXbEy7mbbV2Jk2Lvs2H8KfPt3V1UafNL3iKlSMYmxpLNMqTecrPHtX5vvfLXXaDbpMrv5Ledv+Xb/dauP01vmVEhVNz/Kyp8tdt4fXzpPIRGR2Vd/l17mFp+6eFiqkoysdDo+mvIzb3b5flZl+7XQ6TpO1o5tmxGXd/stVHQbXdbxecmGh/wCeb10liqM4f92rr8u1Xr1KcYx+E4alTlLuk6D8rPDtQSfNu2LXS6X4f+yyKfJWUtF8q7/u03w3Y+XGyPDH8q/LG33mrqNL02e4bzo4VRVXay10RjGJyc5FpOkww42Qq+77m193zfxVvQ+H08xPJtlZ9m6Jn+XbV7S4YYlR0h3SL/dX5m/vVqRqyjyfJkD+V/q2Sq5UHtOU5280VPtD749n975K5/XtP8m8EM3y+d823+9XeXGzy1eZGd9jb/7tcxrlrbQyhNjFNu7c33aj7ZpzcxyOqWaQ7vOudqeVv/2v92uW1eOGa4a88r5FT91I33v+BV2Wp2brIiJzDI/zyKtcv4gjdZzs3bpPlZm+78tLlluddOfLM4XXo0mh+SHarJ/F8u6vM/G0dtaxvNawqV835tteo641tcb3vHbEbN833a8o8YL/AKU6u6tu/h+6tTLk5Top1JnmnjCOb7P+7mjHz7vl+9trhtek3bofP2oqbv8Aers/FVwlx5mx/nVG3bfmrzHxRdP5hQPzXJyzqG0sRFR94yNU1b7+z7y/L8tZNxq0c0b3Jm2Nv27t1VtavX3702/7y1h3GpPu2O/3f71Vy/ynDWxHtInW22uBdohfa6/Nu/hqZta3N+7fb/7NXFQas67k3/K38TVet9Q8xvlm2bfm+ap9nAx9tI6ttY2YdH3Fko+2Jw6df49rVzK6kYlb+PbUi3ySS/I7f3qIx5ZGUqkpe6dPb3m2RpoX/eL8rbv4a0bfUtzJdO67l/4FXI2eqQtHu3/8C/2qu2epTSOux9n+y3y7q2jH3zM73T9akVkm+8v93bWo2tedHvFyzL937u3a1cTY6g4VUmf52+atG31abDJu2p/erqicnxHZ2/iF5sbPLYqvzM33VqVfESBm2ffX76/3f9quTh1CaPbDH/wGRfu1ajZPl+zTbG/iX+8ta+hh7M7bS9YmkiWGafcjLt2r8rf71bVnrT7kh3yP/F/eX/ZrhrGbzFV3vG37Nrs33VrbsY7xIUe2m83c6szMv8NcdSpynVRpx6Hc2erQx2/2lOX/AIlX5ttWl1yaSETfM/z7f97dXLWN1cw/8eyfeTczL97/AL5rQtdShjhDo+2H+Nm/vV5WIqcsbnsYWjzStymxeeIEjbyYd22Ph2/iqncaw9rHvebCyP8Adb+9WU155032lHXZuZdv96oWZJLN7ab5BG3ybf71ePWxHNE97D4X3jT/ALSe8z9pj2+X8sSr91v96oG1SaTb5235f73/ACz/APiqprdO0QdEXYr/AL1f9mmTXfmMsMPzLs+RtlcVOod0aMfsj5Lp7iGWZ3b5k2/LWFeN5jeT80W7+983zVozXFyI2tt6hG+4zfxVkXjfudm9lZvl3f7Nb06ntNEYVKMY7mdqW+H9z23t8rP91ayLqO5uPkSFmRf4f4mrQuriFoU3fvfn2/NVW4vljk8mGHyjt2tu/vV2c0oxujk5feM37DuZkRMNt2/MlW7PiH/x35aYs26Yp8su5fuq3zLUsLPb3C/Pt2/cqJT5dB06ZetVh3ryq/71aGn/ALuVd/3VrJ3PJIyTJ93/AMdrY0G8Ty/JfzpRHu3tIn3qIRlH3jWUfaaHUaDcJ5ao6MzK27cv3a6zQbyQTNInzO33F2/LXD6bcJGyI74Vfm/2q6Wx1J9ypvkEvy7GjZfu162Hqe8eVWp+7ynfaPJbfZ2dHZ9z7Uh83asf+1W7pupTRstz529lT7rfxVw2lal5kZmd2V12/u2Td/wGtnT7ry1KJMu35v8AZr0adQ8ypR5TsLXVPMs/9c0e19rrIv8AF/s1r2uqXkEYvLOZndfllVotqrXFQ6nthE/ys0fysrPu2/71Os9amaYyC55+VWZq9GnLuedWp8x6FD4lhgaJIXYFlZvlT71Nj8T/AGj9yjr+8f8Ah/iWuGbxI9q7wWz/AC/xs3/oK1HJ4shUqnnMiL9zbXbHklsefKMIzO3m1yFrf7TC7AfMPJ/2qwdY1wrCu/gs3+sV/mrm5vFO3c9n8vz7XaSX5VrMk8UwtDKlzMvnK21l/haoqR5dCffl7xs3esJIX2bnC7d395v9qsbWdYe32b3XzP8Alrtf5dtYN14qgt3e2S5jV2/h/irBvvEk1xGzb1VY1+dt1cdSPNLQ6acvdNnWvEENnHNM8ykxv/C33q4fxDrk0k0ib/l37kkb5t26q+teKkkDvNt3t8zMtcnrHiItN5KBdn3k+b5lrgqfynZHlkbtxrVtZw7/ALTvZv8Almv/ALLWVeat5jApt+Z6xJtS3SI/nZX+61Vprh3kZ4Xj/eN97dXBUj7T4jqjI6i3u8Ks29Szf3flq3a6lJH8+/Yfvbv7y1ydvqEyxKnk7vn+T5qnk1Sa2yjzbdq7vm+9urilT5Ye6ehTqR925uyaqk0b/eZmfa275flqS31NGXzkto/m/hX/ANmrn21iZtuyZU8z5k3U+HWHaSN0Ta6/e3PtVq55VJcvwndTlyy1OvtdQmkX54VTc/zMv3q0oLy2jkV0Tdt/4FXK2OuQ+YryXK7Gf/V7Pmq6usYjeTYqqv3W/vVEeaR1HQxyJ5kSIjH5fut/dq/tcXSbPubPu7/mVv71YdnqXmbd7rhU+dmq/Y6pDcXH2YoyMy/I237vy0hx97c1Ifs3lt5jxjci7Fk+arUMHkqIUdX2oyuuz7v+7WRbt5eyH5Svyt8y7q1GZII9jo21fmb5/l/2adT3fhJjT5viGyabCuy5+YfPu2s9ej6AGT4akO4bFlP8wHXl686mmS6YpDuRFT978/3a9G8PRmP4bCNn2EWU3zD+H738q/a/A6bedZhf/oFqf+lQPo+HKThiattuR/mjzMxww2ryeTMrN/DUN1H50ex3b5fl3L8ys1XZLia8t4kTcwVdvmN8u3/aqhdM/l+RDuVF+ZGV/wDvqv59qVpVDmo4eFMxL63eRv30zBI/l2r81ZV3G8dw/G7y/wDVbf8A2auhmjSGR7v767dvy/xVg6pbTNDvcKCzLuX/AGv4amNSPNytnpU8PzR+E57ULiVpG+RYgrsrN/s1iXk3kuFd1y38X8NdBqNu+2RCnz7sfN/DXO6pGjcPtTb8u7+9XfRlEKmEjuYmsXDyKbeHais/zsz/AHqxNQ3sp3uuV+Xd/DWtqkfyKUm4VtqN/drFvmi2jf8AM/3Xb+GvUo+8eTisH
Lm90x71tqt5Nt8i/K7R1m3ioU/2v4a1Zmdd+zbsZfu1lzInmb3/AIW+7XqU4njVcHyyE02HazeTtb5/u11Gg27tlHeRtqfJ8n3VrG0+F4nDui/c+bbXU+H7fdGzvMxGz+5XVGXvExwvvm7o1v5Koj/OW+589dvoduk0LPs27fvqtc34ft5gqOm0bf4tldvodq7bOF3f7X/oVRWl7vMethaMZGrpdhbeXGnaSLdtZPmWun0nS5oWbeiqJk/1n/stZWiqjXCQ2z/Oy7Xb73y11Oh2VzGzpM7OVddjL/drhlzSPSw+Hjct2tn5cn2beu3dt8tX+Xd/vV9A/sg+IpPBPjCwv4YftU32jbFH91Y/9qvErK1s7gpNbQ5Lf3v/AEKvU/2fY9SXxdbPYbXXzVXc0TbVbctZVPdIzTD82Dkj9UPhb4f8K+A/BOr+P9Kud2qas8lxdXDRbVVW/hX+81dp4W8Za94f8Kvq/wBsbdJaqkULL821l+Zq8+0rxtrGj+C49N8SQ27XF49vE7bf3axt97av92qnib4gWzalcaPol/G6R3HleTC3zR/LXm1KnN7p+Zxp/vfeK/iD46P4D1aPUtEg8/WFuvN+2TS/u7eNV/55/wATV8RftLftA+IfiVql/wCNpr+adpNSkuPMkb/Wfw/NXrfxz1y8s7/WrmbaIbO1ZUXzf9Y235trV8m/EC8/taOysIYmjT/WxKv3dteZi4Q155H0WT041KsTJh1vxPfTPcpcyFW+Z1Z/u7v4Vrfsbi5Wa33wrujRv3itVLw/Y+Zbx22z5P8AZT5lrtPA/gua41KG2s7NZY2fc3mfe/4DXzlTEUuX3T9Fp058nvGF4ivtV1ZTYWGlSNbzNueSH5mrn1+Cfxa8SWKXL6lHo9lv+S6vFZfOX+Kvqe88E/C74b+BZviR8QtTjsbGxTfLH95rhv4Y468d8YfGLxb8WNNi8W63pVn4c8G287JYfal23N5H/u1vl+IpRjKLPPxWFlTfMtDw/wAVfDHTdDhTSrP4/XE100StKrRMqszN/wCg/wC1UvgnwX480WZI/DHxRsZEkb5FuL1o2Zv91mrK8dfED4Pw3kzw6VCgVtssi3DeZIv/ALLWDfeOPhXqlu6aR5ltNIny/vd22niPZyj7pphZOnLnke9Wev8AxR8M3STeJPDbXMcLqzSWvzbl/ibdW/q2peG/iF4Ru5nsLO4h3Ku26i2yx/8AfVeM/CX49X+k3iaVN4umu2hiVU+1bV/75r0Tw78YvB+rR3OiaxZQzxzS7kuF+Vl/vV5EuenP3T6KjUo1qRt+H/hn4Y/sWGawS4tfLf8AdLDtkRmr0j4f6TqXw71C21Kw1u4htm2rtWL5mZmrmNB0XwfcWkL+FdSurdJG/wBSs+75v4vl/u133ibxI+j+HdNsrnxDH5TXmyJVt/m+7/eqJS5pe8VGPs5H0R4T8RaDqnh1LbU7m6kuIVVYlkl/9Crs/h7oqX2oRzL9lCTPuX5l3Kv+1Xgnw50PStWtFv57+a4juIt3+tZdzV7f+zvofh24m87zt6Lu3NcS/M1VCM5T5UKtyRpSPpTwCulWumo14kburfu/n21va9rN3b2EjWc21JP+en8NcXpzeDZzHZwTW6vH8rLHPUXiH+1LGxl/sLU/MZdxSO4fcrf7Ne1KoqMOU+Lng4V8Xzv8SbS9VFzqkqxPld+167f4aWNtqGp3KXNvG6KnyqrfxV5HpWpXKyGa8mjhmX5pVr0v4ReJHW7CfKyzN8zLXHg8VGGIi5bcw8/wMlg3ym3400NLXSp7ZBny923d/Etfmz/wVA+I2n/C34d6lZ+IZpmsdeiazg8n5v8AWfLu/wCA/er9O/ia00OiPc20G9/KZdq/3dtfih/wWy+MP9vXkHwom01ZraGy3/ao22yLN5n3f++a+wVH967S0PzKNP2mIUT8tPE3h1NF1ibR7O586GJ9qXUn3pFrNm092KwyP89dVfaLtuij3O/y/m/vMv8As1VTS4VhV9nyr8u5V/hrr9ty+6fVUaMYwOWmtfL/ANGSFtzVRl0lNjJ8qjf8q118mj7ZGMyMyN8u7Z92qs2kl5CnkqqR/KlEanu2iP2PMchcQzQ/uUdSG+4rLVC8sX5eNNy/3WrrbrS3+1fP8rbN3l7Pu1l6lp8Z+4ny/eaumnU+E5JYeMeaTOJ1axRN2+FW/wBqsK4s8A7U+b/arttYsbYLsfbvZG3LXMahawqzp8wVf4v71dlORw1Iw6mDffd2Oiqf4WWvrP8A4Jehho3jLOcfabHbn/dnr5Ru49y79n3m27v4q+sP+CYMaxaT4zjXtdWX/oM9fqng+78f4T0qf+m5nt8Ke7n9Jf4v/SWfPn7SEu39oDxlC/3f+Eju2/8AIhrk7dkWVPn+9XW/tGr/AMX/APGZ+U/8VHdcf9tGrj7WRFk8z5T/AHK+OzyX/C5iv+vk/wD0pnjY6P8AttX/ABS/NnQafJuby3/hX7396tfTpN2Pk3L/ALX3a5eGTlXP3d+7733q2tNm8sId/wAyvury/fOXlOmt7j5n/fMh+9WrY3TsvnI6qW/hb+9XLw3Dqzl3+992tnT7h1Yfdx91V2VEveM5ROnhk85Vm2fw7W21L8kkPyJn/gf3ayLe8RlZ8MqL99quxzIy7IX2bl3bquMebYwlEsq0fmb5vl3fKn8NTKyeSsKbt0f3t3zbqq28k3k75Pnf7u5fmp8Mjsw/fcbNv+9/tVfKjD+6Pnt8Rp++VdqNs3fd/wB2sq7jcR7Plx975m+9WjeXkyv/AK5cL95WrMvl3Tb97Z/8dVaZZn3TJIwRE+RU/wC+qzrqNI5vItk+9/47WrqEh8z53+X/AGU+asm6fd9//Wt/tfw1MTaJja0tyu1PlZN/97c1c9qW9fuD5mf/AJaV0GoO8at/C/3vmT5WrA1SHG15H/4DUSiax2Pari186RoU+9/tf3qg2/u40d/+BKlaq2e64OxGI2feanSeTEvzp87fKrbP4q+Fl7x/QGDqc0DFa3RXXenzSfN5ar96mfZUmUeSkn+xWpbxzI3+kpG3/jzbabfWPlvvSHaqpuT+Go5oxNvaMxWt03vC+5N3/j1QSQvtVf4FbanzVpXlnub+L/gNVGXb9+Flb7zV0KVzkxlSHIVZLjbuR5MMzbWoWYxt5P2fcNv3mf5ahulhhbek25arxuVkd0dXX727+7/s120o8stT4zMKnMbtlcedMm+ZlVU+6v3a0obx5Lcwo+z/AGv4q5zTpH2h+7VrWbPtP2bbu/jZmreEYx1Z8/KU+c6TSbiZpGT5U/hRm/8AQquXH+sfzkZDsVU/76rCTVtqi2mSN9vyrtq9HqFzND52xVH3fv1EZT5tRSlzaF+ZYcv8izM33tq/d21R+yQzLK6blX7qbX+9RNqDxw/u3yGfbtVvu02PyZ1Z0fay/dp+/wDEEteUGWPc0m9i6/eWRf4qpNHbNvuX+dd/y/PuqzfM6yLNJNv3fxLUK2/ys/kqC38LfLSjKQL3vhK0d9tVoXRVZX3fMtaFnfIwZ3hVf4fMX+KqsduJ2Uum
7cv3d33amk8yFRGj8fe2rRKMZe6VHnOj024eRf3KeYzJ/e21prcfZ1PkupZv4f4VrmNPuts0saQswVdz7v4a2bPY0aB5m2x/fb+Kud0ff1Or2nuWOhs7uGE+ciMq/d+WtNLtJFSzhRnK/Mm2sOxWa4+REUo0W5Pl+Zfmrd0+WzjiCJtfb8y7f4a6adLlncxnJVIWia+myTKrTTJzvVV2r92uv8M3lsu/fufcm3c1clpW+OL/AFO91Tckjfdaun8NyfaNs0yNEq/wqv3q9zC0z57Ec3OegaDM9zCNjxptbdKq/dauq0G1tm3XO9pIm+bbsVVWuF0uZLVY4fIWNGXcit8zbv4a67RrqZpbe58za33mXf8Aeb/dr0adP7RwSkejeH4/O8p0f7ybUX+9XZabawsEtk+QfK21V+9XE+H7jy1T54ztbc6xt8q13nh9vtEf77yzKyqrtH8zL/8AE1rGPLEn/CdBo9n5Nr5Lopdmb7v3dtW5l248ubLKn8TfNVeGa8tYV+fP+z95d1WZCjKX8jY2z96zJ96o5oyKjEqagqR7kd1y23Zt+61cz4gjS0j2XLspaXcvy/dauj1DZHH5z+Yd3zRR7fu1jaozRw/ImWaLd83zbaxlLlNIxOV1hUuml/crCm75FZf8/NXH68sMgEqc/e2fNXXa1CGuDC7+YN3mbo/vfd+WuN8QSQwq5d+d7SfMm3d/wKsuc3jznn3jC68uGaZ9vlbN21fvLXk/jpvMjaG2RVRdzRN/Ev8AwKvVPFU3mJvtodu5PnVfutXl3jC3hjjbZbMsvzebub5WqObqzX4fhPKvFKeTI6JuzIm5F2/erzDxRCity+CvzOuz7tekeKP3kjoiMpX5X3NXm3iCP/Xec+7bu2L/ABUub+Uzqe8cF4imdY5dm37/AN5a5ua4/fbH53J81dF4hWZf+WON392uUvF+zsfm3v8A3a2jyyOWUeUtW93tYo6ZVa0bebdH8j7f/Zq52GTdJ5KDFaNvePGqoNu3b/FWkokRkasF4jbod7K3+zUkd47Mdj/ef7y/3aoQ3Wfvj71WM7vuO3y7ay/xBzSNKxutuzyvm+Xa6slalnIkapM6K53/AHawre5RWWXeq7vlT+9WnYs8f39vzP8AIy1UdiZRN7TbjyFMexfu/wATVpWO+SPjcu59y7v4ax9PWFtvnD73yu1a9nN9onXzplfd8v8Ad+7R7QI0zTt5PM+ffkL/AA/3qvWtr50/nPwsf8Tf+y1Rs4xt2JuRa07ePZIg6IrfLTlW0kyo0zW09Zljih/5Zt8u7+KtuwaaFETep2/3X/8AQqx7P95H8j5WP5XVavxzMg8lHX5trLuavNqVvtHdRw/MblvqEy3SzB13r827ftq39qRd3nP5r/wL/drFVnXZNGy/3fl+ZqveZNMpeEL8rqqM38S15OKqHvYOjJF9Lj5Yk2bVb5tzJ/DTbiTbIkKbmf5m2qvy1VhZ23o42Ps+Vd9NW6muFD79zsu568mpyyqns048seUluZpo23w+WnmIvzbvlaqlxff6O80M+/dx9yo9QYMv8OGX7yp/FWfcaj5sapZwNt/ij3fd/wBqnTlt5DlHlkTtqj/wJhVX7zJVeSRHs9iOzbU+XdUdzsWZEh2/7W56q3kkyqzzI2FX5mX5q6Y8vN7phKPNEgmk2ugmfL/88/4apX0cM0ju7thf++d1TeT9oVXhmb5fmRv4qrXVi8dwqbGK/eZa6pc3wnHKn7pRk7TO7JtfY9WIZJlVkMzMq/Lu/vVNeWLzLv2bAv8ACyUn2eaPYibfv/3flo5eaMSffiTWMKK3yPv+fc6ru+b/AHq0bVbZvkR2iVf4W/vf71Z/lzW7s8PG75dy/dq3ayIbcJ50gl+ZtzfdpKXKXGJtaTqDr883+sVPl/4FW9p1x5OxJpm3Sf3U+bbXKW801ufkm+ZV3Ju/irSt9QS4eCZEkRP41V/vV3U5cxzSox+I7bS9XfSZUSZFZZHZfm/hrZXW9yqyXK5k/wBn+GvOYfESMwSdGYq/7rav3a0ofGEzf6HNMpVfuts+bdXo0ZHnYinGUTvl162WfZbTSEyf7P3qjbWHjjd5h8iy7dq/3a4638SJNImybDLu3Nt+VaZN4mRV/fTfLub5d33mr0acjyalM7bUNYeaPZZ367PvbVrNm8Qf6QzunyR7d9csviHTWmVH3Kjfek3f+y1BJ4ieJtqFZWm3N8v93+HdXRGpy7SOaWH5jevPEkPmTfvt25/733VrNv8AxNczKUebZ/FE2z/0KuevPEXzMjxxmWP+6/3qwtQ8SIv7nfIYv7q0qlaPQy+r8pval4qfZvR1DL8zrs+asjVPEUfktbQzKzb/AJ1j/hrmNQ8SbY9jupVW2uzVlXWseWrQo+3/AHXrmlUlzGscPy7Gtq3iCWNfJedt395U/wDHaw7jUNzMiTKq7/kXfuqpNqTyYTf8y/wtVSaZ4/nwu1nrlrSlI19lylxb658sskisW+b/AHqkjmfzN/nR/L8ybvu1kLeOkbTed8v+zUU2qOzDY/LcNtrm+L3S/hOguNSmZVm+X+79+j+0pto+dUaT+FqwZL4zL8/y7futUkdxN5i/6tv9pvvNXNKX2TpjLlNuPUnkkV5E+RV+epmuvNY7IcbV/ibdurGjvEkUQvDz/vbamt7r5UT7399t9YyjynoU6kZG3DqFyjJ8qsi/N5n/ALLWvp+qPt2bMf3JK5iNfm3puI+8vzVbsL7azb5m+b7sa/w1zSjJHZGR2djqD7t6TfKq/MqtXQaPO6wi6d/vRbdzfxVxWj6nuZUd8MrfJ8m6ug03UvO3Q/ZlO197/wANYylM2p0+Y6K1u/Lj+R2xG3ybvmatGO8fzF+T5Gfb83/oVY9ncJfND88cUknysv8Atf71X4ZPMZIXRW+f5v8AerHm5TaEZy+Iv+ZNuTftfy/vN/Fur0zw65f4Y+YV2E2Mxwe3368sfEjRI/yv/ufLXqnhlU/4Vqixn5TZTY/8er9s8C23nuY3/wCgSp/6VTPqMij+/m/7r/NHnt5H+5itn+5Ii7v7tZ8kMyzSp5KhI22r/d21qSWb3CnenCvuTb/6DULbPOh3w/Kr7Zdz1/PkqnN9o1o4X+6Y95buWNtC+xmTzfl+6y1i6hIixu6bR91nWN/vNXRX1u7M+xNyK21v9paytYVIdsH2Zl2oyp5f3v8Adp0/dPVp0/7pyGuRXMzffkVY2+Zvu/N/7NXO6sfLb7NMjJt/irq9Us90n2l3ztXYism5WrmdS86Ni820p/drvo8vxGv1X3DmL3ZIzIgVl3bflrJuo+S/3Ntb2rQpG2P4W+Xy1rKns3DSfuWAVflr2cPLljc4K2D5YnO3lmlwz7Jtzbty/wC7VWOw/wBKeR/+AVrzWe5m/c/w/dp32fy4N8m3/YbZXfGpKMeU8epg483NIi0+zSZvkTCr9/d/FXS6OszRjeioW+VmWsaxhdFX523b/wC58tdFpMflrsRN27/b/hraNTkMJYXlOi8OxoNls+4r/Ht/irudFt/tHluiMzxoq7WX7v8Au1xuhyO
rIyQqhrufDLPJInd2/vfKtVKXNE2o0Yxl7vU6fQbONVLwwySvHFuVY9q11ulRzeVbzIm3+J1b+GsHQVSSNIYfLZV3bpN38Vdbodm81uj+Su5fmfa+7dWTX2melTpxL+k28zWvz229G+b5f4f92vVf2f2ez8XWSeR5paWPyvMT737z7tcFZ2MK2fnbN7q/7j59qq1dz8KbeaHxZDNYW0yXEm1U2v8Aeb+9Xn4upyYWpM6qeF+uTjQl9o/S74m+G4bT4ZS/EzwZ4psLzWtAtYWk06RvMjXb/s/xf7tfMHwX+Jut/FzxlqtnbPJJqlxPJdXEdvFt3MzfdVa+P/DP7SXxy8E/FbxVHZ63cSaRb6pI1/DNuZY/m27a+z/2Q/i54Bk1Cw+IWgww/wBtLexyrG0G1ZGr4fKM3nJSdXY8XijhChlyl7GXNJamT+1t8IfiX4VvNLstb8PXji4+ZpGT93GzL91q+cLjwref29NDqUKxxWfyRf7392v2s+K8ejeLvA3/AAmfjzSLGWBtOxArL8vnMv8AD/eavzg/aO+FvhvQ7BP7N8yaaS6knlVYvu/8Crtz/FUI0oqG8j5zhTC162KenwnhOmrbWypGkPlSyS7du37tegeFfFPhLQYY45LmMTyP/oqsn3o1/wBZI3+yteQeKtUvrWZLO2hbz1+5I275V/vV5r8VvjRqWh6bqei6Dcs1xfQfYpbpW+aOH+Lb/vV8pTjKpLlW5+jYiUMPDlR6b8fP2sPDHxO8SXOt68kieCvB8XkaXpqvsbVLrd/rmX+7uWvh74/ftWePPi14mmnub+4trCz3Jp1rHL8sa/w/LU/xI8XQ6ho6eG7CHyYN/wA/z7mZv7zV4zqWoPcTTwWyb3j/AIv71fQ5bl8YyakfJ5tjJThaMiDXvjJ4q85oZnbZ/vbv+BVF4d+N17Z3W+a5bLfK67qyNQ025jXzr22x5nzbWasi80mGVftKbVb/AGa+jp4XDOlySjY+TlWxUZX5j3bwZ8XptQmFyb/ey/eVW/8AZq9K8K/Eie/uGmS8k27l/d7q+QdNnv8ATXVra4kT/davR/APxKvLEJvmb/b3fxV5WKy5xu4HuZbm84+7UPqez/ac1v4b6lazJc3gtYdzvCr7vmavXfit+1emqaf4Pe2RY45r1ZZZPNbbuZfu7f71fFs3iiPxJqEWnxzcfe2q9bPxG8bPp+l6BpX2m4LWNw0+3zf4tv8AEteQ8JHSx9HDNpSpvnP1y/ZX+Muj61p6/wBvTbYo/mf978y/L/DXvPwv+IfgzSW/4SrVbOGSzXcqyNPtRWr8PvDf7bHifwHo722j6kyySffZvm3f3qitf+Ch3xyWzv8ARNK8W3Ahuk3RRrb7ttYxwmL2ggxObYaKP308Pfth/s1W/iR/Dd5qFvDcyT/6M0gXbGv+1JXpWkfETwP4igku/BviKzlRfmfbdb1r+Z7wz8Wvj34y1ppn8Q6ldy3Uv+rhT/vpa++v2Nf2nvGHw7htPCXjGwvoYm8tW+1W7Kzf8CrGthsdh4c9SzMMux2ExNW0vdP1Vm1r+0rUXP8Aq3ZvmXb/ABV2nwN8RSw+IvJuQqlW+RVrwvwH8RrbxhoMWq28yusy7lkjr0P4U6tdQ+JYXhdg6tudl+avBWInGpFy/mPezTDwqZdNf3T6A+OHiZvDHg6XXLi5W3tTbMtxMx+Vf7tfzQft1fFi8+M37Sni3xiniS4vIW1JrW1jaX5I1jba3lrX7ff8Fef2lY/hJ+yVcbtSEV3q0v2Ow2ffZtvzMq/7Nfz+XSw6lqCXN/c75vNZmmX5fM3N83/Aq/WsJKNajGbPx7BYb9/KZjw6bDI3/LRmb5v+BVLcaXc+Tsmh3L91mj+7Wva6OnmSp5P7tn3I0laVrpcEf7l7Zm3Ju87ZTqVPZyPoadH2hx7abMtr/o1ssifwf7NULjTX8vztnz7/APV/7VegtYpGzfuV2fwL/drJ1LTobdvMd1VvvP8A7tZKvzR1H9VjGRxmoaVM0e+ZGLr/ABL/ABVl3WmzRsyOWV/7ytXZXCosrJ9mZwz7f7v/AAKsDW7Xb5u75GX7rbN1dVOpI46lOH8xwWpaSFaV5k3N82xv4q5fVNJTaHfhv4a9F1PT4JIX2bS38bVyuuWe0SD+H+7Xo0ec8rER5ZXOA1KzmikL9fn+8tfVH/BMeEQ6T4xwxObmyPP+7NXzZrFvt+d4cbvlWvpj/gmlF5Wm+MRnObiy/wDQZq/V/B//AJL/AAnpU/8ATUz2eFVbPaX/AG9/6Sz5w/aTYx/tC+MJFlVf+Kju+G7/ALw1xm1Fb5E4b/brsv2kZU/4aD8YqzFc+JrsZP8A10NcarJu3u643ba+Lz/3c7xX/Xyf/pTPKxijLF1X/el+bLkM5HyId+3+KtW1vnX5ERWb+81YNrNJyiOrD/ZarsN0I8fw7q8s5ZRhE6WzvH2r8isW/wDHa1NPuJI5C8x3fwp89c1b3m1F8l/4N3zPWnp906/O83y/edWoM6lO51lneH78gX5l2+W3/oVXYbjzId6PJ833/n/9Brmre+jV1m87f/DV23vk3BE+X/e/hq+b+U4qkToo7qFVDmFkOzanzUkl87R8Iylk/wBW1ZC6hu/10i/u/wC7SRapDlPJf7qbZfn3bq0+Iw9maUkkednmbdqbd2/5agkvXVkf+Gqbao8jFEMaj/nntqvJqiOrOm1v4du+p5v5TSnEtXl1DIu/5vubU/hrImussu9Pm/2v4qbc3ifxt95qz5b7zJndnz8nzUe/8RrH3iLVrqHa2+bdJ97/AIFWBfSPJIzuGfd9yr99cPMu8uvy/wALfxVlyS7mPzMP/ZaylI1jyH0ZHHIpVJhnd/dp0dv9sm+SFYgqMzeZ95mqfy/LfyX++su7dv8AvL/DVn7Okcab+m5vlr4ipGXU/X8LjJR90zY4X2/O/wA6tt2r/F/u1DqFqkkbHfuZn2s0n3q2ZrF45IoXhVVjbcirVWbT0kkKJH838C1zxO/23LtI52+t5priUfdVU/76qjdWs3mKjpjcu75XreurOaR22W33v4l/u1i3kcbSedt+VflSuyMTixFaW5j3lq8ay7NvzJtfctZ8kbrI0MyrtrauoxGjb4eP7rVSnt3WTf5Kq+z7391a9Wjzch8lmFT2kiPTZNrL87bW/wBitjTlmmKR/d/vNsqnZ26Ltd+Qu1vm/hrWs49q/fbLfxVtKXszzI+9ISOF5mX7NM29X27q0o98J3vBIh/jWT/2WmR2/k/cSPCvt/2matLT7GGNWffu3fM7M3zVzyqFxjEreT5jCTyWLfN+72bamWzuVXZAjRJ975au2tq6xssbyKV+dV2bvmqS1hh+0M8/mAyOqqv+1S5uxfLzGdeRQtMjvHubft2/xbqhu4fJX54Wbb/Fs+Zq9I+F3wgX4kpfz3GutZm0mQKy2+9n3bv9oY6frXVz/so2lzEUm8dTlyc+Z9iGf/Q6+9yXwu434gy6nmGAwynRnflfPTV7ScXpKSa1T3R6mGyfMcTRVSnC8X5r/M8KNvBGqzP8vy7v9r/danRqkk
qujx71+V1Va9wn/ZLs51Cv45lGO408f/F0xP2RLCMEJ45lGev/ABLx/wDF16i8FPEe93g1/wCDaX/yZ0RyHNV/y7/Ff5njCzOrfuduN3z/AN6tS1aZpPkf/tm3y16on7IumqF3eNpSydHFgAf/AEOr1p+zDbWgIXxrK5PQyWQOP/H60Xgt4jLbBr/wZS/+TD+wM0lvD8V/medaau6MJDuD7/nXZ/DXTaUz7kjS23Mz7flTbXT237OFrby+YfF8rgfdVrQcf+PVq23wXtrZdq+I5znhmEeCR6ferop+DXiItZYRf+DKX/yZl/q/m62p/jH/ADOftVS3l86Z1RP4Fb+L+GtnS7iazmaG5T915S7vL/hb+Fa1bf4YWkUgabUzKigBUaDgY/GrEfgFIp3mXVXw5yR5XP55r0KfhDx/HfCL/wAGUv8A5M4a3DGdTndUv/Jo/wCZd0CRJJNt5M2+F9jR7f8AZ+XbXYaDdPHshSaFW2fxfe/4DXJafoLWEm437SJnPllcDP51p2MjWYJYl3LZ3ZxiupeFHHyVvqi/8GU//kzgnwjnz2pf+TR/zPUPDd150my53YkfZ9/5l3V3mi6slqyQv/yzTam35WZl/vV4bpvje604Nts1YtjcQ+3OPwrdsvjbd2YBHh6JyDk75yc/+O0v+IT8ff8AQKv/AAZT/wDkyY8HZ9H/AJdf+TR/zPfdJuHVU4Xay7nZm3KtX9Nvt1usyIxPlbpV3fKrbq8EtP2jLyzgWGHwnCNvpdsAfw21ZtP2nb604TwdAQeubs5P47azl4Tcfv8A5hF/4Mp//JmkeEs9X/Lr/wAmj/me03rLJthhm3MvzPu+bdWLqG+OFvs20fN92OvS/wBmX9kb9r79qHwRa/ETRfB2ieHdA1KJpNO1XxBrTo14oZl3RxRRPIFypwzhQwwVJBBrlv2vP2af2n/2StD/AOEr+Ifw+0u/8PSXSWw8Q6DrLTQpK4Yqro8aSx52kbmTbkgbskCvk6fDOZ184eVwlSeIvy8ntqV+bbl+Ozknpyp3vpY4aeTYyWL9hePPtbnjv2338tzzzVGRZJd+4fJt+X5dtcR4qmRrP/Q0U7fl2t8275azLr40XVzE6f2GFZjkP9rJI/8AHab4Rl8YfFbxZpvw88FeEX1LWNZvY7TTbKKQbppXOFXnAA9WJAABJIAJr3q/hLx9SpupPCpRSu26tJJJbttz0SPZfC+cQi26aSX96P8AmcT4pkH2VoXfYNvyLs+7XlnjiaHy5cvs/wBrZX6WaP8A8EIP2u/Feix6r4h8deCdDubiP95pdze3Ezwj+6zQwshP+6zD3NfK/wC2P/wTO+PH7KtzZ6V8Y7eCGw1OSVNM1vSZFuLW7ZApZQch0YBgdsiqTyQCASPlsp4dzHOsx+pYGVKpV1tFVqV3bV8t5+9om/dvprscWHy6ri63sqbjKXZSjr6a6/I+I/FXnTzNsdgy/wDLRk+Zq4PXrOaSSWf7T95/vbfmX/Zr7z/Zo/4JCfHX9tnVb1PhJqCfYNNkSLU9b1YLDbWjSBiozuLyNhSSsasRkEgAjPefH7/g2U/at+FPhG68cad8RNI8U2dhayXOoReHlb7RBFGpZmEU/lmXAH3Y9zHoFNXjOFc2yzNVluKlShWdvddakmm9k/ftFu6sm03dW3Ir5ViKNb2FRxU+znH8ddPmfkv4gW1+yPsLNtf+GuM1C3jdnf5v9mvqn42fsd2vgT4d6l45h8evdvpypJ9mk00IJN0ioRuEhx97PQ9K+aNWt1Enku+F/urTzrhzOeFcXHCZlT5JyjzJc0ZaNtXvFtbp+Z5uaZbi8uqqniI2bV909NujZzY3qzJ90U+3abc53s3yf36ffRoku9EZf7tQLNMrK7wq7L/D/erzeb+Y8o0Ybr5WfZ91Ktwyfu/O3s/+7WXDO7ZjRFz975f4a0LV42j+RG3M/wB3+7WUo8poadlImVff/B95a19NLtIm9MfJWTYxwrHsR8j+9/drZsVfcqTOqq1R7TlLjT5zWsfOkKee6o6/drbsGhl+/wAHZ8jKtYtrD5hX7y/PuVvvfLWxp9p+7V49yqv+t3JXPUrGsaPLI17FX++EX/gVbumrcyR702qGTc/+zWdpSPtT59iM3yfL96tK1heRTCm4+Z/FXLUxUfhZ20cLItx2aRqs4mxF/wChNWjZr9l3JNCroy7n/vU6xsfMWONwrL/D81WWRIZsQosu1/7lebiMVGMbHsUcDy2Yy2j+ysnkptLPtXy/mqzHM/l73ePar7kkV6HjdsfI2/8Ah3fxf7tSx2ci/uXRtn8S/ery6laVSMYnr0aPL8IKzw2/32fc25G2/wAP/wATUUN1JJ8nkrs/jaP7tT3UDyRlIdq/Nt2qnzLTlsUhhZLaZd8n3fk+9XO46ndGnymRJMkkv2aa22K25Ytv96qrXjtIh6J8ys2yrsmnvCpd7mb/AK5t/DUE1nebfsxf733ZN+3dXauQ55U+WXu7lNVdZV2Pn5Nvy/8AoVSyRpqDFHRg+/8Ah/8AHqsrp+6Rsortt/e7aks7F5I1S2Rl27d235mat40+YzlGdPczZLGHc8RhZG2KqMq7W/4C1Tw6a8cjb0Vvl3bm+9/u1pNY/wCnEXMLB9n72P8Au/3dtTLpqSSb97Lu+626uj2JyuMfekc7eWu23cSblZk3fL/CtRrpaXDQv+73t/wGtqazS4uFTZt2vt3L/wCzUl1Z7pEdH2BW+Zl+61bRp8sfdOepzSkZsenvFIkMztkfM0kablX/AGaLrTdrb5ZWLr8qL/C1bCw7m/czbh/BItM1TS3WT7Y+35ovkbf/AKv/AIDWEo25Wa049DH8mEBXmf59m5NtWWvHtFSZ3+7tVtqf+g1DfF0ZYU24Vdrs393/AGaz7q4eZUmRGWJV27VraMvshVpx3iasmoIyvGkzIVbd81RLrkLbpk+/I/8AF975ayby6hmX/RnZjGnz1XuL54Y/kRlRv4q9ChL3PePJrUftG/JrUn+u8759vzRqtV217C7HuZP93fXOS6g6sPJf5mf+Kof7QkVmd34X5n+Wu+nPm3OSWF9pqdVHrB8xn+b5du9Wb5adea1tt+rfM33Y22/N/wDE1zUd9cou9JlG6ludSn8nY7/N/Ay1ftIxOmjl85QLuoah5ibE3Yb+Jf71Y+o6lNDGNn3mfa7b9tR3GoPIqfPg1l3DPJuM24/73/oVZe25h/2a6fvcoy81CZWXD4Zn+b5KoTXzzMrojMn3UarjR/aJNjuu7ZVZrErCv3vl3VhUrGjy2XxcpT+0TQ7/AN58zfc3J92hrhGbf/s/8s3qdbPbvd0Ulv7zVHJYPHDvRP8AZ+X7v/Aq5ZVv5jnqYKZXWT5flDbW/hX/ANmpqqjSf3fl2/71Sx27xyI7/KrJteSnN8q/Inzf7lL2nN8Jwyw/LLUhjV45G+66s/3aPMfzG3809ofMK/3dvystRv8AMqRvbbGb+Kl8XMY8vNIkW+eN9m3lv4qtwX0MaCEPt/8AZqztr2snnJMzbqfBNbNMjuF+X5V3fw1lUj7mptTly
zNi1ukZtiI1XtLeKOZnR9qN83ypWRb3HlyK7vub/wBCq5bzO27ft2r/AHa5PflHU9KjU5Tp9NndZt8cyqsjfOtdDZ3Dqv2lNqr93bu21xlnJ5cKzJN8rff3VvWd07Yh+VkjTd83zbqiXw3O6nI7LS7x/PR3+VfvbY62rPyVmCJ0+98yfdrlfDd+P40Zz935Vrp9Pjdo3S68zezLs+X7y152IlLmPQwsYyiX/sM0LDemwTfPFtf71ereHyj/AA7UxggGylxnt96vMrODbcLNM+Sq7drfdWvTvDqqfh+qKMgWkoGT7sK/bPAWTln+ZX/6BKn/AKVTPqMogoTkl2OHWOGORkR5NrbVdW/i+WiS1hGXh2t935f9qrMdvDIqfaXZFVvk2/N81L9l+0R7JkZh83zKjfw1/P0Y+/7p6uDomFfaW8TfOkgDbvljbau6snWo9sZebc+35tv3Wausu4d0zohYWy/KjMv3mrndQhSSN03sWk/vPXXE9Cnh4cxxOrKPnf5lhVNz7V3bf9mud1K12xb32s+3btX7u2uy8Q6PcrHs8tWVvm2q3zVz+paXuj85EZCyb9v92uqnL3YnoQw8eSRx11GkeYd6yOvzN/s1nTWsPlrsds79zru3V0mpW8LRv88YMi7vu/M1Zq26LHv+5tTb9yvSo82nY5a2HhIwJ7PdcbymPOf+GoZLVzJsTdhv4WX5a15LdI3Hk8/xf7VRzfOq7Imc/d2/3a9SjKK+yfP4qjCJSt4Z41+Xbtb+Fq1rH942x4VXb/Ev3apyRpHKZEm3H7q/3qksRtmZ0+9/eX71b/YPGqSinqdX4f2Dakb7v9pq7fQbySRkhuZlii+6rLFXA6Ldx+cYdjI7fLuX7y11uh3zqzJNMz7fl27vvVfLzR94iPaJ6JoN5ZxzvZwOr+Y23ds+aus8P3zNa/Zt671T5IWbbu/2q850fVHh2JGnzr8zbV/8drq9DunY/aXnzufd5e7burm9pynZRqcseU9J0m9eORJpkjQxxbdv3t1epfBfVLex8QLqVz8629rI6bU/2fl2/wC1Xiei6puaO5mmVP4XWvR/hrNc3WoXFhbIsrSRM0TR/wC7XjZ1KUsumo9j18qlzZjBnpXwl+Hfw31b4EXmq+OZlsbnxt4oZX1DULhVk8uNvvL/AHal+Aek/DfwT+1hb/D34deOYde0iNo23W7bo45N33a+bv28viA/hvwT4O8AaHrHlSQ6W1xKsO5fL8xvm+b+9Xt3/BBL9mG7+JXxmvPiLrM8kum6VbLcXjSP93b8y/8AfTV+aZdTxPsddD0uK5UJ81Rn64ftXyxWfwdsdRkhkhghs40SOP8A5Zttr8+/if4sfxZfXOpX7/JH8sUjNtVm2/xf7NfcP7X/AMZbKbwzD4Vhs4/skCHKyL95tvy1+YHxY8YXt14uuf32yHzWVo1Taq114/ERryjGEj53hbAzwuGlUqx5eY6K/wBF0HWLXffw27xxxfPJH8kjN/vfxV8sftHfD/TbWyuU8H3KyXk10yy/aLLb8v8AstX0T8PfEmja1cR2GqpJFa27Mtx5L/NJ/wB9V1Xir4A6b8RtJa80DSo7S3hRmWaZt3mVrl9SDnaZvnEeX3on5CfEiDxDp1vMlzDJFIvyvuSvKrrUPEmk28mxJAsn3pNtfpH4s/Zf0S88SXdt4i+zv5L/ACSSfd+Wvn348fB+z09Z30TQVeL/AJ57PurX2eX4rD/DKJ8DjMLiqseaB8k2uraxq1x5L/O/+1W1rHhXWNJtEuSi/c+7W+3hHQdJ1FLy2hmQszfu2ib5ai8Ua9NeW/2BEXasW3dtr069ZSlGEIniQwteL99nENqn2hfJ8xVK/frT8LyTXVx5KfKfu7qpWHh172++T5gybvlWvVPhT8Jb+6mS8e2ba33NtZ1pUqcC6EatSqd5+zb8Kbnxl4ytdBezkVrh9kU2zcq/7Vev/txfsK+PP2c/h3Z/F3xPpUlvol1PHBFeXDL+8kb7qr/FXa/sj+CX8L+MrDUtSso8Ky/M3ys1fZn/AAW2+DOuftE/8E1vCnirwlF5114T8QQ3l1tk+Zo/L8tm2/7NfIVakp5jGD92LPuJYPlyj2kdT8Qta1zTdPhH2l1X+5urc+GvxM+G+i6hDdaxp8dyyv8Ad3bdy/3q5Lxt8H/G0OrbNS0e4ETfKjSVe+F/7PviHxB4kisH02RRI3zs33a+meAwvsOac+U+XnjatOrFwpcx+lf7E3if9k74svC/gnV7HS9ajfb9jvkVWk/2q+7rL4d+CfHHhX/hG/Emj2895a2+yK6WBVZdtfkX8O/+Ce/xqXULbxJ8GXkhuYXWWJVb5m/y1fol+yjrH7S2m6vZeBvjT4Sm0u8jVVe6jf5bhf4vvfxV8bmmHrUo89KfNE+wy+eGxtK1aHJM9v8Agj4f13wbb3Gj/aZGs/tG1Gkb7te6fB3Xrb/hLLbfGzr9o2su1qw7XwTZw6YNQMLBJnVm3JuZmrT+C3jXSvCfxPx4gt1fT7G3mup7m42r5axqzbq+WhRp1sXCMv5kepXj7PKp/wCE/OL/AILjfteaP8dvjpZ/BPwL4hkuLHwDPJFetDuVvtkn+s/3tvyrXxdp9vDNdeckO/bxLuTd81df8ZtQTxt8cvGHieF28rUPFF9cQSSJ80kckjMvzf7tULfS0ZQ8j4Xf86r/ABV+y0qMaVKMI9D4PBYfmp8xGunpHhHh3jZuSNX+VWq9HYouUTzP91nq1Z2fkybPszOWep44Xabem4I33o9v3awre97p7dOjyx92Jl3Fq726u8K7Nvz/AMVZV5pu5XSYLt+8jKu1q6eaF45Am/5Pu7W/irG1a3SZmd0YN/zz3fdrKj/LIyqUeaGnxHHahZhWabewf7y1ja/HIsy/O23au9ttdTqVrtZ3Tps3O38KrXP6p532hneZnVU2pHtrspx960jyKkfd0OR1a1hk83ZwzNu+WuW1yx3MX/ib5fmru9QsvmkHyotc/qOmja2/j+Fa9GnHlZ5Nan3PP9WsbaOPZtr6O/4J02sdtZeMDEhUNc2Xy9uFm6V4brWk5UnY2N/8X8Ve+f8ABPq2a2sfFYb+K4sz/wCOzV+s+EGvH+F9Kn/puZ6fC6tn9J/4v/SWfLP7TRkX9oHxkEGc+Ibr+H/poa4Rrry/k/ir0X9qC2mT4++LZht2nX7k/wDkQ15xIuJN7pk79tfF59H/AIXMVzf8/J/+lM8fGSi8XV/xS/Nj45ts2+FF2t/Dsq1DeddzrhaobXjX5PmX+9up0M/zbHh+X+GvG5ehz/FL3jbsbxPlLx7l37srV9dSDK3kurN/B8n8Nc5BN5ce+F+W/hWrEdw6yMiblSiXxcxEpcx09nqTzYhhRd33d1Wf7eeNVSba235a5WG8mjP2aN9p+8+2ntfbRlNpP95qfNy/CYVNzsIdaTydjurFv7tDapthVIdvzfNXJLqgaPyZoP8Avlqmh1J2k+WfAZfu1rKRhyo6dtWTeuHwrffpn25IZG8jbtk/vPWJHqXnSLC6KzL83mVMrPIymbbt3/w1EpFRj73u
l+ab94+zafn+bbVdmfyf3m1X+9/stTlL58t0wP4d1P8AJmMj7Id/8O5kqPaGsYlCZdq732qf/QagmtIpMfxbv7ta32Hy1Xjcyt95f4qa1n0TKg/x/L/DWPOaRjy/EfR0djbR3BedN+35fl/iqa4tfMhHkw427tm5fvVYs0hZfkhY/wAKfxNV1bdBahIX2/xOrf8AstfGVJe/7x+k0zG+zpDGk6Bkbf8ANteo5Le2jZ/nYpv3bpPvLWpcWPzbN7Kny7d38VVLuPaqI7r95vlb+Ks5RudEcRymDqASV3869ZPn2/L8y1i3sMZ/49oW+Vf4q6a6s0XY/kyK7fLu27lrLvLV1md3XZ8/8XzV1U/e0MKlTmj7xzF5azec2yHe2z7u6q0lpc28iu6LtX79bdxEjXDud37uX+FflqL7G8m9N7Hd/EtepRqS9lZHgYiPN8Rm2du/2j9yiy/N95q2rG3ufM2Jux/dVKdpeioyvOnlp/e/ire0fS03LJsVQ392nUrHPRo9ypZ2O21Fy9s3zPt2tV6x02GRWx/f+833avx2aSK0MO6VY/mRm+7WhY6TMu534TZuVW/irnlU9nA3p0485mNCkTI8KeU33fvblqxbR3LSfaUTMq/8tK1rjSTI3kum3+8q1J/Zu1fsyI22P7v+9WMqnNE3jT989g/YH+E+qfGT4n2nwi0LU7e0vfEmuWVhBdXgcxxPIzKGfYC2BnsPy61+l0//AAQ68IeBZJV+Nf7amgaAt1fvFoLPpscRvIlxhmE9ymJPmGY0LhePnOePhH/gkfJNYfto+BRbStFIPHuko7RsRkNMVYcdiCQfUGvpL/gs/qOp3v7eviG1vr2aWGz0jTIrKOVyVhjNqjlUB6Au7tgd2J71/VXh/i+KcxyvKMly3G/VacsPWqykqcJybjiZRsudabr5X0vZn0GGnj6lShhMPV9nFwlJvlTek7dfU4z9tf8A4J+fGP8AYo1y2k8VSQ634b1KRl0vxPplvIIGYE4hnDDEExUbgm5gRnazbW29D+xX/wAEyfil+1noFx8UPEHiW28FeBbNn83xFq1sxa6VFYu9uhKLJGhXa8jOqqcgFirKPftN1HUfGf8AwQi1C5+M9/MRpmoCLwfcXczB5I4r6NLdFJUlgCZogORsTG5QMr7h8Q/iH+xz8Iv+Cdnwv0r44eCde8RfD3VtH02GCDRJJZEa4Ft5w+0OksBbLiRsEAF0zsBUY9jMfETi7D5Osuprnxn1qphXVp01LmVNKTnCnKSh7Rp25G+VNS8iK+dZlDDewir1faOnzRSd+VXuk3bma6XtufJ3xy/4I5614d+Fd78X/wBmT496L8S9O0iCWXVrWySOObEYDOIGilljlZUJYxlkbA+XeWC15b+xJ/wTy8d/tweHfF2seCPiBo+jz+GUt0gtdUhlYXc8xYqrOgPlJtjkO8BzuCjZg7h9ifs8/t3/APBO/wCDlr4kP7I37MfxHlvZtJa71fTdI0qa5jlhgDESTBrqVYo13ENKV+VWPXoeW/4I5+Ph4X+Bv7QvxL0HShBc6bAmq20AdRGuy2vpY4wAgAwVIyBjBGFGOZnxh4iZdwnmNSupqrSnQVGpWp04TkqlSMZRnTi5QstlJWunffZPMs7oZdXlNNSi4cspRim+ZpNNJtfM8z+Pv/BKX4Vfs9/CTVvEXjL9t3wvF4w0jTFuJfC01oqmeYgEQRhZmnO4H5W8nnglVXJXS+HH/BGGS28A6b46/ah/ad8O/DxtYto5bHTJ40eSMugfy5XnlhUSqDhkTeAR9418UX/ibxDqviObxhqWt3U+q3F615PqMsxMz3BfeZS/UsW+bPXPNfof4k/bV/Y1/aV8G+GfCP8AwUr/AGffFHh7xXp+jxPYa59iuY0uYJo0P2yIxlJRHKyFwpSRAOVdsmvf4jh4k5Dg8PTpY2piOeUnVnSoUXVglFcqpUnZSjzX5m3KSVvn2Y5Z5g6cIxqud2+ZxhHmWisox0ur77s8I/bI/wCCXvjP9mL4ap8dfBHxS0jxz4Ie4iifVNOjMc0IlJVJGVWeNoi21N6yE7nUbQOapfsVf8Eyfij+1noFx8UPEPia28FeBbNn83xFq1qzNdKisXe3RiivGhXa8jOqqcgFirKPbP2qf2fU8HfsG33j39hL9pnXtd+Dl1qqz+IfCNyFk2EyGOSVZvLSZI1k8rfbOuDnzSTgVo/8FGdR1Hwr/wAEwPgj4Y+FN/N/wh+o2liusTW0zMs8gsxLGkjbRkGXznIO354x8uR8vnYPjPibHZVhcvw+Mi8RiMTOj7aVLknSjCHO1UotKKr9FHWDutbmNLNMfWw9OjCqnOc3HmcbOKSu+aL05/LY4D43/wDBG/xDofwxuvit+y58cdL+KNnpiSHUdO0u2X7U5TaStv5EkyzuFbcYyVbA+UOWC14l+xF+xZ4m/bY+JGr/AA60DxtY6BLpGhS6hLPf2skpcqyxpGFXGAZHQMxOVUkhXI2n2X/ghp4i8f2H7Xt14c8OT3DaJqHhm5fxFArHygsZUwysMEbhKwVScHEjgHkg+0f8Eyrfwvo3/BTn47aF4DuY5dGWHUTaup3/AHdTi4VyoO0FnGBwcDlsBqvN+K+LuFsHm+XV8Sq9bDUYVqVbkjFpTnyuM4pOHMt46arV+TxOY5ll9LE0JVOeUIqUZWS3drNbX7dzitH/AOCH/hnR7XT/AAz8X/2zPDmheMdTT/RdBtLNJVkZmKoIvOnhlmyRjIjXnIGcZPx/+0/+zZ4+/ZP+MF/8GviLNZz3tnDFPDe6dIzQXUEi7kkQsqsO6kEAhlYcjBMHiv4k+OfGX7R1x8TfE/iW6vddn8WLdPqNxJucSLcDZjPAVQqhVHyqqhQAABX1l/wX0WJf2lfB5SCJWbwOpeRYwHb/AEy4ABbGSBjgHgZOOpr6DKcZxhkvFmCwGa41YmGLpVZNKnGCpzp8kvccVeUbS5fe10vozsw1TM8LmNKjiKvtFUjJ/ClyuNnpbda21PhKus+A/gCb4q/Gvwl8NobI3H9ueIrOykhG75o5JlV87SCAFLEkEYAPIrk69C/ZL8Y23w//AGn/AIfeM7y1E0OneMNPlljO77ouEBI2kHIByPcdD0r9JzapiKWVV50PjUJOP+JRdvxPdxLnHDzcN7O3rbQ+sv8Agtb8f/GOgfF/Qv2Y/h7rVzoXhXwz4btpX0nSZGtoZJpM+WpVCAyRxJEEXGFy2OvHSf8ABJr4i+Jf2nPgL8Vf2P8A4sX0viPThoAn0KLWJHn+ziRXjMYZjlVSVYJEAIKNuZcHkeW/8FwfAmp+Gv2zv+EunsnW08R+GrOe2uNp2yPEGgdQTxlfLQkDoGU45ye4/wCCFmh3XhnU/ip8eL6wkOnaH4XS28/Y2JH3NcOi9iQsCkjkjcvTPP4FjMHldHwGw+JoRXtIwpVIySXN7d1I3ae/M5txbve10fHVaWHjwhCpBLmSjJPrz8y6976HwNf2N3pl9Npt/bvFPbytFNFIpDI6kgqQehBBFe7f8Ey/i34E+Cn7aPg/xr8R5YoNLaaexe/nKBL
KS4heFJmLA7VDOAzArtUkk4BB8O1vUv7Y1m71f7OsP2q6km8pCSqbmLbRkk4Gcckn3r68/wCCJnwY8D/FX9qu98QeONFi1FPCfh9tS021urRZYBdmaKOOVt3AZNzMnB+YBgQUFfrvHeLweD4Ix9XHRbp+xkpKLs/eXLZPWzu99Ut9T6TN6lKllNaVVacrTt5q36nuX7Zn/BLT9sb46ftG618U/h78cNMutE1q8WWxi1fXLqCTTYtoxCESN18tDkLsOSOSMk5yf+Cq+saf8EP2HPhx+yN8Q/Hv/CWePIri3vLnUpZBJLHDCkqtMS4LhC0nkxk7WdY2JJ2sp+f/ANpb/gqD+2J41+OGr6r4X+K2s+E9L0zWJotH8P6W4gS2ijkKqs4A/fv8uW8zcNxYABcKPoX4teLf+G8v+CRt7+0F8Y/DMP8Awmvga9aK28QW+moJLpo54UkddoXZFLHKBIq4QSRFgvyKo/FqeU8X5HWyCvxFKlPC06tOEY0kozhOcXGnzvlXNFac6g0m+ktz5aOGzLCSwc8a4unGUUlFWabVo301Xe34lP8AaG8Y+Kf2MP8Agkz8M/h58MzL4b1zx6YrjXL/AE7fBcsssRuZiZAQyyMDBGTnPlqUGFAA8Z/4JJ/tO/EzwD+174e+H954x1K70DxfLJp2paZdXcksRlaMtDMqsSFkEiIN4GdrMOhr1P8A4KM2t18Vv+CZfwG+M2j6Qy22j2ltZXqxK5FuHtFhyck4XzLYLls8svPPPzl/wTD8B6l4/wD25/h9ZWFk8qabrH9qXbqpIijtkaXexHQblReeMsBznB97I8FleN8Mc4rY6EXUnPFyqtpNqcZTtq7u8UouOumljswlLD1chxMqyXM3UcvVN2+7Sx5l/wAFvvhPZfBr4qfF3wZonh6HTdOkvIb7TLO2hKRLBcPDMBGvQKC7AAfKNpAAAwPyc1SHZJ5P3jtZv92v2I/4L7+L7fxv+0H8WZbW2EK6eLHTi43ZkaBLdGY5P94EcYGAPcn8idWs3+d3hYqv8X3d1fkniPiMVVWTzxH8SWCouV97vmvfzfU+T4ldWccLKW7pRv8AichdW7qrP8zCqDWrwr8nylq3Ly1ePbsThqo3Nk24Mj/x18BGpynyHLzGfD5nmM/zYX7+2tO3bC70+Yf7P3qhjhS3kZ/J43Vo2sbybfJ/h/vVEpdzWnT5i3p8aeYqOGbd95dlbdnbvN8k0P7vf8+6s+xj43+R977+1/u1taWs21Y4fm2/+PVyyqR5fdOynRiaFnbv5Zm8lnZfm2763rGPzoD/AKM2xlVmjaszT4UVW2fM29WX5q3tJimaREd8fPtdWavPrVj06eHizY0exdVSHy1Xb9z5vvVsafb7m854WQx/Kn93/gNM0XT0jkRJnYln+Rmeui02z32I3cbn+X+Jlb/eryfrXNLlPZo4X3SPT7VFU/udki/5+Wr66em3Dw/My1YsbFIUe5+ba3y7m+XdV+OxSTc+xldVrgqVuWR6VHD+03MtbPbh34T+L56sJbzKzJ5Lf9dP4WrRktbOOON33L8nz7aj+zzRwo6OyfOy+W3/AKFWMZfDJHVGjy+6Vvsszfvw+xtq7WakXT7byw7/AMTfIy/e3VfWGSSG4/fLsk2/8BqW3s5o4/Jhh+Zvl+V/vVtyzluXGnGPxGNcaXZ3UhdNuxfk/wCBVBL4fh2h7lGLr95W/u/w10v9myKyWy/embdu/wCef+zTZ9Hf7QZptpXZsT/er0aNP3QlGHL7pzVvpaW8nyBi/wAyurfw1ZtdMRYfOSGbK/cjX7zf7Vbk3hmGNVcPt3ff2vuqxBpFtbru82R/Oi2szfer0adGPLoccv5TmJdNfKx/bG27fvTfM1VvsDrtmd/njbb/ABfe/vV2N/p9s80Lw/M2z+Fqp3mlwxx7HRt6t/E1dPs+U4ZU/iuc79khaMo6MvmfM0i/3qh+xJZw+ZCm9Gba3+zW9JY2fmK8MyuG+5uf7tULixdG+S552fNu/hp+z15Tn5ZSkY8apJ/qX3OrfdVf4aLqD9233vv/ADqzfL/s1d8vyZAjvj+H/Zpt3apcKrwuoZvvVlKPKZ8pzerWvlt5zp87feVqwLyaTzR/cVN23Z96uk1SGa3Xf5zFt+1ZG/hrmdUV9xMz87vurXD8J1L3oEEl9MzNB5yoW/26zr7UGVim/wCT+9uq55L7Qj220/w1n3lmdzJM6/f+7W9GW5nWw8qnwkMc3nKju+9Y2/ufepjXTr/rtzFvuL/eqw1nNbqqPbMqyf3n21XmtU3B0h2/7NdUa3N7sTqwuW82g5r55d/z/Pt27qZJNNtVJudv8W6kkXyzvdF2L91dlQv5fzyQpt/i2s33aqVafLofQYfKfcigEyecr7/9ylb98u93kZm/ib7tV13tG3nO29vuMqfLtq9Y277Tc/Nt+61ZSrRjG5p/ZREtm8bbPlG7+89Syae7J86YH+z/ABVct7fzmRNkmf8AZTduq7b6bNMrzvuxGn/fVc0sRp8Q/wCyeWJhyaCkipvfYzfcb+7TJtNjjVvLfjf87f3q66z017pf9Tt+RW+b5qq3mjoJFhhhVW37fuVyfWoy0ZwYvK/d0OVm0hFXG/afN+df9moJLH946P8ANt+VK6W4sUhm2fK3yt/urUP9nJuV5HXDfLWlPERj9o+VxmFlGRzFxZvHs2Q8fdfa21dtM+ywrGqFGb/Z31vSWMKzcpsXZ/y0+7ULaei/OkzFt/8AF91a3lW9w8ephzCm09933Nyt/DvqNbPdcLsT+P8AhrZvLHdI2/gf3qguoXVQibU/iqeaXui+r/zFOOF/vwu3y1djjmkkZ/MVGZ9v+ztqGGHH3/m2v97dWlbx7ZEh2K3ybk/2qr4YGlGM4ljT1aNkTZ97761t6azqph8njeu3/aWsqG12yLsTaWf71benwv5a4hVd3yo33a5vhO+j7ux0Oko/8c0hTf8A6pfl2112n2sO3zo9yvu2vufcu5a5fR03CF4YV2b9svmf3a63T45o1CK+8bvmZvvMzVxVI80z1cLU5Ym3Z2G79z5efutukr0fQURvA6orBla1k57HO6uB0iHy7dZXRW/5616FoMUY8JJDEgKeQ4VVHbLcV+2eA0OTPsy/7BKn/pVM+lyio5VJX7P9DlFtYZdlsjsZNy/u1/harUtrcx/uYfm2uzPtermnx3MM3k3SK39xdvzLU0MPmW/mb921/u/d/wCA1+DU6M+Y3o1pRlfmMDCMqJDbSTIzfeV6wdcs/Ojd3hj/ALvl/wAVdlqEbi3e1RFb/ZVNtYOoWMLQoX+Uqm3/AHq6PZ8p7mHrfaOI1q3fzGuXtlES/L97+Jqwr2zf54fmfd8vmL91a6zWo0jiWF3VQ277v+98tc/q0iRqzvul3Ov8XzV2UqMuU9aOIiclqVrc2qsiQKQzbfuVk6hD98OmHX7y1011++mf5Nv+zu3ViagttbjyfIY7vl+auyEJR6CqVqUTAuF3Sb03CRl2pVFo3t/4Gcs21/71al9a+Wyqi7D/AHaosqBTcv8AIy/fVf4q76cbHy
WYVOb4SKRPs+7em7d/Ev3qLWP7Pdb0Rm/2lpxkfyxvTa7fw0kW6ZneE4X+L/arsjHmifN1q3Ka+lx+ZcBPO2qz/Mzferp9HkfzfOR/Kdfvsv8AFXK2FqHHKbXZflVfvN/tV0elwvHjL7H+6zK3zLUVOaRH1jlOu0u827fJT+LczMtdLpV88dwr3O3/AGGWuN09XjxsdmH93ftZq39Jme4VURF+XaF+f5t1cVSXunXHFR+yd5oeobbgJc/JKvyxMvzK1etfs/68mn+JvOmm+aO1kVI2+7N+7bateG6LdbtUTem54flWTf8A99V6R8I75G8WQwncr3D7P7rV5mYU51sHJHfluK9liozJvil+z/4q/aE0HSn8Kwx3OoWaNbfZY38z5d25Vr7K/wCCPfhbxf8ABT4b+K9N1jQ9Qsrp7uGGeGb5fl3fw/7K1t/8E8vgq/wZ+JjeNvFOgSXelWSyXpdf3mNq7q+gvhh8eP2ZvjNf6v8A8Kp0a5l1+/uZGubSCJlKFW+Zn/2a+FqqnGhyP4j2MZWqVMU3yc0Opyv7TmuTLvR5pHMMS72X7vzV8RePrHTZNeu33ybppfNlaT5lVf8AZr6n/aG1z7VNeW015JB5bMqKrfK22vnpfDqatp5mvIcmR/8AXfd2t/u15MY+yZ6EfdgeWaPq1h4f1R/sd5u/jlVvu7a9L8G/H+wtbaW1v9YmSwVdu2SX+L/Z/u1538WPhzreizM7wSYb51WNN+7dXz58T5vHmk+IrR7aNgnm7v7q/Kv8S16FHDupLmiefiq1CPu1T608XR+GPE1r9s8MeGLi5E0TM9006+Xt/wCBV8+/Gi38Z6Mvnf8ACt45rf7rSQorMse35d1eY/8AC5PivJJ/yEpv3O7fJHLtX/d21et/jl8YLjT/ACZpFltmfbtuk+WRa9WFPFU5RbR48vqM+ZRZ5d8QfiB4J1Czlhm8HtFPDuX5vlryHVrVNcvPs+j6a23eu6OP5q9u8beGbDxhqj3mq6VHEq/xW/8AFUOh+E/Dekx+Ra2y7YW3MzfeZq9ehW9h70nqeFisJKvP3YnK/C74B3OpXiXOqwthm+f+FdtfSfh34Z2FrpsSabCq+Snzxqv3q5bR9SsIY1sLZIUaP7qq/wAzV7L8IY7PXLT7HeXLJcbP3TRrt3VrWxyrQDD5b7HVfEdd8MNP0qz8Jprd4lmj2rR79z/vG3N/yzr9A/2ePCuifGj9mnVPAvifzXsLu1aN4WXcsn93/wAer4kh+Ad3D4fPim5ufJtoUVmjb5VVt33a+0/2H7rxLd/DwaR4ZsxPA0amRi/yxxrXz+Pjy1YSPqsBH2mAqQmfm7+3l+xHqX7OPxWhvPGmiTP4U17b/Z18vyrDJ/drX+Hf7Bt54i0+z8Q/C7xbD5Eksb/Z5Nr7mX7y1+rv7Q3wR8GftB/Aq68DeObBrnZFIbWRl+aFm/ir8vJvg/8AtG/sR/EpLHe2peGI7pnsrqNmZo1/hX/gVaVa05YWNSn7zj8SPNw2GpQxLo1fkz7P/ZV/Z78beEL63v8AxRCv2aGJfK8mBVbd/FX0p4u0nwlfNYQvokbz+asUU0i/Oqt8zfNXzf8ABX9szxCvhmF/E/hhjFJF/C+2SvY/hz4i1LxvqFtqb3LIrbmit5G/1a15H1irK66yPSxGBlH4djtvHsGmeGNBR41by1i3RM38TV8lfHzxpeaP8KfiH4hsLzyn/wCEXvILeRn/AOei7d3+z96voD4+eNg0Y0RrnZ5ab9sf/oNfMX7Ukltp/wCyj47165mbz5LCG3WFU+WRpptu3/vmuLCR9pmsEo7SOmph/Z5VN1Ox+b2j6HMqok1y0h+zr5s38LN/erdsdJhaEQwzKqb9m1k+Vmqa1tUt5gjurKv97+9WppdvMrfvkz/FtX/0Kv2Bw5ocyPicL7vukMOg3nnCZHjDxv8APCr7W27altdPmhV4ZkjWRl+61bsciSSedNbfL8q7m+9/31U91pKLbvMjqNvzIqp935q46lOctz16fuxOKvLab7Q6WzxqV3Km6Lc1Ys2j3KyPNI/m90X/AGq73VtPSS4e5b5Hk/1S7Nq1kXuhw27M6QzHd8zt/do9nLmsKpGEtjgNc0e5jdkmhZP4tqvuXbXK32mv814m4rv3L/s7a9U1bTkmg87fGsWz/dZv96ub1bw35kf2b5Wdfvssu1V/2a7KcYxPDxVP3vdPNNQt5maR5pvut/q1VfmX/erA1ix3L8nzLu+f/Zru77Q5rWE7Nu1k/wCBVgappaRskyIzp954d/8ADXfTlE8StG3unBatbTqr7Eyy/wATfdr2/wDYQt1gsPE+0/entCR/wGWvKNc092X92jbdny7V/wDHa9j/AGJovKsvEeEA3TWpyO/EtfqvhAkuPsLbtU/9NzOzhdWz6nb+9/6Sz5h/acsIJfjT4qk80gtrlyCAv/TQ15VqkOdyJCv3fk+bbXt/7RNktx8YfE+4DB1u4G4L/tmvI9es3Hzuiqq/xfe3V8Xn8eXPMV/18n/6UzwsZ/vdX/FL8zBTfJ1enq3k53puWmXcflNs8tR/u1CjSeXj+Fq8XlOb2hPDJtk/c8t/Aq/xVYW4dlHybf7+191VrdnG7/fqVZHjbKf7vy1X2Be0LH2ieTYibfm/9Bpv79tvbb92mpbzSbZjDll+XdV6Kzj2h33D/gH3aj+6YyjzFaFXZWfzty79tXbW1uZJFcBXFW7DR0kbfs3LWvY6WzfIkXzMn8NVKQombbw+Sq/Ju/3qt2lm8eX6Lv3bv4l/2a2LfQ0bY4tsszf981etdDMa7/vMzf8AjtYSlKUjanH7RlwwwNh3hb5fl21Yt7abd9/buTbt31t2+g7pPnDJt+ba1JJo7wyb9n/AmrnlLlOunT9oYzWc0J2dV/2aRYX2rDtU/wAX7z5a2X0vy03vSeX+8V3s1Xd8vzfw1hGpI19ifQS6S/nSpE67413IqrVlbV7dWCIoHlK21vm3NWvNpf8ApDvhWeFPnbd95qia32SbH2qF2/N/er52pT5T7Wn73xGLcW91IiQwzKh+9u+9WfNa2wxAkK5bcqsyfxbv4q6G+0/zriLZbKPvKzM33WqpeWqbhBsVvn3IrP8ANWPvHZKMOQwLu1mWPZ95F/8AHmrK1COFmX52+Zt3y/w1v6pH5m7bcsPLT+H+9WXdRpNN9z+Hcy7fu10Q92epw1OblkjAlhMl9smfaF+X5vu1DJayWTbJHZyz/dq7eR/6SU8lcb9rrUa7Ps/z/wDHx/Bu/u13c38q3POlHmJNNt4bO186H5tzbtqr81bmkw20kyTJDIyq+3b92sqzWaNwUG6Hzd27+LbXRaTNDHdJsRtirv8A9n/gVRL93qKnH2kuU09L0n7UY02YRv4f/iq1V09422Dy9v3VXO6ordkuFH2ZFbcv71fu/LWlbxpGqeTAqhdq/N/D/tVwyqc0T0I04RGLpqSfP5PzfwK3y7qhbT0t7VbmH5dz/wB/+KrrW+2N/tL79rrs2v8Aw1U1Jd0zJbPhG+7H/dpUypxj/KfTP/BKnTb7Q/2u/hnrk
d2UbUPHOnKqoeVj8/y2B+oZgfY1+nP7dP7TX/BOXTf2gL74b/tefs86jrfiDwzDbfYdZ0+xV/tFvNAk6ozpPE5CtIw8t9yjJIxvYV+TX7M/xc1T4CXvhb42aLpVrqF54Uu11S2s7wt5U8kEhkCsUIbBK9jXm/7e37Y/ib9uH9pjX/2jdV8J23hj+2Ut4E0ay1CScW8UMSxRh5GC+Y+1RuYKgJGQq1/RfEs8s4WoZDWlSqODwSt7Ks6U1OclUk+dKTs+eS5bWtLyLzKjToV8PUkny+z+zLlabd9/mz7m/by/4KOX/wC1LomnfBr4UeCU8G/DbRCn2LQYkiV7to8rC7rGoWFEQgLAhKqcks+F29D+xv8A8FMPC3w8+EUn7LH7XXwyPjn4dyDy7ELDFJPpsWWfy/LcKJlEm1kberxHJVjhFX8in1jU7iRkS8nH+y0hWmT39x5w/wBMZzs+VlnPy1MvE7g+pkEMnWSuNKMueLVdqcam/tFU9nzc9/tX1Wj93Qbx2WPALD+wtFO697VP+a9r38/lsftl4v8A+Cn37Kf7PPwz1XwT/wAE6/gBceG9Z16Ird+JdVto0e0YYCuA7zPclQX2q7KiM27DZZT41+xx+3T4X/Zx+D3xe+H3jTwdqutaj8RdIMNnfWt5GqpO0U8TGXeMqMXDvvG8kqF2gMWH5ZTa7etEiveS7lb518w/LVO+1u7ijdn1C4Xav3vMLM1GD8Q+FKeW1sG8qnU9tKE6k54mUqk3TkpQ5punzWi0rJWVr6XbZjHHZdSw8qbouXM023NuTaaau7X0Psa1urmyuY72zneKaGQPFLG2GRgcggjoQa/QOD/gpp+w9+0x4O0Ww/bv/Zn1DUvEGhWSQR6zpQEwuW2gSOGSWCSIOwLeVl1BPU1+EF3q+oSycajcK23+KU/40+DVbq4fbLeSP5e07fNO2va4i8Vcl4ndKeKy6pCpSbcKlPEOnOPNZStKNNaSSs07jx+f4THcrqUWpRvZxnZq++qXU/aL9rf/AIKSfB/xT+zzL+yR+x38Grnwf4PuLhGv726dIpbiIP5jxCJC5+dwhaR5GZgpUjnNVP2N/wDgph4W+Hnwik/ZY/a6+GR8c/DuQeXYhYYpJ9Niyz+X5bhRMok2sjb1eI5KscIq/j3p/iG+lVzPNKBu3LtkNWo9e1BVd3v597fLuydy15v+vfB8ckeVvKZOLn7XneIk6vtf+fvtOTmU/NO1tLWbRxf2zln1R4f6s2m+a/O+bm/m5rXuftp4t/4Kf/sqfs7/AA01TwV/wTq+AE/hvWdejIu/EurWsavaMMBHAd53uSoL7UdljRmztbLKfEv+Cd37bnh39jz4z+I/ip8RPC+q+IDr2gT2jNY3KCX7Q0qThn8z7wZ4wGbOVDFgrkbT+YsXiO9LjbfOB5X3VJ+Wr9r4ouzMkM08hjblW840Yfj7hTD5RisBLK51Fibe1nPESlVnbbmm4c3u9ErJdtXfOGfZdChUoPDt8/xNzbk+13y306H1g2vW7eMT4n+wv5R1P7V9m84btvmb9m/bjOON233x2r3b/gpD+2d4Q/bZ+K2hePvBvgzUdGg0rw3FYTR6nPG7yS+Y8r7QnAVWkZQxOWADFUJKj86rPxBfLCqfaZWKtuVo87v92tnSdcaILcxXM7L93czHdur2cX435RUzTD5hUyuTq0IzjB+3dkpqKldezs9Irc9iPEmGxGIhWdB80E0ve72v08j3OgEqQynBHQivJbHU5JV3797bs/eP3q1bXXJHZ3ikcfL/AKljtb/arpq/SWo0/wDmVt/9xl/8qPapZ+qqv7P8f+Afpr8Nv+Cnv7NPxi+D2g/B7/goR8Br3xdP4dtxHaeKbUrPPORwHbLxSxOUWMOyyN5hXcwHSsD9qH/gpf8ACS8+AN7+yp+xJ8G7nwP4W1KXGq6nIyQz3cDDEsXlxlzmTbGryvIzMgKEYNfnXJr1zD9xHT91uX95upW8QvHbur+bhvl6/KrV+d0fEvg/D5lDEwyepyxn7SNL61L2Mal786p+z5U76pfCuiPIisrp1VUVN2T5lHnfKnvdR2/Q+6f+CZn7c3wb/Yv1rxPefFP4SXWsSa3aRx2et6RDDJeWwXO62xM6AQyEhmKsDlBlX+XbxXwf/bY1T4B/te6j+078J/hzp2l6dqWoXXn+DYJilt9gnfc1qrKPkIwrKwXaroCE2jZXyJLrBhCwvPI27+JTVJtRu8PDNKy7X3blb+GvWreNXDVTH4zF1smlKWLgoVU8Q3GUUrJKPs7LTqrNbqzbv0vFYGdWrVlSu6iSleTs0vLofq/rf7bH/BIT42anJ8SPjX+yLrVr4nv2MmqiwtgUlmJyzl4LmESsSTl2QM3U15h+2v8A8FLPC/xu+Dlt+y9+zl8HY/BPgG0ukkljJjSW7SNvMSMQxDZCvmZkb5nZ2CnI+YN+bOpXF3DGq211KZPvbdxw1ZN1qF+gbF5KdqblVXPyt/drzMs8S+FsDi6OK/s2tV9i1KlCpjJzhTa2cIunZNLRXvb11POhiMuwtWM3CUuXWKlNtRfkmunQ/TP9h/8A4KPeHvgP8LNU/Zq/aM+GMnjn4eapcb4rFpI5G09WJaVFilG2VGkCSBdybH3OCS1eqt/wUz/YY/Zl8N6q37Cf7L13p/iXWbGSBta1iJIhakjKEs8s8kqK4RjCCiMVHPFfjBquo3IVsXs/8K7mc/erntT1e+l81GnlX5mZN0h+WujHeI3B+b4+pi6uVVEqslKpTjipRpVJK3vTpqCTeivtd6u7bM8Vi8sq1pVJUZe87yiptRk+7VrH1j8UNOk+MVpq8Hj7Vby9m1ydp9TvXn3TzytJ5jSM7A5YvySc5ya8ok/Yk+DskPkG+1sKTnAvY/8A43XgOp6pqjsVGpXGxv70pb+tc/f3eoxK+7Urgtvwi+c3zL+dfT5l4ucK53VjWxuRxqSjHlTdRaRV2kv3e2rNcTneX4uSdbCKTStq+n3H0jJ+wN8D5Dk32vD5cYF/H09P9VUR/wCCfPwJLlzfa/z1H2+LB/8AIVfKt5qd7BIGTVrovv8Al/ft9386z7zUdUZj5eoT7P8Aanbd/OuP/iIfAf8A0T0P/Bi/+VnE8zyR/wDMDH7/AP7U+uD/AME9vgUW3f2n4i4z/wAxGPv/ANsqcP8Agn58Cwwb+0PEBI6f8TCPj/yFXx02p62zB01af+7tadv++utPh1PW5mXfqs4+f+Gdv8ab8QeAv+ieh/4Gv/lZUM0yd7YKP3/8A+y4f2D/AIKQFSmoa9kdzfx8/X91VqD9if4PWxBhvdaGBj/j8j/+N18f2upapt+fVLn5f4vPb/GtvTNSv5Zkht9TuPm+ZlaRvm/WueXiL4fx/wCaep/+DF/8rNlmmU3t9TX3/wDAPrCL9j/4TxcpcatnOcm7T/43Vq3/AGVfhnbSiWO71UkDABukx/6BXzNo93rB/ez6hK3zfIqzN93866LSrq8KrNJdXH93b5priqeJXh4t+G6f/gxf/Kzrp5hlstsKvv8A+AfQ1v8As8eA7crtuNRO0YAa4X/4
irkHwV8IW5UpcX3y9jOvJ9fu14Xpup6hLJ5qX0qFX/e7ifmrdtr2984+TeyOW+bazn5a5ZeJPhwnb/Vmn/4MX/ys7I4/AvfDpfP/AIB67F8IfCkKqoluyEGF3Srx/wCO1MPhh4byCZLrg5H70D+QrzDRdRuNizXV84f7mybPzVeivLg2kkUch3SfLiRi1Yy8TPDfm14Yp/8Agxf/ACo6oYzB8vNGivv/AOAegN8LfDTsrmS63L91xMMgenSpF+G3h5ZFlD3GV6Eup/8AZa4EXUjSR/O+V6tuP3f7tT2M8zxn7I7ff+75h+7SfiT4bf8ARL0//Bi/+VG0MVhmrqkvv/4B2zfDTw65O6S5wwwVEigfotTL4B0FAMedlVwrbxlR7cVyOlw+YiXL3Mof76qzFt3/AAGtW0S6eNXlimX+Dcw+9/tV1UvEfw4n8PDNP/wYv/lYfXMPLel+P/ANqLwHoUTBgZyQcjdIOP0p6+CtGBDP5rkZxvYHGfwqlawySSLsw7N95VX/AMeqza25nb7Xbo6Fn+RWFdtHxB8PJ6Lhymv+4i/+VjlicNGP8Jff/wAAnTwfo0b+ZHG4OMZ3Dp+VKfCOjkY2OD/eBGf5Vet9DF5aCLyxtZvvsdu2nXGkBLpI5rgfd2qqr97/AIFXUvEDgBfDw7T/APBi/wDlZzrFYSX/AC5X3/8AAMs+CtEIUBJBtORgj/CkfwPosgIlMzZILZYckDHpVwaWzXRS5QlNv9/azVQ1LSr+3iUoiDe/zLn5WrT/AIiBwFb/AJJ6n/4MX/ysyni8FF/wF9//AACB/hh4YdizLP8AMCCPMGDn8KZcfCvw1chRLPd/L6SgZ+vy81T1KwnRmeAzbF/5aSSL92svU9MnSNo453lMm1Ym3D5l/vVMuP8AgGGv+r1P/wAGL/5Wccsxy2DusMvv/wCAbI+DPg8MH33m4HO4zjP/AKDQfg14Q3tIj3alv7sq8f8AjtcbNBcIxRbqVmba33Cq7qpXVreBiIWbePmlZnLfLXK/Efw/5uX/AFdp/wDgxf8Aysc8wy6muZ4Zff8A8A7a7+AngW8yZGvVJGCyTKCf/HaoN+zH8OXBV7nUyCcnN0v/AMRXH6qskqhBeNGv3U3OfmrY+Cv7K/7Rf7SviePRvgz8M9b1x3fynktY3WNWX+JpG+VVrN+Ivh7y8z4cp/8Agxf/ACsn+2MsjK0qCXz/AOAasn7Lnw3kIzd6qMYwFuk7f8Aph/ZV+GRJb7TqmSck/ak/+Ir7C+DX/Buz8Xbq3g1v9o7456f4ZhcsZtH00m8uFX/eX5Vr6D8Of8EH/wDgn9ocMaa/4p8ea5IyrumbVBAu72Vf4a8yv4ueF2GdpcPU/wDwYv8A5WaU8ywtX4MI3/Xofl5P+y38Nrn/AF11qhOc5+0pn/0CmSfsp/DGTGbnVB/u3KDP/jlfq/cf8EQf+CdMg8v/AIRnxdDuRv36+KnZt396vOfGf/Bv7+yjrDk+EPjX490hPu7JZYp1rmXjP4VPbh6n/wCDF/8AKz0aGYUlLSg0fm/P+yH8K7mQyy3msFj1P2xP/iKYP2PfhQCWN3rBJOSTdp/8br6q+Lf/AAbwfGrRklvvgh+0Zo3iaBf9VZ+IEktblm/u/L8tfH/x0/YH/bc/ZzkuE+KvwZ1qK2t5f+Qlo+bu2aP+9ujr0YeK3hnV+Hh6n/4MX/ys9nDYzDV95cvqjXX9j74UJ9261f8A8Co//jdSw/sl/C6AbY7rVgM5/wCPtP8A4ivBIJ71L86Wt5NFKv34Z2dW/wC+c1p2IvlYb9Tuf725pDWkvErw6cdeG6f/AIMX/wArPZoYJV43jP8AA9xP7L3w2IIE+pjLZBFwnB/74qZP2bPh4gws+o4/6+E/+IryJEktlBW5lLSfc2yP8zVoQT6nGDDPqE25v4d527a5ZeJ3hx/0TNP/AMGL/wCVms8vqxdnL8D04fs3fDsNkSah7j7QvP8A47RL+zh4Clk8xrzUx7C5THt/BXlqGaSTyLy7ddr8t5x3VQvJrpTsS4lx6+Yf71S/E3w3f/NMU/8AwYv/AJUeXiMOqbbauesS/ss/DSZy73Gp5Y5P+kp/8RTJf2VPhlKQxutVUjptuk/+Irxa4vb6RnH2112t8+1jVSW6vZGb7NezIP73mHbTj4neHD/5pmn/AODF/wDKj5nF1sHTV5UE/n/wD3F/2TPhhJy95qxwMf8AH2nT/vik/wCGS/hbgD7TqvAIz9rTv/wCvFI31G4VVS9uA2cblkO1qe1ver88d9M5X+HzDWs/E/w6pxV+Gqf/AIMX/wArPKWOyxy/3Vff/wAA9n/4ZH+Fe8SG41UnGObmPken+rqB/wBjf4RuxJudYGRggXif/EV43u1JoykM83zbmSTzD8tZd42o5WP7bKHb7zLKf8aqn4n+HU3/AMk1TX/cRf8AysbxeVf9Aq+//gHuo/Yx+EIGDdawRnJBvE6/9+6kT9jz4UIjILzWMNjP+lx9v+2dfOEs9/aXYZdUnQ7vutK3+NSf2pdqWlTUrlpG+bb5p/xreXiT4eKOnDdP/wAGL/5WZRzHKnLTCL7/APgH0jH+yR8LovuXmsfX7Yn/AMRU9r+yx8NLRNkdzqh92uUJz6/cr5xsb/VJbjzprm4jVvmT5zXRaHHeTRPFPdyOVbP+tO1qUvEjw7ir/wCrdP8A8GL/AOVl/wBo5Y/+YVff/wAA98sv2fPAdgu23m1D6m4XP/oNaEPwi8KQLtWS7PuZV6+v3eteUaIbidQq3TN+6YL++KqrVvaLcX1orPcK5eMqPMOdsi1nHxG8OZO64ap/+DF/8rG8zy2l/wAwq+//AIB6LbfDzQrU5jluD6hnU5/8drWttOtrTTxpsW7yghXk84Of8a4eyiu4lSZp8+Z9zaK7DTVdNAVZcKwibJJz681+l+GfFvCecZli6eAyaGFlChOUpKfNzRTjeD9yNk7p3122PVyrMMFiak1SpKLUW9+mmmwg8Naech3lYHHBfHT6AVYGm26xrEC21egAAz+QrJWULue2D+rw5/8AHqtFQkQfcERfv5NfnUfELw7Wi4ap/wDgxf8AyszjmmB6Ul9//AJxoNiu4K0gDMWI3DGT+FVbjwVotyhjlEpBOSN/f8qfLa3Lzec0BdF+bcx27awvEdrKJAyySB4fRvl2tTXiF4dXt/q3T/8ABi/+VnZSzLD20hb5lm8+EfhK92+d9p+X7u2UcfpVKX4DeCpozG9zf4Jz/r1/+Jrltbnlt4HNsSyjcrLuK/8AAq898Q6tco7RR3Eqsv3VEx2tXRR8QPD6e3DlNf8AcRf/ACs64ZlTteKPXpf2afh3KBvuNT4IIP2peo/4DUcn7L3w3lJMl1qhDDBBuUI/9ArwaTV76NvJhurj5fveZKfu1Vm1TUmkXZdSfN8zfvD92uuPHXAD24ep/wDga/8AlZlWzanCN3G/zPfH/ZP+GUgO6+1fLHJP2tM/+gVBL+x/8KpQQ17rPzdSLxOf/IdfPV5rF8JHb7fOuf4vNP8AjWdd6vqjReVLqE+xd3Kyn/GtFxzwGo/
8k9T/APA1/wDKzya2d4KMbulf5/8AAPpOX9jn4UzIEbUNaGO4vI+R6f6vpTof2PfhTbqFjvdYwO32tP8A43Xypca3eyMTHqVx8v8AC0zfL+tOttc1adkji1ScH+FhM3+NJ8ecA/8ARP0//A1/8rPOeeZZf/d19/8AwD6xt/2TvhfbYMdzqvyrgZu0OB/3xVyH9mr4dwjCz6ic9Sblf/iK+Y9F1q+df315cNIv8X2g/wCNdDY6zdzXCma6kVdm5VaQ1jPxC4BTs+Hof+DF/wDKx/23lslf6svv/wCAfQ0HwE8EWyhYbjUBtGF/0heP/Hamg+Cfg+3IaOe9yO5mX/4mvC4b+/jjEiXbyt95f3x+WtTS9YmbMM1wyurbnXJ3Vzz8Q/D9b8OU/wDwYv8A5WdEc2y/ph19/wDwD23T/hh4e01w8FzdkgY+eVTx/wB81raboNnpWowanZySLLbyiSPLDG4fhXjlnq108CJFM7H/AGWNeqfA7xbpug+PLK/1WTbZxyrvMi53L/FWdTxE8P1C3+rdN/8AcRf/ACsl51l8XdYdff8A8A+lfD3/AAUV+Pfhf4fXPw50bTfDkdpd2pt5bo6bIbjYVKnDebgHB9Kwvgp+2h8VfgBoWp6H8OtF8PQtq+BqF/c6c8lzIoOSm/zBhT3wBmvvD4kftGfs1fFP9kbSU+G0Vkuv6IlvLp6vZLHI0kf3scc18taf+zB8bvht+2d4V/aV+NN3aDTvF9x5ljamQPujWP8Au9F/2a+fxniT4Y0EpLhalLT/AJ+JW/8AKR6WV5zg8x5qc6fI77b3fTseReNP2qviZ48uWudatdKUs+4Lb2jqFPtlzWIPjZ4x3ITDZEIcqhhbb7DG7pX0n8fLPSjq19NBaRrKZd8iLEFIj3bvSq/wk0q08QrNNFpKKscW7aYw3/fVeLPxZ8K1HXhKl/4NX/yo+mp0/aHg9v8AtI+MIbt7648NaDcytAYla4sXYID/ABKPMADe9ef+LjaeNdVOr61pluZD0WNCFAznAySf1r7T8QeKvAvgnVEsLzTLfUXKM0qraqyw1xl74i0DVll26RaM3zPueMbYa9DB+KfhhUhePCtKP/cVf/KjzcbKlRnyyhzHyK3gHwizMx0SLLnJPOemOtZ158IPCV4csbpOuPLmAx9OK7f9pP4xSeHrddG0bTHeOXc3mlwBXxr8Q/iDrN1qRtri8m2yMxz5pK7q9GPif4cyjpwvT/8ABi/+VHnzxOAo6ypJf16H0Bc/s8eCLh941HVY/aK6UD/0Cq4/Zn8AgY/tTWMYxzdp/wDEV8j6n4x1J2kha9nP+15p/wAa5a88ZaxPOiR6hdeWzbX3TN/311ran4i+HFf/AJpen/4MX/yo4qmdZfTdlSX3/wDAPubRf2cfAeh3K3Vvf6rIy9pbpcH8kBrudCsbfw7fLf6cmHUABX5HHevFfhx/wUB8P/sw/CbRfDPg+K0vboWbf2neXdos5mZv9+ty9/4Kk6N4n8FC70qO1h1Bfv8A2e3Cttrkn4k+HUXpwnTf/cRf/Kj0qeLy1K9op+v/AAD6D1L42+OdV0X+wLme3FtkYRIyOn/AsfpX05/wS8/aiubT4lR/APxDooZPENrJBpF7ZqQ0U6RvIfO3PypVWAKjIbHGCSPx/P7WfirxL4qPiPxD4gvrmeWXazXU+EZf4V219u/so/FJ/AHxL8HfFVSD9mMVycNgESQkHn6Oa+jyvFeH/HvDmcOlkUMLUwmHnVjOM+aXMoykrWhG1nHXe60sbYephcdSnToqz/qx+hPin9rrwf4B+IF78IvEOpLb3bRbYmml+6u7b92vP/E3jLSfiRpMvhW8mWaxVvkkZfmkb+Fq8E/4Kha54Y1r4Xr+1F4Pv47TWNNv4/tUa/6yaFvvKteN/s2/tOX/AIkuIEu9TuJ5mdVlWR/lb/dr+Rlzyj7WD92R61HD4eUOWovePqr4E/s9+LU8bPNo+pXFxEt/t+zzfd2/wr838NfUd5rGq+GvDcOlXmiRw3bJuna3Tay/w/LVD9kb+yNb0K31Sa2VZWbczRv+93f3mruPjSumqxSKzb5YmX5W+ZqdaMY0HJfEeVWlKOMVM8K8X302pXn/ABMkaTb/ABNXhX7a3iC80f8AZ1ltoYvJs9Y1GGCBZIt3mSK38O7+7X0N4k0220vRvt80bDd96T7rL/s18b/t2eMLzxJ4g0TwBczSS2Ol27XirDcbo1kkX5fl/vUcNYf6xm8ZS+yXneI9nl7ivtHz5a6RebW/fRhlTd+8StbTYXbZDbP+98r723/2anabazW80XmzfIz7WZU+bbV+G33XSuNvnNL/AH/4a/WZRhL3T4vDx93UntbeFbd32eUy/N++/irSW3ddqP8AeZfn8tvlqGGzcRs83lojP8ke6tOxsy37lHX5tq1PsY/EejGXL7plalpcLbpZtqN/EzfN/wACrI1HTXZfOSaTf/Ev8K//AGNdj/Zb+S0I3RFflaNvmqCbQ5mt1mFm3zJ/EnytWlOmpS0OeVT7J51daKjQO/kwzL8r/M+5mrB17QftDNc71j/56/3K9LvtFhj+eEbxIu1FVVrm7/QfMhZUtsKvzN5la06f2jzcRV908m1vQZomaaHb++2/Nv3ba5bWLV4VdEds/d3N92vWde0N47OW58narPu27drL/wDY1xuqaK8Lf8ey7Wbc8a/3a6adOMjyKlM861bSxHCR8zNGv3mf+Jq9S/ZCs/sdrr8ezH7237YzxJXIavo6bXR337m+SNq9B/Zjtmt4tdLoAWnh4H0ev03wjv8A6+4W/ap/6bmdnDUYxz+lb+9/6Sz5v/aAsUk+K3iQgr82sT7lK/7ZryTxBpPkyH522V7v8b9O874k+IZCmVbVLgHcvfca8p8TaS7Yfzm2/wAKtXxnEMf+FvFf9fJ/+lM+czDm+u1Y/wB6X5s801Sz8ubfsU7qghtWkVnxjbW9q2kvGzb3Vd3zKtUo9Jdm2P8AL8n3q8bl5feOMpw2TyZ+fH+1VuK1eGVJH3fd2otatjo825fITen+1WvY+H0umWaZGT/Z2/dqOX3dC5HP2ukzSSbI9zbf4a14NBmk2pD93+NWrpNN8NqyB4flRU+WtPS/DbztFcp87L975avl5/dI+E52z0Py9qeSw/2q2bfR9pXbw395q3F8PiOHe+1/+Bfdq9H4bm83Yib41/5bK/y7qylzco4mJY2tzCqf6Nv2/NtatjTbFJF3um5m+Zdv8NaFnod55vlodqt9/wAz5v8AgNbmm+F3VVZ7b97/ABL/AA1hKMzppmRb6G9xILxE+RZf4aW60d5GfEOP9n+GuysfD77Um8vCw/M1X/8AhG7U/f8Allk+ZGX+7XLU8ztpy5ZaHmFxpEMg8l7ba33vmqhLo26Rnd1G7/VMv8Nejal4XLSP8mX2feb7tYmpeH0t1SFId7b/APdrKPx+6dMZRl7p7akMLSB5tu5fuR7Pl/4FUFxbv9q2743iZfnZfvVsyWr21r9pSLa6syr538S1W8794k0MOz5NzfJurzJUT7SMTGuIUt5DDN8rSf
Mu1fvL/tNWdf2Kecr20ioNm7dJ/DXTahbJcLvSzZhIm5v96srULW2aNYfmZ1/h2/w0nS5dUKpLlic7q0c0cbpZzL/e3L826ufvo0C7PO5b70bV0lxZ7l3ui7du1N3y/LXM+II0s2MyfIrN95VqfYyUrHFUqR+0ZepTfdm379332/u1Ta4/5Y+cp3fxU/Up90jSQp+62/Nuqrat5KfvnjxsVUjVfu/7VbxjynnSlzS8jYs2maNfnyq/3V/hrZ09khh3I7Jt2sjN96uZjvNzeS6cK/8AC3zVu6Sv2p/JRN38XmL/ABVjWjyxKoyjI6zTbxGhS6mm27m2NJWlDqFt/qfmdY93y/drnLOR4YUSE4TYzbf7zVaW8ttxvPJbzW++y1zcvvanVzSNj+0N0LIlsyuqfd/vf7VUby4cR75tzr9146zrjUpmZXeZY23bWjZ/++ai86ZSZpp1x8y7d33WojGZUqkuh7D4eYN8F5GQYzpd1gf9/K+c2uEdlTfsl3fd+8rV9DeF23fAt2kO7Ok3e73/ANZXzVqV1+72I67Nu5l/2q/fvFayyLh6/wD0CQ/9JpnTn7k6OH/w/wCQ/ULhGVjN5jOr/wALVnX+sRhWSxdQn97+KorjUIUVnRFV2+/uesa81ZAqv/e+7X5DCjzHy0sRLk5TZt9SkuNrufmX+791qbc30PmL/pkjt95l/hjrnYdYeOZk879yz7U/u1dutQhmhDw7gm37tdlOPKYSr8pozXUdxGN+3O37yv8Aeao47qaJWS2fZtbayt/FWTDqW24fjejf3m+7T11Kb5od+U/vMtb8suU4albmlc2luEaTZ0H8G5qsLfTKPJR9jf3q52S+e14++f4Ny063v/NV/wC8z/8AAqcpfZFKp7v946iPUsMUd1fd9+rNvqTzM/zrsh+6q1zUNwZG8t3Yj+OT7vzVo2d88jb/ADFwybdy1zVJTN4/zHV2OpbpVCXP3k+Tb8taul6pt2qjybl+bcqfLXJrqCWsMUe9f/Zv9mtXS7yZV3vMrbfm+Vfu/wB6uKodtH4jt7fVJmhT9yoT722N9rVpLrW+3857zczOqvIyf7NcVZ6pDcKyedtVf+ejVYbVHMaOkzMzfNtj+WOvHrLm1Pao1OWB2C6ptVLmNP3jK3zM1Mk1TLM73O/7vmqtc8uvPDZ/67BkTZ/lqkXVpolb/pomzatRGnIupUgdCtw8krI7ttm+5SrdWcarvm+dV+dY03Vi2987xqjvIpV9v+1t/vVJb3CXEh+fzd25Xm27dyrUezI9pD4S3NdJdq800rNE391dtYV800sazW0jJ95XVv71bWyGaFX37hGn+r3feWqN3bvHC7vtZG2/u4/vVcY8vuhJcxzupzX7R/Y3O9933f8AZrm9Qt/mmSaZtq7WRW/irsL6F4W87yZMMn3v9quf1zT0kk8t4d3+0q11UakY+6jklR5tZHL6havDJshdn/2v4dtYmpfKzTbPmX5UbZ8tdRq1r5e77u1fvVgatMgGx+iv8+3+KuuNTmmOOH5o8py19bzbmf7rfwKy1n6hbO8bBPmffu+aty+UyD/U/K3/AI7WXf2TyK3zso+9XbGXumNTD8pjzK8ch9KW1t3jbfNHt3fNVjyUbO/5v4qljtQzbHfedvy1pzcpz+xn8RY09X+byUYN/eb7tdDpdqlrcRbE+b7rMv3aztNtUh2edCwbZu/3q6PS4Ukj+eFW3fcZkrz61aB0U6Muc1dPhRdjpu3fd/2d1dDZxzRsm+RQ+z7ypWPYrCIvJy3zfc2/w1sWcfyMrzM/mfNu/u15sn73M4ndGPL8JrWavHH8k33v4mX+KtXT1DK37rfFGnzMv96s+xaa48uFNp2pt+X+KtSxV2/0ZwuPuurVjKXu8x1R980IUfa8Lw7yzbauTrM0cUybc/d27vu0Wq+XiaaH5fKbbHGn3lp+n2u0lEhb7/3mrl974oHSuWOjkWbPzrVUdEXds+fd81WrNXmZJrZNjbf3sci7lqOzhmhQu8KuJN33m/8AQa19Lsd0iujtsXa77f4V/u1pCXvSOyPN7L3S/pdq8W3y4Y9n3ZW+7/3zW5p+moq/vk+78vy/3araLps1uqrNc74t7NLuXduWugtbNJpFeJFcQ/LXp0o+5eJl8MinHYvJMuxP9Z8vzVp6PpH7v5+rfcXft27atLp6RhU3q5X7kcK7latezsvL2vMjb/7u3+L/ANlrrhKNMcpSlIrf2Z5liNm1U2NvVv4akt9JLfuYduzbu3N/erYt9N87fNMOI2+Vl/ib+7tq3JH9ojX7TtRdi/KyV3R5Kkipe7H3jnV0Pdcb/lZvl27vvVV1DR9sgR0ZQrfvWb5lau1hs7b7Q0yWfnPG219r/wAVMutJdpi8f3lX5GZ/l/4FWsfdkY1pLk948x1Lw7H57hE4X+Ff9r/ZrDvtO8jbbTOsI2/uvlr03UNB8qOW8dP9ptqfe3fxbq5bxBodnJL8/Rv9VtrlxFSPwnnxlCUzzvUNNk8yOKGNVbc3y/e3f7VTeHfh7r3izWo9B0S2a4mvH8q1ht4mkaaT+7tWuo0LwFqXjTWLfw9pWiXE13cOsEEdvFueRmav2S/4Jgf8E0fDH7Nnhiz+J3xLsYb7xdcRLJBG8S7NNVl+6v8A00/2q4oyVSrGETnx2NpYWlJs+f8A9gD/AIINaPdaNafEf9ryBnE0Ucll4ahfDNH95fOb+H/dr9B7X4f+Cvg54XTwV8NPB+n6Ho8UW2Kz0m1WJdv+038VeoMqbCBXnvxm12DRrAtM+0fxf7Vc3EK+rYHT5nzeBrVMXjo855n4x1+3tsrEGl2/8865STxBCzKdi7G+b723a1ZXiv4laJBIYUv4dzK37uR9tcRJ4uttVulvBqXlIr7dqv8AK1fkmI9mpn7Fl2DoRpe8z0m78SQiEfZrjLbf9X/do0/xZpzolvczNE7My/N92vL9Q8bY02Qw3MLlpV23CvuVVqjp/izVbP8A0bUvLzHL961b5WX+GuX2nLC6PVjl9CUfiPZriaCKY3KHzpP+WSxvS/20n2WS2uYVlST5fJkTcrf8BavGbHx5r3h23vtS1XWPt8Mcu6KG3i2yQru+7/tVtW/xOmvI5X8hlVov9HkZvvf7taU6s1Exll8Ho3cpfG79h79i79oBoj8YPgjpbX7RNEuraOn2WdVb+80f3mr4u+N//BB3SNHvH1X9mT43yXULK3kaD4si2tu/hVZl/wDQmr6/1r4qPafZg6NdPJ8sse/ay/8AxVTQfEabT1uEnuY5Ujt2ZPJl3MrV6WGz/HUbwvzGdHDTwsuejOSl+B+O/wAbP2Vfj9+zfNFD8Y/hvfaYm/Yl9Cvm2kkitt/1i/LXDXFuG2wwzKV/jbf91a/dSPxponjrSx4M8c6VY6lpVwn+n6feRK8Uy/8AAq+Mv22f+CS3hHVLCb4xfsS6kbcKjPf+A9Vus+d/e+yP/wC02r6TA5thcauSXuz7Ho0c/qxfs8XHT+ZfqfnrdfuZAg8tlV22M33v/wBmse+WG4bdvVdr/PWz4m0/VfDuvXPhvxVpVxpWo
Wcuy60++t/Llt2/2lrHutn2p0f5D/E38Neh9qxljcRSr+9CXumX8m50toWHz7mbbu20v2G5WbZM6qyp/f8Alar9rH5URTbhl+6396plt0mm3o7bf9patS5ZRsfHYyn7TmRX/s879iW+3zNvzU5bPdCzJ8qr8qLHWhDZzbQ7wqgXds+bczU63ieNj5MPyyfP833t1Epe0PI9nGNjEvrHy43mcNu2bVWsXULV418wR7WX+Jq6y83rMd9tyzf71YOoW94uoSvsjxvXb83/AI9XRHl5tCfZ+6c7cH7ZiZEX+9uZPmZqqLHuk+f76/M6/wB2tK8017iR7nzvm3/xf+y1TbT5oZGm2K5k+5/drpjUjzcsjj9nIls43vLhXd2/dsu1Vau40m3RdtztU+Ym35q47SLV47hHkh2uv3o67jwrb7m+0yfNtT/Vt91q1jL3NCffOi0m1RWG92RV+Vmat/SVeQN9sdfJVtiLN8ysv+zUOhafDN/pM0zLu2t5ez71dDp9i8sf75I/OZ/kj2fw/wCzWkTjqxl8UR9qrRshfzHTYu+Rf7392us0Nd3h5FBPKP8Ae69TWHp9j9nkV5oWVlba3+0tdHZIY9OVGIYhSCVOQTzmv3TwPi/7azCX/ULU/wDSoHu8Nz/f1V/cf5ozUhmW6L7/AJpvvtJ/6DVuNoZo1mgRVf5vu/8AxNPhhWGTzpkm+58u1NzbqfpNjtdrxU+f+JW/hr8ajT92585HFDLiN7m3Xypmf7y7Vb+GsfXLebyXd3Zm+6jKn3a6C+V7aFXl8tP+eUcaVgeJp3s7d32KAvzbf4q3jROyOYe9Y4DxUqNayOlyzLv2/wB3c1eYeKrpJLr50ZVVtrxr/DXpfi5PMhlSFI96/NtX7q15h4q3sxn+6rfM/wAnzM1dVGMInTDMOWNkYd5JuXekzRvu+b+KoGum3K/3V/iaq1xdXKyOm3dt++1V5fOZW+fC/e2q1dUfdicWJzKUpDru42yM6PGRv+9WPqU3mSedvZTs/v8Ay1PfXDyDy0fDN9xttZl8z/Km/d/e2/xVpKJ5dbGc3ulZZvLk+cKy/wAVT6XfbZmk+b/4ms+6ZJGCb1H+ytLZ3CfKm/b8/wB3+9WUo8xx/Wpc2h0+mz7ZCiOx3fwt/DXQafqSRxgvz/erjrW6SOPY/wB37v8Au1uaTcfvPJeeuWUeVs6KOI/mOr0+4hgk3w/8Cratb7z5vOeZmZfvfL8rVyNnefMmblSrfL/s1uWOpOZE+dW3LXFUj9o9CjWlsdj4fvPtEfnO6p833V+9XQWerfZ5A6PIybNqN/drhtPupvkwY1Hn/PIr7WX/AIDW9Y6ojM0cm3zf4JJP/Qax5Ym/tOaNj6W/ZF8Vf2l8QtK0HW/EO+1k1SNGhV9sX3q/Xb9r74e+H9Z8J+HvEtkzEeFrWJrWRW+WNWjxX4a/AnUrmHxxbTWcMnmNcQtF/F8277y/3a/Yz4i+NPFWtfstaD4q8T6fOsGsWq2cDN8v7yNfl/8AQa8PN8LX9leEeaJ6GT16EsbBTlyyufLXxx8VQ/8ACSBHeSWW62/dbdtrqfgvZ2Gn2MmL9T5lvuZVavF/id4sfT5Gmd2edpVWWaR/mXa33a7X4V+Mkh8M/b5rZYUZGRmZtv8AwKvz+o58vKfqdFx9pZnOftAePLDwrqypYXmfOn2Iqxf3v4q4m1+MGlaTp8t5fpsiji2v/emrjfj58WJv7cub+51KF910yxLs2syr/FXzr42+MlyrN9muZPMV2bdv2qu6vWwdGXLFI8zMqkOaTNb9pD4wQ61eSXXnSS7Xbyrf7vkrXyz4m8QzXl083nf8tWZVb+Guk+I3jq/1dX+0zSOzP88m771eYa9rgW48lNufup/tV9Hh6fNLlPgMxxyjLkLPmXOrXC2dr5hdn27q9a+GvwF03UrEf29tt3b5vMk+ZVrkvAdvo+kWcOq6leR+dJ/D/dr0HS/FifYXSG88pN23cv3q9KVSNP3IbnnUY+0lz1TE+JX7H9zqelteeGNYhl2/fhWvErj4K+PPDd43+gSFVba7R7mr6x8N+MLaxhR01Nm3L86t93dWlofiDR7fWIRNp9vMjfM7Mn8K/M1XTx8oR5ZRMsRgYzqc0JHjfwD/AGcfiR8VtcTw94e8MX15eQv88KxfMv8AtfNX2bf/AAw+I9/4Lj+Evg/daeJYraGwi+XJimi2rIMD0CPXef8ABLv49+Fda/a41WbVbW1SG4s1gt2ZFVV2r/dr074NXmk/8N8C/wBQt0ktD4x1N2jLfKV/0gjn8q/Y/CXFUa2U8SrltbCTv6ctQ+k4bjVpUa8m/s6fieB/Hr9jX9oHRf2XX0a78YXGqPI0dxq/mbmdo1+7GtfL3wV8Val4B8UR2E/mW81vOq/MzfKv+7X7+fH7SfDHijwf/ZWj6bCkFwu+VY13bv7u6vxT/wCCj/w3h/Z9/aKtNbs7Nrey1iVtyqvyLIq/3q/nn2dCdL2dM6KebV41Y1JS8j9LP+Cf/wAZH1LS7W1Fzsm2f6xpf9Ztr6f8Va/ZeI/9PuUVHZtyyN91a/LT/gnv8YodWkhtt8aOrbEkWX5q/QbRdal+xw2d75yeXErfvPutXz8uajTlCZ9Th5fWK/tS18WJIbXQYbaZ98HzNLtTd/3ytfml4w8RW3jLx9rGtveb/Mv5EiZm+VY1+VVr74+O3i59P8A6hqt5NthsbCaVVX5Wb5f4a/OHw/fvJZwwzbklm3Ovyf3m3fNX1PCGFipTqng8RYiPtYUjbs2hkhV0RXZk2vIv3Vq3Y2r28wMKLI397f8ALtqG3unbZNv3tu3eWv8A6FWto9vux5033l3fc/8AHa+85InjUakeTWRoWOn/AGrEyOy7XVvu/K1aEGnpIzzpMp3bm3fd/wC+aZp9q8iv+53N8rJGzfL/ALtbFrZzKzFE/wBd8m1U+7/u1py+4aSrc2sfskFnpb/fuSqI3yqv+7/FVmTT3/2m2pt2r/7LWhptil1tmufluG/iZ921au29vJJbNM6ZH8G5fvVrTjDYx9p7vMzjtQ8PwyMUhdYi23ytyfN/u1z2peHZrhZofs27b/tV6Hc6a9xJ8ltuib7+75tq1Tbw3NHMvkosKr8ybauNP4TmqS5o+h49rHh3c7w52uybdrfwtXJa54Z+8iJ5bq/9zbur3LU/CrpcSvsb92+5ZF/irlNU8JpcSPC9hI7796SN/DW1OPvHj1qh4xrHhHMZhRFxJ/EtdD8EtKGlDVI9pBZ4c+nAfpXSah4ddt1sk0e6N23KsTU7wxpLaWZy2QZQjFG6r1r9M8J1/wAZ5hX5VP8A03M6+GakZ8QUv+3v/SWfP3xe8NJdeMdYmhdhNLqMhTC/7RryfxN4T2tvuZmO776qvy19K+P/AA3Nc69eTblbzZ34H8K5/irzPxR4UtmWV3tsiRq+Nz+N84xX/Xyf/pTPnMfL/bqv+KX5s+ePEGjuZm5xt/h2VQt9Jm3K6IrN/tf3a9Z8TeD0VWuURh/s1zDeE7lZPkTf/stXhSic3MjK03R52Vk+X5v4q6bS9BRdruiq
du35av6LoM7L89tt/urXT6P4bdpF3plF+9HWXvS90rmM3R/CP7tXg2oWet2LwPbQyb02uqxfPtX5lrp9F8N20atvhkd/7u77tbdrocLTBHSSIt8zxx/equXlkZHBx+Ddyslmm5Nu7cy/NU9n4Zm270g+X7u1V/ir0q38N/a0byT8kcq7vk2s1aVv4Pe4X5EYf7Ozaq0csJEylM860/wS/wAvyNvXb/q23bq6LTfDKK+zydzr8qrJXoOm/D37ORMlg3zffmb5t3+1trX0rwKjXUk0ztt+780X3m/hrOVM2jUlE4HT/CLlhM9mw+XbtX7tXJPA9z9o2THYrfLub7q16NF4LS6VEHyGN/uq1adp4HsxGLZ7NijJ95q5qlOR1RqRPINS8EOqv9mhWVfK3Ksf8TLXNat8Pf3J89PMk+83+zX0JdeBbYLLM8MYeOL5WZPmWsLWvA8MjP8AOq+Yu35Yvut/tVEqfMbRqfynKQq6xqiSbW3tvaR/lWoJN9psRLbeJmb95G3y/wDAqJrh7ebHzGJlZU3fdX+7WfcahcybtiNlX+f5/u153LKJ+gx7k0cfnTK77c7mZmV6rapZw+T/AMfKxBv73/stTx3bvvezEKPs+X/d/wB6o7i6hffLc2yod6ruVt22q9nyrQupU93lkc9qVm7QvvSMoybfm/h/2q4/xEqNHMj7f7vmR/3a7e+leS1dLPbvbds8z7rLXHeKIY5tyfZsKr/P/D/vbaz9ieTWjy+8cPqTeXJs3sRSxzWzbfX7vzfxUupLFHmZJmRpPmT/AHapfbELIkPyv/z02/drKUeaBy832jbsZE2kof8AvqtjQ2kjkPOxPvLurnNNmkjVIXnXH95vvVq2d0kjCaF2YbG+Vqz9n7vvExknPmOh+3Ha77ONn/AqsrI8kLJ9xfK+833V3Viw6hDNC9tv3fut23+7/wACqWK6hhYW2/5VTb8zbqj2c+W3KXCpyyuWLiR7eP8A0lN+3+L/ANmrO1G+mjs2SF9xVdybvvN/tVYvrrcvk7ONyqjM9ZerSOtw77NqMm3ctVGMuawqlT+U978FzrP+zs06twdFvMH/AL+18q3lzDHEPsyMu35d26vqbwQY/wDhm19jZUaJejP080V8malMk2dkPz/e2/3Vr968VoXyTIP+wWH/AKTA9DP5JUMLf+Rfkindas/2hYXdWP8AeWs3UL7ywV37tvy1DfXXkyO/mY/u1kXmqpuw82HZv4q/I6R8TWrcpZutQSPbsdgn3tu+rEOveXbhEfd/tL91q56bUk3F32qFam298nzP527+8q1tHzOb2k/iOqh1JJJFeF1fd/47U39pbleFvutxu/2a5nS5N0i7H2lf7z1rR3CfKh3H5K1+GRHNzGqbzzmV33bl+/8APSw3E0395d3ytVKFk8zf/A38X8VXLRZ9rJv+9WUpFU/i0NKzkf8AjPzfw7auWt15cfycf7P92s2CZ4ZBDN93+9/DV21bd9x2K/drGUf5jvpx6m5DcLMyb9qKv8S/3qvWd9tmHz/KqtuXd8rVh2+FVZJ5lRGb5a0FvCxKb1Td91f4q5pRgd1Pmj7xt2t062aOj5kbds+T/wAdardnqMaQ7E27v4925lrm/t00e75Nu75vmq3Y6h8qI7L8rbkjrz60Tuo1I8x0C3DzbbK53Oi/d2p/47V5bj/SEmtvMRvvK23dWDHcTLH88zMv8XzVow3Tqqu4bH8ar91azlGXLoaVOU3JLqZSgQMoaX96zL96rdrIk0myFF/h/eR/8tKyLOZJr1EmSR0b5krUtYZo1M0PyIv/AH1urCUY8vxD+KZdWPz4diOqbX+8v96i8h2t9mR28qR12bfm+anGbdboju33P3rSfKtPVrxlGxGQqnyfJuXb/d3VnU54+6dMeWWxkXizLu8mXzPJRlRv4axNQt0khd0uWyqszLXS6hY+Ssps/LEf92P5vmrDurVwoguU/wBr5U20ox+0Wcvqdu6wujwqtxu3bpPu7dtcteWrxsXm5rufEVq6tshTc+35P7tctdWrzSO8MKvu+9t+Va66dT3blyjy7HKaxazblWN2Ufe2/wB2s26h3Mfn3N/erevLH7VMyP5ny/w1VuNPf+//ALq12U6nLoc/LzGB9lQbkT7rfxN96rljY4aMuvP92rbW6blQRqzL83zLV2xs/wDlm77lZN27+7SqVv5S6dGMfiLGm2Matv8AOVpf7ta9nCnmB4UZd397+Gq9nYwx+U/k5f7qsvzVqx2T2u5H6fe/4FXnSlE2lTgWre33SK+zDr8vy/xVqafC8c3nQpu3Ntb5Plqrbrtk3q+VkT+L5q1LGN1b53w/y7Y1p83LDQ5eX3y9p9rdbvkDff8Al+f7tb9nGgtxv3Ef6tNy/NurJ0+H7RtTfIEk++392tiyZI42tkmb5UVfMX5vm/3q4pXlL3Tqp+7ys0dNhmWREm2/IrL83y1pbf3iwpyv8W3+H/ZqnYx/apA86MzzL88jfdb/AHa2bRd8Z+dRti27WTbub+7WfNyxOmO4/wAmGSRZNm5mVmRm/hWtLS4UWWJ4dxXb8jM3y/8AAqo2zIyqkMMiyfxbvu1saLb3N5JFeQphV+VFV/lWrpxh8R0e0l9k6DTWhktv321om+55db+l6b+7R027JH3P/wDE1k6PapNEiTJ8zPu3M/y/8BrptJs/OZETb9752/vV6WHlze6c8qnNEsafpKQwuIYWBZtyNt+7WlbxusgdE3fLu+5T9PhuYY1h8xn2pteppLNI03zbirfLF5f8VdNPnLp1CXT5vm2eWwb+CSNvlqSPeszbHbZs2vti3f8AfVQW9ncxsz723r/Fs+X/AHa09LtLmGEvv80Kvz/Jt211xly+8VKX2h0Nl5y+dv8AvfcVk27f71T29i/l/Oih/vfN/FV61017hdlzNGwVF/1fys1W5rbFvsL+Vu/4FureM5ROSp70TktWhhlkV7kLtjdt8bfN/u1yOqad9qk+xzIzpvVn+WvQdUt4ZLaXf+7Vk/hTdXY/su/CVPFXixvGGt2dvNp8O37Gtw/+sZfvf7y152ZYilh6UqkzjjH2MD6N/wCCX/7IGm+CJE+NPxJtrV9YuNy6Tbybf9Fh2/eZf7zV+ieheI9KsLSO2u7lYxt+8z/LXxLp3xqs/DP+gabND5zQbPLb7qt/D/47XP8Ai79rTUtr6gqSW4X9xFIt1uVmVf4V/hr4mjm2JjjPbQPMx1OOJgoyP0Sutc023sft7XKiLs+75a+Yv2sviheWWl6q+k38OIWy+5vurXzov7fHiSPQbTRL7WFjSafa0kn3dq/8tF/2t1eZ/tIfHh/EngiXxDo9y13Nbz+VqNxNcfNMrf7P8K17GMxk83oq552GpxwdXnOM8ffHC8utUkRNV87zF27l+ZW+b7u7+Fqx7X44X9nb/wDH5Mkkn3FX5lavEvHXxB022b+yra/aSZn37o/u/wDAazNN+IU1hZ/Zv7S8pVfa/wDFXx9XLakp8qPtctziVOPxH1N4d+Oty9vs1W5VE2fuo/u7v9qpbz4i3PiSN7Cw1j7P8is
jN95a+XNP+KUN9Iba5hZHjT5ZmlVfmWut0v4lQ+IIYrl9eVJWRvN2t821fu15OIwU6J9TRzql7I+jtB+IFta2z6bf6/5vz/vfl/8AHant/iRZ2ObOzv5PJ83dAzPt2/7NfPA8aX9vavDYXKjzt37xvm+b/ZrO1b4ieJNF0+K5h1iSRd/meTMm35vuttasPqs3H3RfXnUlpI+jdS+KVtqEySJNveFvk/u/8CoX4kaba3CTWfy+c6q21/vbv4mr500/4mQ6q6PDeMjqm64Vfl3VteH9evLr7keXhRl85flX73ys1ZRw/vHVHE+0peZ9IaT460pZP9JuWhmWVWabdu/4DXf+GfiBp6/Z3R2YyM32eRXX7v8Aer5f0/xU8McRmdcM22VYV/i/vVreD/HF5HYyLbP5Kea3kbf+edTOi1PmpnHWrQ5PhPRv2xv2J/gb+3F4fhtprlfDnxCVW/snxdGi+VI38MNz/eVv738Nfk38afg38Uf2efiJefCX42eFZNI1rT7hkRd3yXi/wzQt/wAtI2/vV+qdr8UvtVpbw/bJPI2/e2bW/wB2q37Snwx+HP7ZHwVk+G3xIto/7b02Bn8F+JpF/wBJ0+Zf+WbSfeaNv7rV9nkub1eT6vivlI8lVquGnzUvh/lPyUjhfaqbMn+D56u2qoqNC77lX7yt/D/s1qeNPh74n+GvjC/8DeMLNbe/sbpom/hWRf4ZF/3qoWqhV+f5gv3/AOKvYqPl0HKpGt7yLCtNJCqedt+dWX/apbXfNI01zDsVZdibqW3/AHKu8NywMn+xu21b8iGO3EKJvDfNub+GnGVonHKJm3kkMMZaFG/3W+9WPfW+6KV4fk2/wsv3q32hdWHmCPym+batULrT08kIhbDS/Oq/w1pzcsomfvnMzWIhX+JWb5vLVPlaqf2N5I/O8na3zfLu+7XSXFmJI/kTDL/D/eqrdaW8cf3G2sn+srb2nv3MPZx5TK0uzuZFH2aRleT+L+Ja7LwvYzW8yQ7923/nolY2n6Y+4bJtn8MUiptb/erpdHsZI2VJnbdJt+b/AHa7sPyyPNrylG521jCnyl5vNVUX/V/NXT2rfZ7z/XL8qfIyp/DWT4fhhmhRLaaHaz7kjjT+L+Kt61t/9HVJX+Vm+TcnzV6NOnCR5dat2LVrawybrq5m3Iqbljb71XraJRp4iRNo2EADjFV/nWVU8vDMm1mZPl/4D/tVctolhgEcW7Azt39evev3DwSio5vj7f8AQLU/9Kge1wxO+Mrf9e5fmiG38mb/AFL7Bt2qvzNt/wB2rLbFtWS2i+Tdtbc/zf7O6oWjeSRVSFtq/LuVquNYpHMv77en/PNvlr8g5mfHRqGdqESXAXZcyD91t2/wrWTfRzXUZ3uxTyt27+FttdDdQzX37mEsWj+/5ifw1j3yzeWqI8eyNNr7qs0jWl0PPfFy+XGUuX2M0X3q8r8UWvmW3nWbq0avuRmdvvV6x4wkSaFJkhX5mb7vzLu/vV5n4kjfa6Xnkl/vfu/lVWqoxlylxxB57fLLJJ8j7lb/AMdaq91JtjV3hw6/eZa1tQaG2ZvkXdu+fbWYypMzok+5W/i/u11fD7oqlafIY01w8kbd/mXYzJ93bWdfRvH++SH5meugutH8yRUcNhk27ao3WjuqMiRt8rbd33lWq5uX3Tikc7Mz+Z5OzP8AEtLC25jsTeG/h/2q0brR3WYL8zfLtqKPTdrNDs+9/EtTUj2IiuUfp8m2ZXmfG3+8v3q2rG6SGRJv4Nn3l/h/3qy/LRfkdGZl/hq3b/KrJHu/6Zba56kTppyN6zkfbvfaV27kjrStbrzI/n3ASLtrB02V7XbHNMy/wvurVt9nll3dv4di15eI5vhPUoy9z3TZsdWuYZEcJ91PvL/DXQaXN9ujWaYMwV1bb/7NXKwxzSLvR/mj+5tevUPgh8P7rxVqBd4WI37POZdrVz4en7SdmaVK0cNSlKR0vw71T/hD9c03xPeOyPb3Su7btystfuP+y94p8Pftq/sFX/ws0q/3a94biWXTvMX978q+ZDIv+98y1+O/xi+G+meFPCqabC6veNF80avu2rtrf/4Jtf8ABSXxV+xj8aNO1LXLy4m0yGX7Lf2twzN9qsWb95/wJfvL/u17/wBXpKjyo+UhjKs8Z7W56D+0do+sWsl5Z3NnJDfWtxtlj3/NHJ/Fup3wj/tWbwvc20yed+63xNv+Zdq/NXvP/BULQ/h/rnxc0n4/fCrVrS78LfELRo72C5g/1Zm2/Mvy/wAVeLfD3R0ghm037fsga3byF+6u3b91v71fk2d4L6pinBfCfuuS5j9fwEKq+L7R8V/tKfEy2s/Gl/C7srWtw0XlyJ825fvV89+IPHX26aV0ud7M7Nu316X+3pZ3/hn4jX9qjttml3Juf7zV82x6k/nb3dldf4f71e5luDpyoRkfOZ3mVWFeVI321Ca6mZ33YX/x6uY8U3U8OpLMi/w/I1aWm34mZUm+f/Zql4xXf5fOF+7ur0KEPZ1/ePjsRUc46SK9rr2p3GyFPm2/M9dt4Y1p5nSG5v8Ayv8AelrlfDekw+XvT7/8H+1XfeHdL8K68YrfWLNYivy+cvystd0vZSi00PC06svikd74T1DwlJCs154thVY0X5Wf5m/2a+hfgr8Ifhp8SvBtzrdt4wt5b2OJlt4Y/mbd/tV8leIv2edN8QN9p8E6rIVVdzR/aK1fhL+zr+1FHqyp4GuJi0m7b5dxt3bfm+7Xm1sLKXvUqp9HhYx5eSVJ/wCI+n/2bfgTrej/ABqS80fUoRcWsv8ArGn27v8AZVa988Bz6tp3xzjmjvSl5HqlzunzuO/EgLe/evizwX8J/wBtjUtQS80SHUIrlpWiaaOXazSbvurX1p8KZPiJ4d8WaSNPsBf+JrYiOSCVgPNuAhWTJ+u41+w+EWHqU8k4mlJ3vg5/+kVD6TK8PRhhq0YJpuL39GfYVn+0vNa6f/ZWvPI80Nvt27ttfAf/AAXU8eeG/GXw08Ia14e1JRPZ62Fe3+XzG3feavYf2vta8eeF/hTeeLbm5sbPUoYvPnWO48xo2/u7q/Kjx78RPHfxm16O48Z6xJfeXLuih3MyLX865Rhq8sYq0pe5E+TxUvY/up/Ez6G/4J8/Fi58P+Orezmv1hSSXdLI3zeZ/s/71frl8NfF02oaHbagnnPaTW+7bJ8zV+Mn7LPhXVdP8aWEypnbcK21l+7X61fAW+k/4Re1d3kaOGBV8lv4mrgzWUfrH7r7R9lw/UlChzTMf/goN8RLPwr8DW0HTZv9N1q6js3kkf8A1MLfM21f738NfH2gtCot9jrn/noy/dWvSf27Pie/jv4vR+GNKdW03Q7f/Slb70lwzfLt/wB1a850GB1ZHh8v5X3bpK/SOHcH9Xy+PN9o+RznGfWcwm4nV6eEaMJC+/yfm8zZ95a6DR7Uz4mdGRF5RVf5mrH0PHyB5oy//LXb91v9mum0eGGGRXSGP5n+Zq+kjH3Tzo4rl92RrabHNbtv+zM8jLsRW/iroLKFlUJs/dsny/xbWrN0+18zbc/vN2
zcrL91a6Ox0iFY1WF/lb5nZf71aU6f2jf61L7JHDb/AGO8V/JUiRdqSM+3b/wGtCO1ea3MqJhvlR2jdV/8dp0OnpcNI9z9+OX5a1bXTxCqIlnt27dm1KvllzEVMRKMTKk0m8imST5R5bfP/u1X/sXd8k0EkXlvuRli3LJXWRaWjTL/AKNl2X7yp/49VyPQ3+SH7TuSP7m7+KumnT9w46mKnLY4HUvDu6Sa5tofJ8x9m3b/AOPVz+reGZt0kzx4ddy/N/er1680CGaNIXhYuv8AyzZ/masq48Lu0m+5dd+9m+7t21fs2cFatKR4nqHg94V87ydiyN+9kjXbuauc8SaONLulcSu3m54cYxj0/Ovcb7w+8W/f/E/ysyfw15d8YbD7Ff2mVILCTORjOCtfpHhUpLjnCX7VP/Tcz0+E534jpL/F/wCks8w1zw2t7dvOu5CRuKg431xXirwWHjkhjeOL+Lds3V7n/wAIzHd6Zb3UsDfPbj5x9K53WfB73CtCkMcoj3Oqtu/76r4/O4RlnWJX/Tyf/pTPn8fKX1+q/wC9L82fOfiDwTM1rsmdWeOVl/dxfMy/w7q5q48CvaM0/wBjYtsVv7q19Bax4NmMn7mKSbd/07/N8tc/qnw7trqQLNYNGVbft3turxJR5TllL+U8r03wrNHcbFhZ3Xarrt+7XV+GfDdreTLLc7Yk+b5Wrr7HwXGsfzpJv+9t27d1XdP8L20EZ2QMx+bZt/irl5eU1jIzLTw+kcafuVYM3yMq1vab4PhmukuUjVH3bHmki/hrU0nRZrfZ9mDFWRdkbfdWu00fw+7bYUeOWNfmeRfvbqn/ABEylE5O18OusabLb70v3v8A0Guk0Pwak2zfD80bbpY5E+9/wKur0/wyiuiXUMexfmSFf/Qq6PR/CMMiP5KfeRWdmp+zM5S5TkLXwTttB5z7vm3fu/4v9mtO38Jwxsv+h+bt+/JXaadoLtmZEX5fu/w1Ym0naqOnmbI02bV+61EuX4Qj/eOQtvB8MCvss4ZJvN3bvu1pW+jwyM3yKsUafdb5a3bjR5rhd7pnc3/j1OuLNJF2TIpb+FV/u1zy973jeMjmptHtpoXS2dS8ifxJXO6x4fhWx+SHb97zV/vV38lv5M37lF2/dVtn/oVYviKzSeQo6fdTajL83zNUcv2jXmPkbWNcRY2htod235E+b7q/3qZb3FmzJ8kfzfM0n96sNdUfcHmffIq7f3fy1PbzOvzo8iD70Ue3+GvPjH3veP1SXJE3ZpnhUPDe4Rl3bdv/AI7UV1HDuMMyZT7+5f4v9mqXnSNMNibQy7fmb+KmyTXLXSu7L+7/AL1KXuyFUlSUSWSOzyyfMjqv3WSuQ8UW7tHI95MyvJ8ybn+6v92uq1C5TzBC9zlV++y/ermvGF0kkMjo6uioyJuX5qiXunj4qVP3kedeIfmUOhyivt3bKyZLjcySbNp37dy/w1d8QTTeTvR8u331Ws6JoWVt7tv21ly/ZPMqSNKxa1kbzo/3rKnzLW5b3U0NuvkouyT/AL6WsfRtk0iIk25v4/krftbP5ofnZ/8AgFT7vwi5fd5izbrCsWya2bZtVvlqaSN7eIb4WQ/7vzU/R4YbfLumx1fc/wA3ytV64bbH/o0Pz7N25v4a05eWRXvS1MxpPM+eaaMhvuMyfdqlqUcKsiLNvT+Nv4a1Ly4hhRpkh3n+792szUWRkSVE2Mqbty1MYilyr7R7v4Kbd+zc7qSc6HenPr/ra+QNajmaNnR2+VtytX2B4Jbf+zgzJnnRL3H/AJFr5G1yRJLffIm3b975K/dvFLTJMh/7BY/+kwPU4jd8NhP8C/JHF6lNcjKO6kVj3Fx8xfYrCtfXNizP5P8AFWK1vt3vHu3L822vx+PvRPialQgkm8wLs+YNUdv+8k2D7392pfsb/wAH/AttTWdkI2MvzBl+b5lrWPUx5ZSkS2av52+ZOa2rOTdJtmTnZ/eqjb2Mqzecj8SfwtWrb2MLSfJ13f6ys5S5jSNORbtU8zZ2C7ldatQ/KyvLuyq7aZa2v2dVD7Xdm+etKOGFWWZ/MO7cvyr8tZylE6KdGew2G3kkb+//ALK1dhhaFV2W2TJt+VakjtZvs4e02iT5V+797+9V23sX3f6SF+ZNq7XrCVaB6MMORrbxrDv+bLN/ElEcm24Lvu/d/LuZank098/cY7t2756dHDDJbp97HyrurGVS5vGnKUhVH2mNfOfJ/wB6rNuu350+fy/4aS3tUVdkMf8AF8n+zWrY6R9nw/lq25fnrhlU5TvjRkyDTw7Lv+X7v3Wf71adizyTNv8A4k+8v8NMgs0VUdCwRvl/3q0rG33R7Jpv9rav8VZ+2Y5Yf4S7Yrtk3wjcjfJKq/e/3q0VZ4XXykklRfv/ACbW3VDptvtjVERQ3/j1a8NvIYV2TL8zs1Y7fZKVGUh2mqhU+c+d25nVv/Qatxvu27E2jYzfL/DTvsiR26uEy2+rCwolv5MM21lRtn+01c1SXMdNOjOPumVeWcLfI+4q38Uf8NZ+pWMdw2yH7m/bub+KupksZpFXYjFZFVfl+b+Gsy4jubiFZvO+bf8A98stR7TljzHTGicfrFq+Hd0+bdt2qvzbawtT0l4/4F+ZPvSV297ZpIvkuiu2/wCdt9Y+oWe5gjpwv8Vaxre6VKnJe9E4G80d4ZGmuU3hk3bqgm0fnznhVX27ol/vV1Wpaf5bKm9XDf8APP5t1VGsEWF4URQZF3bf4q3lWvuOOH9w5CTS3lja53+UW/iZfu1LZ6TN5hR33/7tb8mjoyrJ5P3fmVWpfsdssPzowlVfnVaca3vBKjH3bFW1t0hUJDym7a+2pYbzduREV9u7azLU726R4SFF+b5nkb+L/ZqvJJDHMrujIrfKv8VRF80uaUTmxXu7Fy286b/RvOVfm3eWq/eatXTV81R59ttbd8zM/wA1Ztq3lsJkfJbc23Z92r9jN5n75ZtjNL8jMn/oVFSUpXSOOKtys6HTVkgUpD8qK237/wAta+nxvbLvttvzSrv8usvSYUmJS5h81pPlRo/l2/8AAa19PjSSZf8Ax7+Hb/vVxSly6ndGP2TXsFFvCj/u2C7tyt97/eWtDTW2lHmdiZPvbqzre13N5MKZ3f8AjtaVsvmMz3Lsrt8v7uspcvNzo6I80tEadva2fmec7btrbtu/5v8Ad/3av6L/AKP/AKNNO2/733NqstZMdvdeZ+5mVkmT5vm/eVraQqW3l/aXaJf+mjbtrVtGQ4y97lOw0u3hbbMm1fL+bb/drrdHheaOJ4XaIsu3/e/2qwPDckMcfl3Pls3y7fk+auq0nTYYbtLxLmRHVNu3buVt1d9H3TnqS5tTQs99vIg370b5Nrf+hVakV5RG8O7asW1YasafDDMywpCzN97d/e/2auw2b+W6Rrt213U4++Tzcu8jLsbOZ4/PTl2+6y1tSW8lvGr+cwdXVmZaktdFeDdbIkmWdf8AdWrTae8Nsz2zqw+83+1XVGPNMmVSPKFrNcwsj71y3zeWybmZakkZAqzb5GRUZv3n3dzVUuIbmxZ5r
yGbfsXypFf5V/3lql4o8RWfg/Q31vU900K/JFaw/ekk/hXbSlLl94uMYxXvSKN9ND4g8TWPgy2RW+1Sr9tWN/3kcP8AEy17LD4q0Hw3odnYeGN0Z0+38pYdnyqqttryn4NxwtY3HjbUEmttavpWWW3mXb5duv3VX+7WX488dTW+oTCGZvObd5XlvtX73zV8Tm9aeOr8kdkeVPEc2sT0Hxt8cv7J26t9v8t/ueS3zK3+1Xm+tfFt5NQe2eZndpWZFhf723+KvKfEnjy/1y8ubm83JB96JZH21x3iDx9c2cLecP3sf3FVv4q5sPg5ROOVSXKetap8WrmwjSbWNS81Ld28qbY3mR7v7tYGvfFpNWt3mg1KaNJPmlhZvnavHZvipNdXT2015v8AM271b/0GsDVtehuJvkmZBub95u+Za9ajTlH3eWyOGpKB1WoeLn1bUvtKQyJLI0kXzP8A6v8Au1SbxNqumxqIUV5lXO6T+KuM1bxdCyuiQ/NHt+b+KRqqt4yubrY/nKzfdaP/ANlpyw/wyjua06nKdlF423OZriZWeSVju2fMv+zWxoPxceGNLB/JtkZVR/L+b+L/AGq8wm1ZJI/Jtkx5n3938LULLc2Z/fbW+X5GWuHEUL/EdsMRVhrzH0F4Z+I0M0Ys7O5mfbKzSrN/D/dZf7y11LeItS1JY7DUkjuUX/VNH8u3/wCxr500HXNSWSHvJ/e+8zNXq/g/XtVvL3fczfIvzKrP92vCxlNUavMfSZfWnWja51FnHeWeobEhYbn3Iv8AC3+81d/b2qaa0NtD9omtJkX95M3977yr/wACrN8O2dhqVuk2n2bJtVftDTfxN/ervrPww8mm/aUh8xY9q+XGm7y/9qvP5qUo27nuU8PV+yOXS0t3RLKbymXaqL/E1dXpscOmwpHbTxod3/LZflVdtZVvp5tdQ+021szRsm1ZPvfd/wBmtm38P2euaa/9pSyPtf51VtrLtopx5fcOfGe1ia2jww31nsuUwu9WuGVNq7f9mr9vpWpNfO9nNhPmaKNvvrU3hfR4brVrbR97XT3FrviVV3Mqr95Wr0K18B6bcLbm2muCI0ZXXbtXc396u+GDnU9654NbGSp/4j4n/bw+EPiHxJoo8T21tHcX2kp5vmNF89xH/wA893+z96vkCKJIZGhhDMVav12+JnwbTxJpFxol+i3iSRbdqxfvIY6/LH46fCPVfgP8dNR+HWpQyRWd5K15oi/3o2+Zl3V7GBxU6l6M1sRRxHs5afaMmGZFUPM/yr97bV1fs0cgR93975azbNkNyiIn97za0IZHlUfuVlb7vlsu1q7Obm909OMftDprPzmEP7tDHt+Vfvbf71Rf2fcw7d9mx+9tZfut/tVpL9j87yY03Lt+9tqOGPEx852+aJv4N22lGOpqYt1pO6aZN/zfxstQSaakil3mYsybYlb7rba17iOZZo3ebc3/AC13fLuprIn2cedCyuv8TJ93/gNdTjzanHzRjzGJZ2LzTYmhZFX+Gt3RrG5hvPOublWSP5ait1+0QOkM2/zG+eRmrV0m3uY5khhgkcL8v3PlavVwsZHjYqUY7nZeG1hWGNIdqM0XzeX95q6CxVGj+07G+Vlbc396uW0uORYUmR2V1l+9s210Me+aBkaZV/v/AD7d3+1XdGieLVle5pWq225538tZd2/bJ/FuqxHCsECxEkAIM4OSOKpQttkijmh3N5X3o/7v95qvp5YYFHyuchge3rX7d4JyvnWPX/ULU/8ASoHu8Ju+Lrf9e5fmiWOSGFv3cPmfN95X27W21ZdXMbTfaI0fyvut8zbv96q1vC8Kyu8zfvF3bl209FS3jLukkg2fMrfeWvx2NT3uU+R5BGuXbT0tZpvlX5t2/wDirn9WvJriF9kKtufanz/+hVp6hvaNXh+7s+aNfvNWNqkkMymF0w7fNuV605mTy++cP4uXZl5nZdv/AHytea+KI0ZZH353bVr0vXkebzUdMqyfvdzfNXCavpv2mQ21zDs8v5dv8X+zV85pGPKefXGlvcSOkKMzs+2n2eivHGd9ts28fKn8VdZHoLzXDpbfK0fy7m+XdVuLwwi2/l2yMz/xtW0ZQH7PmOQk0FG+583mfxVRk0RNmxE3/P8A6vZ83+9XosPhU3kaeZujZU+6qU5vC6KrDyNyt8q7lq+b7MROjOR5Y3h0LcSh4cTKnz+Z/dqhdaLNGo2PtbZ/dr1ibwTMsnkyp8jf3k2/8CrO1Lwn57ND9jZVj+X7n3qFKcTKVLl+JnmMGlvIpjc5f+Nmp8Gm7W37M7V2qq12eoeC0hzNbJlf/Hqzl09LdVR4Wdlf5NtTUpy5JWLp+6YwtfLX7jMZE+VZPurVmPeW8tOWj/i/hauq0P4X+IfE1wf7Htmd1TcsarXMXGk3mk3Ettfow2y7WXY25fm+auCVGcjeOKhT6nU/Dnw3c+JNaTSk6zOuyNf4q+pPhb4Zt/AVwlzebfssa/6VJv3KrL81eG/AfxF8N9H8caakOqxm5mlVU8xNu1v4t1e3ftZeLLDwv8N7/R/D2pbb68t2g8tZf9XuX/WV04fD+z96XxHmY3GVakuX7J4l8Xv2pFvviJfHStYjmhjuGT5m+9XCeI/G3/CZM2t200cVzG25I1+ZdqrXzzr1rdeH9Sf7TqvmNJu37X3V1PgvUr+O3+02E2/au371dP2ji5ZH1r8Cf24fFvh34er8AfHN5HPoUN19q0O4un3Np8jfejXd/DXvfgH4sJeXFo7uzxsisy/w/wC9X5oeINceRn3p8y17B+zv+0LHMraPqV5JHcwr87SS/K22vkOJsr+sRVWB+gcIZzDCz+rT+0d3/wAFJNDs9U8cHX7BJClxFu+58qttr46utJvIZC7/APfVfVvx4+JWm+PtJgvL+5meS3+Vmb5ty/wrXjLafpWpbkS5jG7721a87KKkqeGUJI789w8a2Jcos86jjmhbf/49Ud8v9oTrbRorbfvbv4q63WfCP2GRXhT727+Cqmn+FftTb3/75r1qctj5lU5RdpRGaTZ+XboiQqp/g/2aZezzW7P5M2GWunh0V7e3VHTdIvy7mqva+DY7++VJNyn+9975qrmhz8x2+zvDlicnY+MPEmjzYttVuE/6Zxv96vUPhT+1V8SPBeoQ3lhK22P5Uk3bW2/xVT0X4K2Gvaglm9ysKr97zH+9/wACr66/Zl/4Js/Cvxd9gufE2tyOlwu91jTcse7+KuLFVMM/dmehgaOb05Xpv3SP9m39tq2v/E1tYeJNKmBaVmi2y/xf7Ne4fC3WI7n41Wmuxnylmv55l77QyuQP1xXpvh3/AIJV+Bvh/pMV/wCGNQW4SFfNWaa1Vm+avNvh74eZPjrD4aT5TDqtxCMDpsEg/pX614QKP9h8T8r0+pT/APSKh9vldTFVaUvbb2MT/gpl4jh0H9nbX9Re0Yrcr5cG77rN/s1+cfwN8Dtq1y07pv3bXZq/Yn9rj9mf/heHwXvfCX2b52i3W+5/vN975a+Avhn+zT4t+H91qFh4hs5oXt/lTd8zN81fzvg8bSw+AnTU
vePl82w1SOOjKfwnT/AXwGYdct4fJjzG6s8nzbV/4FX1t46+Nln8IPha+qo8f2zylitVhb7szLtVtv8Adrx/4T+Gr3w/p/8Ab2sQ+THH+8lmb+FV/wDQq84+I3xGvvid4qmv7mRls4W8qwhjfb5i/wB5lroynAf2jX55fZMquYSw+F5IblGHUNSvtUudS1u/a7ubxmlnmb7zTN95q6PQbo/aFR7zc+3ci7Pu1z+k6fDDJvR5MK+75vm210uixw7ldPkWNfu7PmZq/UKPJCHLE+Xmm3zHY6XDDIpd4VCN8z/L8zN/s11+j2tz5yTbN0apufcv3v8AZrlNHU3Cqk1yzhfuRsn3f9qu20eK5VsTXKkKm5Pk+XdXfT96BjzS+0dT4Zt0Zt7usUTfMiyN8tdNpdnbX0KYRdituSRmrA8MqjPsSbLx/O2371dnpMc1xG29I03bdm3726t/hD20uhJDo7wxiF3jy3+1/DWnZ6clv89s8MkU0X3VX7tT2lm8zR3PkrEi/NtZPm3VdhhWFvOm8sKvzfLW5UqnYq2FmkbJ+5b5fvt/erYt9JtrjZMltCzfwrv+b/ep1nY/6R86fKzruVa1bPTXkZUd1AX7jfxbaIyOeUio2k20iuiQrGV/1W75qy7zw6jM7zc/w128empfWuIYdyq+1WqGbRYG8y5e2V/3u1JK6KfunBKoeX6x4bh2nzE81FX5V/irw/8AaR0/7Bf6SOf3kMrfP97qnWvqjWtBTa/7tf8AZVflr5z/AGxbOSz1HQRKSSYLjr7GOv0nwsSfG2FflU/9NyPa4Nk3xJRv2l/6Syp4f0Z7jwtp0iMyq9lDuCr975RWZqnhm8aZvN3INrOkka7VX/eWvR/BmjS3Pw20R44iC+nw7WK8fcFLqHhm5aU7I1dfu/7W6vj87Vs5xNv+fk//AEpni5hP/b6v+KX5s8b1bwukO7YjY2fJ/tVk3fgpGuE2JG/lpv8AMj/i/wD2a9i1HwqjL5IfY7fN92sa68LzQ7njRWCvt+X5q8OtGPKckZHmH/CJmNjvRWVvmWSo/wDhHoVt2uYYV2N8qN/dr0m50O2jWXZDsH3n/wBpqz28NyW+H8mMLI3+8qtXHKPMbxjy/CctpOhPHC7okZ8yL7sjbfmrstD8P7Fi2QqRJ/d/vU3S9DT7ZvmhZx8reX/tV12i6ci3CNNNs2/8s/4azkOT5iLSfD3lqrvbK4X/AJZqv3q6G08Mu3kuifN8rMu//wAdq7otrDJG6I+4fM/lr/yzaug0nSXupkfCpF8rNG3+1REzlzSMmHw26qERGJb+6ny0smhfZ48ojF/mXayf+PV19npm2NPJ5WP7jf3aJrNG3/aZPlbo395qmp/MEZfzHGyad5cex4ZG3Lt+b5m3f/E1n3Vr9lby3kUqybfL2/xV1uoafDHbj528xm2pH/C3/Aqy7rSoWvHh8lXPzL5f+1/s1hL3ionI3Fnsj2TbYtzbt2z7v+1WTqFukaskNgzuq7vMrurzw/MyxPCiqv3XXf8AMtZF5paR3iwzJvjb5nZW+6tTKoan5uQzPMu+H5X3/ulq1as8f7533bf+WLM25qwbG8vLVndP3X8UW371XrGd75ldyodfl3Vyy/mP1CWIjKldmurX7BPLTa/8KyN8rVJ9tigRtm4v/Eu/dUFmr/aPtME25mXZub5ttSSQJbun7ltzfKkmzbWceQ8+tiOaPuhcfvlM3ylV++2+uS8VXzq0lzDJt2/KitW9qV19lkOzy5EXlpF/vf7Vcf4gm85me56MrbmX+Gs5SPN5pc3McnqRupvuIu1v71Jptm/2pYLlG+/t/wBqpVt5pNk2/cPl+b+7W7o+mw7t72yuW/hb+H/gVc0pco6cfae8SabpdtDIltbQsVX5vm/iroLHT5FkXenDfMm7+9RpOlzRys/zY2fPt+9W9Db20i/ZpE2rIv3v4lpQ5feHyGa0fkqyfZt27+JUqX7P9ojVHtmfb8vyvt/76qb51kWL5dvm/eb73+ytQ3C3NxHM7wq3lt/q9u6r9n7uge0k5amZeW8KsOwb5mWsbUP3iy7NzN/AtbV15jRnzkkt1jT5I2i+7VC8kgjtGuUTft+X5WqqXNH3TOUYy8j27wVgfs1vg5xod70/7a18iaxE7Qsjuyov8VfXXgtz/wAM0ySSNk/2FfEn/v7XyJeM8xfZyNu7a33a/dPFBWybIf8AsFj/AOkwPW4jlGOFwl/5F+SOM1aF2b/XMG+9838VUfsczMkmznZ8+2trULcfaG3vgf3W+7Vfb919+Nv8Vfj0tj4upGUinDa7T88P3quLB5n7nr/fp0dn50y8bm3fPtrRsUTaZk+6r7W+So5kdVGiV4bXavyJurVsbG52xO6KgVtyMv8Ae/2qmgtoWcIm3ZH99lrSjsXZU2TNj5d9c9SpynVHBy7Edtpbr84RSd+5mq5b6fatI33sbPl3Vp2dm8sw8mP/AHG3/eqwumgR73fY27+KuaVTmjrI6qeFlzRK9lb/ALxSn/LNP87ql+ywvNG77nP3n8t/lX/ZqRbOZpNmxU2/xL/FVy3sHkbenlqv97+9XHKpKPwnpqhHm+EhjsYWO/8Aebo/vrJU0Nqkcmzydrt95Wq/BCYVh2bnP3WbZu3VZjt4WWHzn+Wb+JvvNtauOVaf2jtp4VfFIpw2MKws6Q/8B/8AiquQWNyzfvk+ZV3bV/u1fjtdsmx/4fmRauWOnzTEu9ssK7fvLUyre6dEcP7xRt7dEyny4j+/u/hq5Dp/lTJ5M3muvzOqrVhrGGObyZvmfYzJu+78taGmKnkyzOipIrbIt33qz9tLluFTD82hLp9jbLIqI+5mi+dtvzLV/TdNdpGhmRdrfLtb+6v/ALNRa2Lxqmx42+RV/wBpm/vVrWcbyqsKQxqy7vNZvvVnUrcsviD6tIS1s442WHyMhn+TdU66fNDI81zCyLJuaJamt7Xy1HnI3/Af4asMzrGn3nZV2rJWEqnve6bex5YxKE0ZVo5rZG3bdysr/eWqEweFghhVPvbtyVsXTQCBYYH2CP8Aur/31VbVLN47dZn3bfvLTjLmL5Z8hy19DNHM6Q+XvX5vlX+Gsa8he4kL3MPnNH8y7k+Vv92uu1bTYbpXmSTduX5GX5a5/ULNrht77R/dXdWnukcsonNXFukjH7NCqNJ/47UFxGjbEmRsxuyfd+9W3dWsMMs32naNsqtuj/u0xrO88v54967922tvae4ZS7GHHZzNI/3Y0X+H+98tMWN2jdI/mZU2u1bMdrC0L3PnK33t0a/dWq8cSKrbIVUN95mojH7RnKp9kxLqPzV+Taj/AHdtVGhvJFZHTYy/Kv8AtVs3lq8B+5/tfcqs1uklykw+ZmTbuVPu10xlb7J5lSXNLlZBa28zSeZcopP8TfdVlra0ezea437FRVRm+/8Aw1Bo8KSN5ezG3+HZu21sadYu0w3tuVUb7v8AF/tVlUlyy0RFOPM9ZFrSY5Jp97w4P3d1dHY2cLbnmeNkkX5W+b5mqjbx20eN6M8i/Oqr81bWmx7meHfGF3bkXZXHzc0eaR6
FKNqliSPT38tEtvk3bd+75dq/xVfhs5o42hhdlRfu7vvM1WoY4fL/AHPzOyfNGybvMq7Hp800nmeTlflX94n3ax9pKR2RjDm0KlrazSzLcvw+z7rfw1v6TYpcKHdM/Nu3bKit9PSaNbZ/Lfd/31W3pWkvcKh85l3S/wCrb+GtY+9ykShKL3NrSbeGRvOTpD8ny/3m/vV2+j2u6FA+4lfl+auY0uwS3/fbGab5vlX/ANCWur8OQ+XGkLvIUbbvb+L5q9bCx5oHnVpSpyNqztb+Fm8mHafveYyfdX+7Wtp+m+ZKghSQ7ovvL92Sp9Lt9tqybNyt8v7z+KtzQ9NuLe2S2udq+ZtZ2+6u3/Zr06NOWxwyrGfZaXeeWXfbt/5aqv3auR6bFDjiSZ/mVv4du6t2z0tG3wrwiuv3v7tXofDaTTulym7dt8qNf/Qq6+X3iIVpS0OPutLSa2aG5dnWT5W/2f8AZrjtT8Mv4m8Yf2NZ6rCkdi6u9vN/E38P+7XrfirQ303w7fTb4UZV2xSSf89G+Va5ux+Htt4Ft7bVdYT7Pef6q6WR1bzm3fe8z722vEzrERw9C38xVfETl+7MTx5dQWuhszpa2lzvVEaH7zN/F/u/3q8C1q6vI9auLm/vPMfb8jM/yxr/AHa7j4uWd/Z6lqdhO8bzTS7d3m7v+Bbq83urW5vlj0ewtmeRom3eZ91W3V8pQUpbHFL3vdOe8bWN5NZpPawx7FiZ/wDZ3f3q838YTGOFpobndc7F3ei16v4uW80XS4dEvLnesbblWOL5m+X7u6uAvvC9zrgTPnR+Y+3ayfK3+9XVRqRjPyCpGUocpwEOmpJdrvRWeF/NlkZ/utVDUr+S4vlmtk4+47fxN/tV6G3w9uby1EKuquz7V3Nt+b/a/wBmsnUdD0rSY0s9Vv7X7W27zZI/+Wf+zXrxlQlI4KlOUYHAeIrKG1kifzpJkZN26Ntu2s1Y3tYpX+0tvVt21mrqtet7G1kR9/mBvmdv4dv+zXLapqENxL+4SN/7jbvmWolKEjL3ojvO1Web5H3Rybdnz/NXQaPYa9JGdh80bvl3L/DXP6XqFhK2x5sS7921vl212ek606sjpCqQt/t1wYyo6cYnXh/3kviNLQ7waVMX/drc7du2RvlWu/8AA+uTLIHudr3DbV2qn/j1clayaVqGIYZrfd99Gk/vVoaXq02m33k3OpQskjf8s/4a+eryjWlK59nldqUo8x7v4Z8TTW0Ze5+5s/er/Dt/vV7X8JfG2la5o7Wf7mUbd3mN/er5NsfEDsv7nVW2sjL8392uy+G/ji88M3CWVtfraJ5q/Nv+Vt1eNiOWn7p9vQq0oy5e59VR6DZyKtzFCspj3bfLfatVZNH0/RdWtr+GFmhkb/R/MlZmkb+Jv9pawvAPjL7ZpOy/vI0MMrLtX5d3+1Q3ipJr5YrmbeluzeV8zf8AjtTRrcs/eNa2FhWPq/wL4N8Pa1qFt4h0ezWK8ktVR5I28uONf4vlr0S3+HsMccsKWEm1n2o25f8Avqvm39nf4mWGpeKrOzubyZotvyRzfdjb/wBmr7t8HaX4f1jQEfR4hM6xL/pTNtVv+A17uBrVK0LwkfDZ7lcacuZnkV98NbbS7lLm2voXlb5Z1+823b91q+O/+Cs37Fd58RPAdx4z8AabHZ6jocX26CNkZpJFjXcyq391q+/fH3h3R/D6y3V1CsJV9zeX8qyfLXN6lqWia9pAvNes1vbab/Q/LZty/vFZdzVvLEezr899jx6WEnKN/sn89lrqE01nDc3On/Z3miXf833WrVt5HWPfDDtljVVfzPvfN/FXpv7aHwTm+C37RGr+GDprNpdx+/sLxU+ST5m3ba87t5Ps5/cJhf4d1erTqe2ipLqevT5+QmzdSMEd9w2bd23+GlkkuYWlmmtv3G3bFMr7dzU6OV2ZbZ/MV2Xcu37rUy437Ve5h+Rv/HV/2q3px98uVSJBcQpJvtpk3nbu3M/8VN2eTIHeZlXd8256hkupvtDuj/7K+Z/tUBoZrh7Z3Vnj+b/Zrrpxl8Jw1JQ+IkhW58tHhRfN+ZXXZ8tdBose7Y+9lk3f3vlX5axLVfOm2Qo27b/u10mkxosnk214odfllZl+9Xq4ePKeZiOV8rubOjyJHdfZp929UX5m+6y1tx2MEMOYYVkb7+1vmrM0/ZIqpv2bfvq33Wrdsyb6NIZkUeX/ABK+1mWuqMvtHmVI/EmETTeXE80zDc6r5i/dX/gNXo42B8qfaSGKnb0IzUK2L28m+GbDN8ryfw7aktUnVFj2nzAcAZ754r9q8E4xjnWPt/0C1P8A0qB7vC0eXFVl/wBO5fmjQtVhVpNkMYC/Kit823+9SMySTNcojN5fy7W+6y/3qVdn2hJvs23/AHk+7UTXfmKqD5n3/NGvy7f7vzV+Iy5ovQ+a5fdKd5sjs2dNrS/Ns/2f7tc1qm9nYb5MNt3+WldCsP2hmCIu7+L+81Z2oWcNqyuJtsf3XX/araPuwH7Pm+ycXq2kpFHcOiMzr/y02fM1czq1mImZ4drs3+t/vL/vV6Bq9r9lkESPv/2v96sn+xPMuGdIFUN9/an3mpxl73KafV+bY5XR/Cs1wu+bdlfmRV/5aV0mi+C0uI0+zW0kok/1u75dv+1XX+H/AA35ipC6SebC67VWL/0Ku20jwe7pFNsjI3fxL826u2jH3jT6vy6Hm9r4DhazTfZ8Kv8AF95m3UN8P3Yr/oeUjfcklexx+DUuJvntmE0cvzeWn3v92luvA9tbx74Y5JPMl+638NdHu05kypT+0eK3XhD5Wgez3Rt95l+9XP6h4TS181I7ZmXZt3f3v92vfZvA8NtZ+XNCqybm/wBrbWJr3glLXfcPDCU2f6z+7/tLS5oyOeVPljeR8/Xng/arQzW3+kfe/dt/47TPDvw1m1TVksktmmaRljiWNNzbm/hr0rWtNhvl2aDZ+cyttuLj+GNf7zVc0fxp4S+Gtrv8PW32/WfK2pfRptjt2+78taqMYx1PJxWIjT+EvQ6HoP7Pfgu+trxIz4gvlVZ4V/5d4f7rf7VfL3xO8UW11qVzc2yQ/N/Cq/xV6T8QrrxV4u1Bry/uZNm/c80jtukZvvVwGv6LolvCEmvFLf7SUvi1Z5/PKUjyXUpb+a4W8015Eljfcm1f4q1td+L3jDX7NdE168uLiVYvkkZ/4as+KNctoXYWEKsqy7VZU/8AHq4vVvEDxSbH25/2ajl9/mNI8xwXjaO/kvC81zkq38VXfhr4qubFntpvuM2395UPjRvtUjFNpXZuTbXNWtxcwtvR8Or/AHt9XH+6OXmel641s0bujq4ZdztWBY6lc6Lqi6lZvt+T96qt96oND8SPdW/2aZ/mX+Km3kKMrTfKV+7RKMKnuyHGpOlPmiem6x4g1WTw7DqTpus5tv77/arndN8Y3drdN/q9jfd+SvTP2E/Fnw38Ra5N+z98YLOGPRPFG21t9Wm/1mn3TN+7kX/Zrn/2zv2SfiR+xj8XJ/Afjl2u9OuH8/RtWhX91eQt91lavMrZVS5ZTpRPWo51V5
488il/wkyX1q8KPGzN99tn/oNWPDeqJDdLZ71YbfkZl3V5xDq1zGqok2f4vmrV03WpGm85/kZf4d1eJKj7Pmcj06eKUpczPSdYntpLUI6Kq/xt/wCg0zRdRSGT7T8qbfl+WuIm8WblW1f5f/HqSHxE8MezG1t25Pm+9WVOhOULHcsbScj0/TdesLi8SaG88qZZfm/utX2r+yP8ZP7PtdP0F5lk23EbeYv3W3fw1+c3hvxJPcX3+kzL+8bdu219Q/s0+JLizuLP7HMvyyqvzPt+X+9Xl5hTnGOp9Dk+MpVpcvMftJ8NfiBpXijQX0jUpoXCxK1u2/a23+7Xxf8ADo2i/tiuZwghHifUchzgYzNitD4Q/FS5sZkea8aaBty7Y5drMv8AerkvhvqS3X7QkOrM+8S6vdSlnP3gwkOT+dfrngvVcuH+Kb9MFP8A9IqH1+HhShdxZ9V614ss5dS5eOKFX/dNv/8AHq8H+K3h+w8QeOBc2aKltJuWWRdu5l/3q7L4saf4k1LTQdBGx5vlVl/8e2159qVvqvhPwvfa/wCM7zyYrW1bbGvzM0n8LV/LVCM6uIvH7R89mFSNSrblPEP2ofilYTTxfD3w3N8lqm66kWdd3/XP5a800GG18tprxGD71+X/AHqzLrUpfEGtzarNt824Zt25f4d1bOlw21xM73k3k/dVK/ZMow9LCYeK+0fJYyd6tzY0ux+zTbJtvyt/ndXR6bBtk2Rwb/7qr/E1YunWrmPZ9s3J/GrN96uh0tUtWhRCsq7fkkV/utX0FPkkeNKR1fheOaPy98DK/lbXaaVf/Ha7LSWmhXZCWR1ZdzN8ystcZpt5bLiREkdf41bau3/drorHWLZY2RN25Zf7ny13U/g905ZShI7nw7J5l0jzQ53bvNbf8qtt+Wup0GSe4sY7l/LZ5P4m/wBmuC03WEVvJeZVDbdi/wATNXVafqkLKvkP/q/mf+61bRlzC96J3WmzvGuyHywv3dyv95a0LGRJ5D5CL5qv+9WT5q5XT9UT5N7/ADfe+atSG+S3ka8hkXYqLvbd81a/CZ80v5jp7OQxwqkN4u6SX5m+9W3p/kzRuiJICzt5Sr/EtcpY6hMzjyXhwv8Arfk+Za6PR9UhVkdJPlZ6uJjUl9k6/SfJnt0y+11T541/iWrlxGnlh4YVXa+5FX5qzNPuraKb9zMu5vm+58yrWh9oh3K+/G5d23bW8fh5jjl7plalpv7p5vlLsn3m+7XzD+3hbSWeteHLaaSNpBa3Bcx+5jNfUF1fbbiWG2mVwvytHIn3Wr5k/b4Yyax4akZFDG3ut2zp1i6V+j+FKtxthvSf/puR9Fwc4PiWjb+9/wCkSPQPhVp3274UeHxlAF0eDJPUZQVe1Hw+iw/wptTbuVPmb/aq58FoFl+EXhuRQpC6Hb/J/ebYK1brTbw/8e1qz+Y3zRs/3Vr5PPP+Rtif+vk//SmfPY+X/ClW/wAUvzZ5/faPDNJKjWzfudu7dF8rLWdqHh9LdiiWcfzffZn+Zf8AZ213MypH5ibM+X/C38VUNUsEmG+5h+dvm3M9eDW2JpyPPtS8P2ccbw2ybXX+Ksu+s/Mj8nzpPl2r5bLXX61bxvveF9rr/d/iWsK88mF22TtIFX5tvy1wyNfi2KGn6Z+7RIdqv/HI33t1dLpGn+WqO6bf9nd/6FWMtxNJGjwIy+X8v91mrpPD7Q3EexPOTd/eT5Was/fA1tH010jRPJVPMl2o3/PStiPZGFT5k/e7W3fLTrGNJLdZvJ37dqp8+1qWa38yTfN5JTb8+5vutVfY1MZS/lLcV6n2na+3Yv3o4/l3UT3CNcNDs2rv/hfctZUWpJHI0KfNM3zIrfepW1JLfd+8UH+7WVSXKSS6hHcxyNND5eGTdtkf7v8As1mX29mL2yKryMvlNu+7/ean32qQyTbEm+VotqNIi/NVaGbzmi+Rf7yK38NRL3tjaMixcWs0zeSkm/8AvySfxVWbw+kkbvO+Ts+793dW1ptvNJGj3iKob5nZXqe60vzFd403Bvu7n+7WMom0D8drW+S4ZH8/59u5Fatax8mHDpyzfNurnbJXjkjE0Ma7fl/3f9qt7T3dl3o+N3zJ/vV5vtD7ipiJS5jUhuHsbpN+1lkf/d+b+7Us0011H50d/Hv+b93I1VWvJlZvJ4lj2l/97+9Ve8uvLX7T8pdn3I393+9VVKhzxpuRnatePskCTM7/AGj5lb+H/ZrntQmdg/zso/3PlWtrUmmmn3xztmbd/H8tZjQ/K6Q/MWTd+8+6zVh7bmj7xcaJQs7FJPmRP9lG/vV0Ghw+XdIk21WX+Gqlnbw2bMk23d97/darenyWzKXfcPm3Iy/w1jzIqPLHludDZW/kRmZH+Vk/esv3ttT27QsVudjK8f8ADt21W0ybzIzM/wAu5du5qka6SPCPzuXbub+9/s10RiTUlCIqzTRtve5XZI+1Vki+ZaikaaRNifKzLtdm/u02adIV87Yvy7WZd/8A47Ucl0jqEmhVkm+ZF/i21pH3vdOaUipqGy4j/ffP8m1Jo3rC1aSKzhaG2hX7nz/7LVp310iyNCkPyKvy7X+9XN6ndbn2PtZm+bbt+9WtOMI7EVJH0L4Dcn9mNnI/5gV9x/39r5EvLjhXmdkO75P71fW/w+AX9lxtpJ/4kV/jPbmbivjnVNSdpNj7WOz+H+Kv2zxRV8lyH/sFj/6TA9biRp4XBp/8+1+SM3Uo08x7jfn5/mqsq7ZFdE3JRMzySNC+7C/9805WhXYnk/8AAq/GD5unH7Mi7DZbtjoiov3mX+9V2wt4X+T94u5Nvy/w1Ss50WZe277q1raX50LeZsYrv/4FWVSR34enYv2cKWsfko652/w1r2Nu7bHmiVdz7dqr/wCPVTswn+p6H/dre021mST9yjH5d25q86tU5feZ7NKhzbGjaWO0LsRv+ArVmbTYfLV4U+Rm2/3tv+1T9Ftdmdk33X+dW+9Wu9jI0QSbazb9rs38Vccqx6csLDksYElj5cy+S7bmXbTrO1MczIn3vvPGyfe/2q3bizSOZX+/5fypueq0luisu+H73zfKtctSp7xEKMea5Ts12yJNCjbmXbu/hrStbHdMzw2zN86tuamR6bsXZNc7Yt22Jl/iq9HJDHvhhTajbfKVm+7WMqnN7p6NGjHdkUlq8VwvnO3D/wAP8VX7VZlX9zMrBfvr/dqBbWGT5JnZkV/4f71TxK/mHy5t3ybfu7aUqn2TdU/7xMq+QPueYjfd/i/3qv2ckMdwmxF+6v3U+9WfHdv8v2mZdv3UVa0tPnRmTYjfK25mX+Gp5kX7GPN8Jq2tvCrDyIZLhf8A0GtCxdEmTyYWbb/sVSsxC7bIZd/z7t0f8Na8LfZ4U8652/7qfNtrOPvBKjMkjmmb3PmtvVk2qv8Ad21JJ5jhEtnXaq7XX/2aljjQS796h1f/ANlpJpIYZG8l2Z12/e+Wn/DMPZ8xJNJ5Mi3Lwxt8ir5a/wCs3f7VVNT/ANcE+x7mkXcy/wAK1LcT+XumttpZl+dmXczVVZXkb7+1f4/MXdR7x
ZQkS5+yqgRYXVv3qqu75f4axtR03zCfJdUZXbezL/rF/wBmukktZnkVPlVdm7d97d/dqv8AZbwTTP8Ad3fP8zf+g1vT97Y5JS5fdOSns4VVoY/MKq+7a3zUyb7TaxbJnUpJ97+L/vmuhvtL3R/Oi7JPm3b/AL1Zk2kpGvyJ/Fubb/EtVGMFscdapIxHs7lIV2Ivzbt6rVO5tfLZkhTc33n8z+Fv9mt2Sze4ZkhdURX3/wC1/tVV+zzTXCwpD/eX5l/8eraPxnFUlKUdDJWHazI80bf3m/iVqj+zwzbZ4XV5P4f9qtKS1n3Sp5MZ2vt+b71VdxRtk0Ko0fzp8lbQ5ubmZySkQ6XZzWZfyY1USRf6tvuq1blnst3CD5/kVHkX5qzrG3tpJHmRFRfvNWhptukczfO3m/df+6y1jiI8xth/eNuFVt5Gmk2lmRVVdm3dWrp8iLIqbI938G7+Ksm1ieR/Jm+VPuxf3l/2q2obPzlXfyisrPu+VVbb/DXBLm5fhO6n8Rr2az3qpsTLSPh/l+XdW1DC6tvfj/2Zf4qy7PfGhcvgr9zb93dWxoMkLbo/3bq3yozfw0R5Oa3Kd32OYv6fawSSfuYflb5vMVvl210em2tnJILxE27flXanzNWbpum/Z2Xy03p/Guz7q10ej2SWqom+QKr/ALr5q7KNLqjnre7EtW9u+7HyiX+Fv7q11nh9YVkjffub5vm+7trM0tUm83z0Z13fvdybWat/Q7GY7fnXbHK2/cnzbf4a9nCx5fdPLxEpcvMdP4dl86QoXj2/xr95v96uls13Qs0yYXftiXZu3Vy3h+N2medAw/vN/wAC+7XXabInmrNC7bV+/Htr16ceU8mUpc8jZ0+N5IfO2Rqq/fZf4q3tPhtvmR9x875YpNu1lrP0dra6mR0RZ0X5dqr8rbq29Nt2a3+R1ZfuV0cvuj8kZvijw1D4im07R9jbFulnuIV+bzlX+9XM/Gy1v9S1y4mtbO3a2tYl8r+Flb+Fa9C168+w6em+5hiNvas6SeVukjWvK/FGqalqmmh9Vto3W8uFWWSPcvyr/FX5/wAQ1JfXbfZIpylUnc8X8eeHZta1ybWLb5FV1WWNX+62371cpJ4ZexvDeQ22ySb5ZZFf/wAerufF2nzN4gFn9sWG0WXc25du6s34gK9uW1CwRo08pYov7jSf+y140Z8kdDX2c/anCeLrG51BbPTYUzFDFueTbt3N/F81c7rmh6Vo9xb3J3Mm/dcRzP8AKzL/ABV1Hi37Tp+mvcwzbxtVXX/0LbXnmuapNHbyec6sv8PmfM1KnU925208PJHH+OteeHVnuUvFmXzf3Xlt8qrXAXF9f6lrTzTfvdv3lX7zVf8AF1xbbpHkfbt3b4/vLXF33ixLWP7NZu25UVmaP79elRqU1DzPNxFOUi94pu3Wb/SXjjSaJWVZH+aua1BnuL5Utn3/ACfdVayLzXLm6uH865bYvzIrUln4iextxIjru3/e/irqp+7Gxx1Ixeo+4kuVvEm3qJd+1m+9V688Y3NrE9sk25FRfmb5dv8Au1nfbLa6ZLmG5Vfm+633m/2mpdc0u2uLdZvOVt391f8A0Ks5QjUiuZkU4zj7w/T/AB1qsjecl4yOv3Nr/LXTeHfHF/HeIl5DvWRW82SR/u1wcdu9nHsS2VlX5ty05fEV5br+7Rvlb5GrhrYKnL3oo9Cjip05R5pHvmg+Pvstiz3OpKv7rb+5i3sv92uq8G+OLnct+k0LIzK3mXD/AHW/u7a+b9N8ZO+62eZQW/i2/LXY+F/E1gq7Jnkd2+4u793XkYjC8vvSPrsHm0JRjeR9g+Cfixpt9dLazX+2WaL7sn8W3+7XoUnjbzo4kv7a3hWP/ln/AOzbq+V/hb40sLm7hTUtSsYnj+W3mZtzf8Cr3Dwr4ZsfiRdIlz4h2Sr/AKpreX5WX+9Xh4iLjPm6H2GHxP1ilzQ2PbPgb8XvD2k+K4rO5hZtr7XkZNyqv96v0V+EvjXwS/h3Sdam8QRyHd5SRRy7V/3Wr8ytD+DWq/C/xBJqWy6u2W3Votr+Y0n8Vfbv7MPiDwV8QPA9nZ23l22pW67GtWXay/8A2VVQxUcLpHqcmOnSxVLlke9fEm5XXkNnpTxzwrubc0vyrXi/jBr/AE+5s9BFk0TSSxsixq2yuzuvBOpeHrq41J/Ekjxxtt8n7ytu+9Whpuj6d4muLC8lf/j3n/f/APTSP+7U1sd+/wDfPNq5fD6tFwex+cn/AAWe+FltoEnhL4hWdm0Dx38lrK0b71aSRd21q+JJFh+dJn3/ACKv39qs1fpH/wAFydPkvvhBaa0ty0aWviq1eKGOL5Nu1lZq/NqOW2WH5IfvV9dklT22D5vM4a9OeHlb+6TR3B3LDsVJVT5P4tq/3aqXlw0jL+53bvl3K+5f+BUt9HtYXNtbb327d2/a1Q3Vx9mjbYfkb71e7GJ58pe6JhPJV03A/eaRvvVY02HzJH859yt97/4mqdvM91j7rBv4v7taVjbyXDo+9UT7v3PlrooxnE5akixpNql1JLNCjE7vlX+7XRWNnD5j7JG2/wC0vzVSs7ZNqbvmbY3y+Uytu/2a3NP0maPZ8m4f3mr1acfcOCp/hJtN+aMb/wC9uSNvlatrTZHWZSibv9pvmaqmnw+Ym/ztxXd95P8Ax6rDM9o8To+6Jv4dtb8hyVP5TV8x2UwRuro3yyrTnAZyqPuz3A61QhuraGRnh+T5/nZv71W42IgDOwchfmKdCe9ftPgpHlzvHr/qFqf+lQPoOGl/tNX/AAP80W7W6e3kZ/l3bNu5n/hqG4V1uns5vnh3L96L5vu/3qqTagkLi2hfci/dZqurN5kKPcvuVmX5t/y7q/FfZ+8fMRj7xE0KQqdm5Ny7kX+9VGUw3ELXNs+1W+5u/vVbkv8AzJJUEGWX5vlqgt9uk8nYvlw/Ntb7qrUSlOMdTejR9pLQp3EaMuxLZcq25JN38VaOi6Dc31wj36K3zf8ALP7zNTNLsXuLhJng2RK/ybW+Zq9B8D+F/MxvmZl+bylk+9urSjHmPUjg/ZxViz4b8Jpu37I/m+aWTZ8zf3Vrr9J8NzXStvdVLLvTan/oVaPhXwv8qXLpGm196f8AxNddHoPmW5e2RY2j/i2/w13xlyx0IlT7HKWOhwyQ+fCnyyP/AHdu7/gVSX3h+2sbf7TfoqRw/K9wz7dtavi34heD/Celu80P2iWNNzxqnyrXgHj/AONWveKLpraFGuEkbbb28K7VVf8AaojKXNZI8vHY6hho8rkdF4u+JnhjTZJrawtZL64Vt22OL+H+9urzbxp8SptXuks9ShmuIZEZoLGxXcqt/dZqd9ovI8/29fx2X96GH5mb/ZrMvfHGg+HY/N0eFUm+YJNs+at6cT5rFZjXqe7E0re71W+hW5fw9Z6fab/+Pf7jSL/FurE1q88N2a74Y7fdJ8ySN83l7a4vxZ8ZLydZYU3B1Vv338O6vMfEHxQ1nULje9zu/h+/92tOZHmxi/tSO28ceOLa8upIba537fllZv8A2X+7XmuvaonL72VV
+4u2sm98WXNxNsmOVb+L+L/easi+1a5jmb59zN/C1OS5jWPvfEUPECbpGmhfhvmaNVrkNc08M2/zthVvu11VzqRZX85/9la5/UJppH2STKp+6rNS+IuMjgdfa5t2b59tZsuyZTMiMB/FXVa5pcNxGyfL8rN97+9XMeS9rI9q6fLv+9soiacxVjupo5vkfdt/u1safq0b/uZuVb761z90r2VxseHZ89WLe6SObfv20+VE/Ebc19daPfRajbTzBo2Vt0b7WX5q/Ur9lfxR8Pf+Cqn7Hd9+zf8AFHUoW8b+Ebfd4cumf97Mu35fvfNX5VLfJdW/7ybmvQ/2Rf2kvFv7LXxw0r4keD9YmhaG4VbpY/8AltHu+aOqjOVOV0Zypxloh/xm+APj/wCBfjvUfA3irR7hJrGdlaTZ8rf7S1ylrNtkP2lGRl+Wv2P/AGkvg/8ADf8Abm+BumftB/D23t/tN1oyz3/kr92T+JW/2lr8wviX+z/rHhXV5Ib+w2Osu1GjX5f+BVxY7CxlHnhHQ0w+O9lL2VTc81kbzMTfcOz+F6csiSKHn4+X71ad54N1LT5pUmhZhv8Au1XXSX3bJoZNjfd+SvD9nKOx7Ea0ZRvGRf0W1TcjmbP+7Xrfwn8Ra94baJ0+dfN3bd/8NeefDT4c+I/Hvi+18M+GLEz3FwSyR+aqDCqWYksQBgA19sfAL9mTXPh94c07xv8AEn4WXKabc3rQ2utSRM9rLPGQZI0cfI7qjKSoJI3DI5rqfDXEmb4T2uXYCrWg5ct4U5SXNa/LeKettbb21OrBYqFGpd1VF+bS/M3/AID+NPFuuapbaVbabMBNb7omb5fvfL8telfDAPpPxis0u5gGgvplldsYyFcHNerfs1/ALxv+0L4uvx8EPA11rv8AZNuJr0QQrBHboc7VaSUqm9sHam7c21sA4OPIDN/wjvxWvG122msmttUuUuoLiBlkhbc6lGQjKsDwQRkGv0rwi4c4gwmX8T4DEYOpTrSwcoqnKElNynCpypRa5ryv7qtr0P0bJ8fh6mFqz9tGfLFttNWSVz6x0m/1LxIyWejwxzHZt3N83/Alr5P/AOCjPxU1Lw/qNh8HP7NutPm1CJb+XzomjeSFW27l/wB5q+mf2Rv2gf2Z/A3i+01r4v8AjmWCzjX97bpo9xMR/s/IhrH/AOC3XxO/ZN/bL0bwX8Qv2Z/FIvfFnhmdrK6sptFuLMT6e4zw8iKvyt2Jz6V+W8OeDfG8JOtXyzERt8MXRqL/ANtPj804lw8aqjTnGUX2aZ+cljeJG2x9qL/G3lfMy10uh/d27Nz/AN1W3K3+81QwfC/x6DiXR0wq4BNzGM/k1a2mfD/xfbsslxCRwBtWZPl/WvsKHh7x1DfK8R/4Kn/8iefWzHBT/wCXsfvRPa77VUd0+fczVq6bqCNIiXLrH8zMn8K10/wt/ZM/af8Ai3FLqvwy+DfiPxFDbORNd6RpElxGhPYsgIB9s5rJ8UfDD4l+Cdem8N+N/Ct5peoW8o+02Oq2pgliPoyPhh+VdFDhHimtiJYaGCqupHeKpycl6q118zzKuKw6V1NfeXtNvLaRdn2be67f4/vVvabqVz9neHzo2b7qf7NcVpmm+I7OV2ewQqwwVMoOa0LdNfRmPTccBfMHAr1o8B8aR/5l1f8A8FT/AMjjWMoKWsl953+l+IJo40muXj2fdbzF/wDHq6XTdYeO3TY7IsifJN/tVX8Ofsh/tc614ei8VaP+zT41uNOuoTPDdQeGrh0nhIyHXCfMCOQR1HSuT0/xJNp90Y9UjmjeJjG8EynKEcHI7H2rmwfC3EuOlKNDCVJuO6jCUretk7bdS/rVKMdZI9d0TxB9qT53j279v+9WvY6hM0e+bbt+6nly/erjfhJ4Z+I/xbvZbH4Z/DvX/EM1oGLLo+jz3YiRv7/lodv44rofHng/4nfBeC1v/it8LPE/hu1lfEVxqvh+4hikb+6GdACfbOa0fDXEFPFrCvC1FV/k5Jc3/gNr/gRKtSkubmVjq7PVIHVUtnba33m37q3tJ1a2hZPnmD7l+VU3L/vV5D4W+JehaxqbWmnXjj9wWZdjK2B15Ix3rqtL8QXgHlvcqyt9xll2s1cmNy/MMqxXsMbRlSna/LOLi7PZ2aTsZ+0jKHMnc9b0nXEaFZELb/uu3+zWwutLJbhHuVBk+Xb/ABbv9mvMNN8S3knV1ZPvPHDW3H4ihZmfzFLK+23X+Jm/2v7tZRkYVI8x12p3ztIHhRn2/K+56+cf269h1Hw0YidnkXW3P1ir25fEFtNs3ncGZv8A9mvCf225mfUPDsbFDthuiChz1MVfo/hXOMuOcKvKp/6bkfScGQtxHRf+L/0iR7X8Eiv/AAqDw0zbVP8AYtuqqf8AcHzVta00ccKujr8q/Kyt/DXJfBzWBb/Cfw/ayFcNpFsu70+QVs6priyRy2yPGgb5EmX+Kvjc7nbOsT/18n/6UzwcfT/4UK0v70vzZT1a8gaI/wCsLKm1PLX7v+9WPeXlzG2zf5X7rb5n96nzal9oXM25fl+eRW3M3+zWPqGoeZZhH2q6tt3LXhykZRj9opa1evNseFG2L9/y221zeoaghDw/K2373yfeq7rl5Naq+zcu51+VU/hrjNa1L95LZpuRG+40b7WrnlI6OVnQ2upfMsPU/wADK/y10/hy686PY94uGT7u/wCZq8xt9Uhjx5LxsZF/heui8P6w9mqpDdLhk+7t+ZWrn5uYJHqWk6htjMds7P5cu3bUlxcOYYkhufNRm+f5Put/tVyFr4kmWMrvWP8Ai8xadceNINy5mZN3y/KlVKXQn3DYuta3XG9Ewyu2xm/h/wBmsq68QQrcF5n4b7i/7X96sK41pod8KTbHmfcm5vvL/erBvte2/uft/wC7VfvM+5mrKpUHGPvHZT+Irb78L+Xt2/x1fs9Ze6aVPOXe3ypt/hryW88WbZP3Lrvb5dqr8v8AvVd8K+NppPkdN9wvy7mrGUuYfL757tpN9Ctp++mjdF+XbI3zNVxtW+0Ls8tk3JuRf4a4Gx8UJ5KXM3O77vzfe/2ak1LxRND5bs6qmzdt31nKRfL75+U+hq62ux4WKbNvzPW5ZyILVLb93sb+HfVCz08xM/75iPvbdv3auWcczNvhEinc29W/9CrwZYj3/dPu6dGESbdMrK838Xyr/u1BdSW21Uh8vP3mWrtvbvJH+++Z/vblqG4sU8l3jeNmbcrttqfbc0uY19jymTJawrGrxupZf/Zqht7M7nkdvvfw/e21oNZzIo5VW+8yqtNa3m2s8KZPy/M38X96nzc0feJjGX8pnbrYMqPuc/dT5vutUkfkxzRo/wAzL8u7f96l1K1aw/d7Mbf4aybi72wvs3Nuba1ddOMJ8tjjqSlGVpRN2z1KZv8ARk3IPmZf7taH2wMvlum4R/3f4a5mx1DzIx+++WOr9jq03mebD8m5NvzfxV0R+02YytLlsabSJJH52/btb522/wDstRRzJHCd8zL83y1Q/tB4Zt/nKGaL96zVWvNSF0u9PmCp8u6nGJjKp7wanqk
J3fPsb+9XN65qXmNvtn4+7uq1qV08bb0df3ifOu6sDVt/mO8LqC33Pn+7W8Y3OOpU98+pPhtMJP2TDM46+Hr8nH1mr4pvNQeRmdP+Bsv8VfaPwzJk/ZBJHU+G9Q/P99XxV9jfyykaZ+fb9/8Air9n8U3bJsh/7Bo/+kwPoOIkpYXBN/8APtfkit5n2jekPyVbs45lVP7n8dLY6f5cjK/zO391PvVah0yaObePmX71fjT+L4jwqcfeJbWGGWTZsZtrfK38Na9nDOP+WyquyqMNu9u2x0zufcu1P4a0FjdRsRN/z/w/w1zVOSJ6uHpmzpLQ/fTdt+78yVu6bP5bF4ZmUMvzq38VYOmyeaodE3Bn2qv92t3TYXkj/h/76+9Xl1tj2MPTqy5bHRaS0yrFDbJnd/e/hXdW7bxpHMvnOz7fmTclYOls8ewbNh2fKrVprdIzQpMm9N38TbfLrzKn8x6fN9mRLfLN9lZNiqJn+833mqCaF4UKIONu5d396nteRNuT95Ii/L8v8LVB8/nCNPubfvK3zVnL3o/EKPuj2urmRYXR1Xy02/8AfX+1ViGNIdvnf6xtv3kqCOEtcbx0/ut/eq9DG80zXMyLv+6i/wASr/FtrLm5TeMeaRHDDCyskMLL+9+dV+9T4983yP5imrMa/wACIu6FdvzPtZqfDYvHGEeFl3ff/vbqcZc0jaMeWXMRWdqJGXZCqhVZtzN8zVo2cc8kiwoissi/d3/xUsdi6yD5l+X5kbZ81XLS1/fAnaVX5tzfxN/dqvcO6j7xZhb7PDEnnMm5/n2/xVp6fcJBGmx9219r7vmZqotGYdr3KZ/etv8As6bvlqxbyPCqOibd33f722pjE0l8HKzRhvLZvnuX4ZPmZabN9mSHYtz87N8rN92oLWN5GV0T5Fb5P7q1dtbW2uvn2KxZ/wCLd8taxjDm945alP7JXjt3kjFtD8/zbfm+VVpYbONbiGaa5ZHk3fNt+Vvlq5Ja/uWtvLjc7N+7+7/s1at7OG4mZIbbb5cXyfxLtolK0eU5qlPuUfLea3XyYVSXfudZPvbalW1m8tH3/N91FVf/AB6tBbRJmZPOzt27P9lf7tLdw+ZNvs0YP91/4qPdjojkqRMZtJRoUd33srbn/wBms7UtP8uZt8O8r/t11E1q8jI43IPN/wBWqfLVHUrdPLf7L8kv3mb+H/dq/djpE4pf3jkrjR/Oj3o/l+Z8u3+Kq91p/kyhIUyqp97+9W9dwpHCNQuvmMfzbV/hqBrdLiX5+sf+qjZ9rN8tPm9/U5qkZcpzd1prsyPGka/N8y7vm/3qrSaWv2NIXdi7S/d8rc3/AH1XSR26XkYmlh2qq/6lfl/4DTZoZGby0ttzR/L8rfw10c3uxSOXl5pXic3HbzQ/PCnLfK21Plb/AHqt29rMsS7y33Pk/vVsfYXj2OifvW+5/d2/7VO0+1mto0e12sGZlZv4VWolyy1FR5o+6QaXYpDDJvdoljTbW9pcKKqQp+88xPn2tTdPh3JsdPmb/a+ZW/vVqaevl25hmT/a8xf4q5pS5Zcx6VGn7xNprOV85E27n+833WWuj0S1SSRU3xq0i/Lu2/LtqhbKkdvDC8O5FTdAuytLT1vG/ewwxp91d0abWWsYy5p8yPTjHlh7xq24mnb/AF0gKvteTZ92uj0mFLWQIgzt+bzP4t3+0tZGnybo4U+6Wfc3+1/tVu6fcQrMru6nd8q7fl3V6VA4a1O0DpNNhmWJrlId25f/AB6tbT2hFu0LzK5Z13R/3qy9KuI1hMPkyIsjbUb7y7lrU09vLmRZvnbdu+58q17WHieXVlyx+E6HQltrq4/ffu4mTci7f7tb2lW9rCq+SjFF+98/zbaxNDt0jbYiNtbd5TNXR6THNGoT5X3J8m1vmr1I+6eZWjyyOj0K/kZXRJtkTKzJtT5l/u10cMrwr5s21UZ1ZGVdzM38W6vN/GnxS+FHwZ0ubW/iv8RNF8NQRrvim1jVI4Gb/dj+81eV6X/wV2/Y68SfESx+EXwf1jxT8QfEOqXCwWGm+E9DZ47iRv7rSbaUqnLDmMXWoRjrI+ifFFxN4ovLlLDUpporO6+zeW1rsSPavzLu/irzj4ta9Do+mwW3nLHLs+WGN/vf7TL/AA16N8PdJ8Q2PwzvbzW9HurXVdS164e80e8b57GRtqrCzf3q+V/2n9c8Q+F/Fl9qv2BkRYPKlVm+ZZP9mvzzMq31uvPkFR92XqUfFHxK0S1vjeXV5MWb5Yo9nzbq4rV/jgl9pc2lb12Ryr5rfL83/Aq8S8V/ErXvE2rD7ZD5QWVkVfvVreCNNe81S3037MzyXG2KCGOLc00n8Kqv96vLo0eX+LK1j28HSlW+E9b0O4TVtLuZtYmVbaZF8qSR2bav+7XlXj6+8PWf2m20HWFnjjbb8rN+7k/iVq/RLQvEX/BK39iHwLpfhP8Aa9vYvFvxBvLCO6n0S2iZrfT2ZdywuIm2q397dXz98Xv2n/2cPi28+k+B/gX4L/sC6l22tvpFh5U8a/3mk+81YYqthcLCNRPmb6I+kynI8djHJVockOkpdfQ+DPiHqE0in7NMvy/M7Mn8Neaa9qH2e43wnKt99levqT4zfs122oeF9R8f/BZLzUbKzRrrV7Fl3S2cf+z/ABMtfJmuXCtcM6J8snysrJt217GV1qWLjzo+Wz7L6uXV+Uc2oQzRGTfuVl+7TDaJ5auj42t8q1Bbyfak8yFFbav3t33qdb3l/IzWboqRbtyyV6dSPu+6fO/FrIms43hLF93zfdq3b69cK3kzbVj+7VCZXLH99vVvl21JDNazb4Xhb93/AMtI/wCKuaUfslxlKPulu+Wa6t0e2+T+/tqte6XDdTvDDcyRr5W7bH/erofDdmLqJEez3p1T/dqxqHhKFlTyf3f3m3Vl7SMJcpp7GrL4TibGzmiwZpm3762H1XypERHb723/AIDUWqaOIV8yCbdu/h+7Vnw7b2cciXM0KynY29WqKkY1I8500ac6fum34RuYlX7Q8s3yvtVt1fZX7H6+J45orvw94P1KdZFVHaSDaqt935Wavmv4dX+pLJCmleBmuhD83l/Z9yt/vM1fcH7K/wAUviRpGoW39saVss/K2yq21fLk/hXbXyOZ1HKL5Yn2mSynbl5j6Ct/jN8fvBOoQf8ACQ/Bmxn0+4iWBdQaeFp1hX7zNHX0B8F/FHw68dWUmsP4e/s3VVWHb5f7v5t33q0fgN8Mfh/+0d4GfTfEunafq10sH+re42yw/L/Dt+7trzX4ofDm2/Zb8WQXmm3/AIo8PWMl1HEsmsJ/aFjJu+7833o1WuOnhZVKfPDY6qmKiq8qM9z6bh1mx0uyntteij8xvm8xdzVDo2uaKshSzmhRNu5mZvvVk+D9S8U+OvB8esaL4u8H+IbZXVWls7hkdfl+bcG+81TxaRYrI/2rw1a7JNvzRp/DRWoxjOMJI9HBuFSlKPU+cP8Agql4Jk+OPwA1qz8OaysI0OzW9WSKLclxJG27bX5RWNvNcQpsdlkaLf5cif3v7tfuZ+1/8NZvGf7OHi
6uf1q4m1Kz+0vMpRkZtv97/Zr3IylI8L3TlPF2oTTaozptCsn3qzrySFbYzTbfl+4rfxUniK4VZN7p8+2ud1rVnkh8k7g397+9V/aKH+Il84iZ0wrf3axIpvJZofmrZgc3uijzDny2rMkj3Sb4PvUfCEfeIpP3f393+9TJB5kf3OP71SXCBl+5yv8NQTcMqb9w/urRI05eaRAx29ak2Oyq9RuN5yaVfu7PMpc0S+VCx7Nxy3SrWmSPHL9/738NU6ktWCzLu6UhSidJbSSeTs7L/FTZFeNm2fNu+9UdnM8kOxNtP3eThFTKf7VP3dzEb947M7W/vNUF0zxr8nB2U9piy4mk+X71MkV5FP75WWl8Og5EDL5ihEfbJtqG4DpDh91WrhflR4X+eqszO6N87FVo+If2dSvI2G+cq1RMoZ99Sts279nzVFMvaq+EuO42HO7bu2nNSbUZN9Mt13ueean2umUdFqhy3I8pko9NfZ/DUjR7V87fTRJkYC4NZklvQ5HOuWeHyPtcf/AKEK/qX/AOCSd7eL+yJ+0aq3coEHhjdABIf3Z+wX5yvoeB09K/mI+CHw08TfF/4p6P4D8JQK95d3itmR1VY40+eSQkkcKis2OpxgZJAr+iH/AIJf/tqfD39lvxV4m8EfG6PUJPBvjSwjt717OIyrazKSnmOgIbYY5JAxTL8LhW7fsXBeS5tmnh5nDwdCVR+0w0opLWbpVPaTjHvJRtp5pdTlq1IQxEOZ23/E8y/Ycurm9/bV+GF1eXEksr+OtNLySuWZj9oTqT1r7R8ZeCfDfjr/AILyWtl4muFWOws7TUbWFkQia4g0tJIl+cjGGAf5QzZToOWX5/8AHF3+wH+zj+1j8M/iV+zN8VvEniLQdI1221HxPHPYGQWqxTqwELyLCzsVByhU4AB3knaMz9rj9srQ9Y/4KETftY/s5akb2HS7mxk0q41TT3jiumgt0hfMZKyeU4VhzsfDHhTX7Dm+BzTi7P3i8BRq0YVsuxFKMqlOUHCpKpFRjJNe63a67x95HJCUKNLlk07ST07H2n+2frP/AATy+JfxmutN/aG/bE8aaJrXh5ltv+Ec0rULiC106QKCWREtGG9shjJuYngZwqgeMf8ABT/9rL9kn4sfsseGPg58KvixfeOvEGj6vBJbazqFrM1xFBHE8bvPO8cQd3DKCQrFipLAHDVq+M/jF/wSS/bumsvjF+0HrGs+A/GcdtFBrlvAJkN6URcZeKKaOZF5RZMRylQAwACgeK/8FAf2yv2f/ih4C8O/syfsp/C+y07wP4TmZ7bWL3TNt1LLkr/o7OzSJE4w7vJiWVtu4Dad3wnBHC1aGcZTQq4bHKphJXmqvs4Yei1FqThNU71YzltGMryTvKWmu9eqnCbTjZ9r3f8AkfJ9XPDupR6N4gsNYlEhW0vIpmEMmx8K4b5WwcHjg4OKp0V/VU4xnBxezPJPvb/gu3bya545+GXxNsHkfS9Z8JzJZvvymVlWXIGOpWdMnPIA9OeB/wCCJ/hzUda/bgs9WsxL5OkeG9Qubso2F2MiwANxyN0q8eoB7V3XwR/bd/ZH/aS/Zn0b9l3/AIKEf2pDeeHpQmieLrW3c7Io02Qu0kO6RZgjNGd0bI6orMSxrbuf2wf2A/2DPhV4m8M/sHXWq+I/G/iKzEUfia/tnkjtmBIRpHnSMYjDu6pHGVZlUPxyP5spy4ky3gKrwLHLa0sU1OhCoof7O6c5u1V1dklCWqa5rrZX09N+yliFiOZW3t1v2sfHv7ZXi7T/AB3+1f8AEXxZpTyNbXnjC/aBpJN5ZBMygg4HBA4HYYHOM15pXv3/AAT8+JH7Kfg/9oKfxZ+2j4d/tnSrmxmNpealYtf2sF6zAma5twrtPuXeAdr4ZgdpOGTjv2wfE3wB8YftDeIfEP7MvhiXSfB9xcKbC1eMxIz7R5kkUR5hiZ9zLGfug9EGEX9pynH1cFmkMgjhKqp0aMGq7S9lK1o8ile/NbW1r6O6Ss3xTipQ9pdXb26nq/8AwSV/Zph+O/7Tdv4z8UWit4Z8BRrrGqyTD9286k/ZomPu6mQ54KwsD1rgv2/P2lp/2qf2nNf+I1pdtJottL/Z3htCeFsYSQjAdvMYvKe4MmO1en+Bf2yPgL+z7/wTR1z4SfDHW72D4jeLr2ePxXPd2HlJa2jcSSrPynlfZ18tRu3h2dyqjBPxKvxk+ELkBPir4bOemNct/wD4uvmsno1MXxtjc9zVeyVP/Z8NGp7r5ItOpVSe6qTsoyX2Y22ZrN2oRpw1vq/0XyP0/wD2V7/wB/wT4/4Jvj9tXSPCdnrfjzxrM1lp1zc+YY4g08iRW7cqVjUQPK+zaZGAXdgIy8N+z7/wWr/aTg+L2n23xxOj634W1XUY4NRtbbSEgmsYXbaXgaPBbbkHbJv3BcZBO4c9+xh/wU1/Yn8Qfsy/8MS/toatBqPh4XZ/sXWdP1JJxBEZDKquIn86No5MlHjD5V9pUKp3eg29t/wQt/ZYuNM+O2r/ABm1rXRa3qvoVhq7z+Rd3cZDqsYe3gjldSAdrybP7wIr8jxv+rVHMM2p8S5ZVxmKr1ajpVYRVVOk9KMadRStScFo9murey64uo4w9lNRSSuttet11PBv+C4X7M3w/wD2cPihr0HwysYdP0jxT4KudVXSIGfbZzMJ45QgbIWNmTeqg4XcygKoUV+Is1vMq74du77rV+sn/BUj/gob4C/bQ8X+JviDYeLdNsdHtPDFxp/hrSbjWoJJlgEch3sqMR5sjsWKrnGVTLbQT+UckyTWju4+Rf8AvqvzDxhlmNPJshoZjVU8TChJVPeU2nzKyk03eSVk3d3aer3O/L+VzqOK0voU7PT3mkPz7G/j/wBqun8C+GbrVtUi022Te7Ovy/erC0eJJPn+7/F8v3q90/Z48MwtcjUprBnb7z7f4VX+LdX4TiJyp0pXPbw9P21WJT8Sa5b6Pp8Vgk0bvZp8/l/wrXk/j7xdc65Jt85hDG3yba6r49al/ZesXFjbTY85md12/dX+7XllxeJdRNt3Y+7t/irmwWFjpPc76uKlT/dn6ef8G/Pxavvh3458QajaWE00MGkfariTz9v+r+8qrTP2wtJ/aB/4KJftXTfFfx/4buJfC2jxNZaXpNnudLGz3f6zb97dI33q+e/+CP37QulfCL9qLSdP8Q3VnDp2oBrW8W++6yt/DX7R+HvFnwd/ZMste/aS+J3xK8JaL4L0pLjUPKSeMz3m1d0UMafxfN8tTSwl8zcXufreR43J6WSfW6qvUhH3f8j8Xf8AguDceHvhr45+H/7IXgPdHYeEfDMes65DG25f7Qul+Xd/tLGv/j1fDW/d8mzc392vU/2rv2mL/wDbA/af8fftIeIdPW2HjLxBNeWdqv8Ay72/3Yo/+ArtrzC4037O29/+AV+hYOjGhQjA/C8+zKpm+ZzxNSWrC3Z2X5+v8dTySf6xPl2L9yoY4/3ex3w396rG1/4NrfJt+aurlieSNXZuXYjfe+dmrY021eWFk2Lj+HbWNJN91Jkwu7/gNdj4P0lL5VQ
r8zfw0yJe6ZN5Yzww7/vvt/uVom4+x/DvVbn5d8dvub+Fq2te0V41/cpu/h+9WT4zP2X4S6mEh5by1bd/D81ZyKpnlEep/Z4mvZ5llmb7sbVd0+SZVa5cfeffuasKxtZrqUMseRW62+KFkz/wGg1+E7f4d+Ptb8J6tBrGg63cWN5ayq0Vxay7W3f/ABNfdn7MP/BU12mh8MftIaas8TMqReJLFfnVW+VfMj/2a/N+x1SaFldE2stdR4f8RTLCN/8Avbdm6plH+U1w+Iq09j90tB1vSvF3hu28YeDb+G/0q4+a3vLeVWVv9lv7rf7NSyTTR3O9zIiMm/cv8Nfkz+zX+1l8YP2edSe8+GfiFYra4+a8028XzbSb/aaP+Fq+pvhp/wAFWJrqaHTfip8K7X7NM/m3F9oNw0bbv91v4f4qX1iUdGj3cHmFCMfe0Z9htffavuQ/Lt/hbbWBqVj5ly+9F2fLsZpfmaofhz8X/AHxq0GLxF8NPE9vfpJ80Vm21ZYf95avSRvJCLZztk83c21Pl/3azrVos+owNRSipxkV9Nt92zybZY2b76r91q868cxovxPnijXj7XDgE9PlTivVNBtZvtSQfZpNy/Lub7teZePFeP4uyhhgi9t+q/7Kdq/X/Be39sY9L/oGqf8ApUD6fL6/tKko/wB1/ofmd/wUi0v/AIRv9qLxBd20OwX1lC33Nv3lr540rRrzU50jRGYv83ypX2p+3R8HfEnxs/a+m03R7Dzo7fSYV8uFN25v71dT8Gf+CZ+q6W0Oq+PYfscEn31+8yrX5VTgvtH4pm9aNPMJwXc+QvA/wR1vXJoXhs5m8xtrMqfdr2/wj8CbPwtYrea3/ooVW3s33t1fUXi7Sf2df2edGmRPJvJIYtqxyfI33fu/LXxh+0J+09N4o1Saw8PWy21uv/PP/wBBqpVI/ZPN9nOp6D/id8StN0WH+x9A2rt+Z5P4q8c17xJLqFyzu+//AHqx9W1q51SZnvJmZmb7rNSRq8j/AO03+392op+8b8vL7xN5010yuif8BpWh2wt3ZqmsbJI4w7uy/wC1U81qir02/N8jVRPxe8Zd1Dvfd977vzVJ85I3sodv4anmt4Vh2bF2/wDj1QvIki/+PPS+2VKMSNlO5v8Ae+8tWYV2zb3Vcf3lpkaom5U+633P4qtMx8pMIv8Ad/4FSXuy94Udje8Pyw3MCo/8P+xV6exTzNkJYLsX7tY/h9d1xsfcxb+61dPb2bpyibv7rK1HLGQ5PlM6xaSOTe7sqf3Wb71alncPHdfO7Mjfd/2agnsY2kV03SM33l/u0jW7rIH2Nt/h20fCTHmlA6/TptNmj2Pc7mX5nXZXlfxskha6RLbjc27bXVWupTWbbPOYGuF+Klx9ouYn3/71OPMVT+I5BPvCu88D332XQ5nd8FU/hrgq6jR5vsPh65ldP4dtEo8xrU2Od1Kd7q8kkk+9vre8G6bPNMjJ/wACZv4a5+CJ7ib/AHmr0Lwvpz2dj9p2fw/dojsKp8Jf1aVLa1CQvjctYyxwzN8/K0mtaptkaJPmLNurPt752Xe7/Lup/ZMeX7R0FvdQpCqJ/D8qN/FUsbSXUmdn/fVUrFXmVT5O2tyxtvs8fz/Lu/hWlGH2QlV5S7pOmpHbSuE/gavPJ9Wm8M+NHmhm2rv+Za9Ek1ISN9jS5UL/AHf4qwtH+BHxd+MXi6LQfhj8OtW1q+updsEOn2DSyTN/sqtVKIqcoyPpr9jj9obwj4HmvLDxhKba11ZI2S+VWdYXjD4VlVSSG3dexHvke2R/GH9lLVr+S8XUNCnuZmLyzNojF3Y9WLGLJPua6b9gz/g2G/4KA/GfTIdY+MOnQ+ANEuHV0k1yfbc+X/1xX5lr9Hvgl/wam/skeBraGf4nfGbxRr10sOydbHy7aNm/8eZq/TOHfFrOeHclp5WqFGrTptuPtIttczba0klu272vra9rGM8JzzumfmI3j/8AZhkh85xoLJ0DHRePp/qq9D8JeDR8UH07wZ4O8JHXF1ZY49M0iz08zfaFIBjVIQpyMAEDHGO2K/UyT/g2u/4J0NJbFIvFgSDbvj/tv/Wf+O/LXyx+wv4P0L4a/wDBV3RPAHhpHi0zQPG2s6fp6yuWZIIIbyJASepCqOe9fqPB/iZjc/y/Mq9TC0IfV6MqiUItKTSk7TvJ3jpsrdT7bhDCOng8wTe9Jr8JEHgz9gD/AIKeeE/CX/CKeBPhb4u0bRbhNz6Rp/iSG0gYEdGgW4UA+xXNPtv2Bf8Agp/4dvW16x+HXiuyuUT5ryDxZbxyBf8AeW5BxX6t/F39qv4b/CK3P9vazGsm/atfGn7U/wDwVv04eHLrRfh5f2s1wZZF3LL8zR7fu7f71fk1f6RuYUk1DL8K/wDuHL/5M8jA8JV8XZttL+vI+Ividpv7VWuasfhr8WPFXiHWLnT5g40nVPFP24QSYKhgpmdQcEjI7E1+nH/BOH4S6J8JP2ftD07xd8SYZ3RZZvsvmCNIHkkaRoypJzgsRnvjOBnFfj54i/a0m8J6hc/Ga5uYbwX11Ikscy/6THJu/irFs/8AgqT4ztbyO2sL+4jRnZkVW2sv+z/tV+ecb+MnEvHGVU8txOGo0qMJ8/LSi43lZpN80pbJva1763srfYYHg/AYWTlSqyUmrcztovKyR/R5B418HwKlomsW0ny9pFNc78TvC37O2seG5/EvxI0HQprSzj81ry6hQFQvo33q/Cr4S/8ABVHxnNcW1lrfiS4V5rqO3gj3MzNIzfw16/8AtOftvfEvwHotnpXjDUo5XhiW4TS7iJpFmbbujZl/2a/MqWb1KfuzpnYuEKcZc1Osz7y0f43fCLT/AAZe/B74ZWt14Z0zV3kS3ubOVnut0jfeVW+7ur83v2j/ANnj41/sMftTXfxd+MXiS38Z6P4gtW/4QjxBq1vtttNb+Lz4/wDn4VfurW5+xT+2xpXjjxY2sa3eMbi4ut3mMnzR/wC7/dr6p/aUvvg1+018DtX+AvjCdkttQ/fadqN7tlls7xfmjm/76/hrhp47nlKNZ/4fI+qw2AlgpxlhvhfxefzPz1174ueA9e1a88f+KvEk1zPqC/uLrULpnvL7/dX/AJZx/wCzXl9n8Rpv+FlQ6r4A84W0zsjMvyqy1698Cf8Agk74z0mbUfiB+118XdPs9Nsb2RLVtLl8+W8j3bo/L/hjXbUn7R3jz4LfDmzTQfgD8E76+TR4Ge41S8ibc3+03y1206PNFa83MdNbMqVGteH2e5Mmm/FTS5X1vxJqSwwMi+RC3ysyt/EzV86/tGfEzXrdryCz1hXdXZd0b7mVa9W1LxN4w+K2i2sPijxzefYJrJXSz01Vj2qy/L81ZGj/AAA+C0dxFc3+iXl+6/LE2qXrPub/AGlX71d8MjxMve0R8niOJI+1l7OVz4lvPiN4q+2LDC7YklZf3KMzSf8AfP8AFWjbt8TtcUPpvgzXLtZH+9b6XM3zf981+gPhXwj4A8Jag03hvwHoth5nzOtvYRqv+y3zV00etXMby+TqrRLt/wBXb/Km3+KvYp
ZXhqceWR5VbHY7Ee85H5wN8Ef2h9S8nUtK+D/iS88yX7sdlt2r/wACp/iL4S/tUaPH51/8EPFEUUe3fItluVW/h+61fo7NeXlzcD55pXjT+/taobi+vDCUS5uFXr5fm7f/AB6r+p4WJnGtjPszPy+vrj496PeJDeeFPEln/wBM/wCzZG+bd/u19Vfsp+J/GviPwLcR+MtKu7VrW6CW32yDY0i7eWx+Ar3bXPtLrNeQXLYb/W/PuZv+BVyFpuN7cyMWILLhn6nrzX6V4NYShDxMwVSO6VX/ANNTPpMhxWNjifZVJ8ylf8Fc8H8f6Vb6X4p1ee3yTJqcszMDkeYzN8v5EVxGpYM/z+Xhm/hr0b4uW0za3qDEzAG6bH93Ga87urWaaHyfJ+b+Dd/DX53xJH/jIsU/+ntT/wBLZz4uUnOXq/zMC+s3mco7sy7/AJ/k+as24s9rGzTzPm/h/irrxpe5lfYqj/po1TLoO1mSHan8btt3K3+zXBTqR2PnMRTlI5X+x08svv2bdv3qaunXMbG5mm3pv3J/FXeWPhl5YXmmtm2bN3yp92kbwjcrIf3Oz+L5l+9WntoRkc31WUoKSOMsrF0jhd3bLfNuVf8A0Kr9vo811MEhdtypu3bN22tubQEb5NjKGWpF059yO/y7vllVflpVKkJS94ujRnExV09/7/8AHtfzPu/8Bp/2F23zTJsRf4mSuh+x+X5UL2zbG+R2ZPu1Jb6Hc3DFPup91q4KlaEo8qPawuFn8Rg2envb7Psybov+ee/7v+7WlYwQsQwTD/Mvlr97/gVXLfQ4ZIfMdGX+H+6y/wC7RHbvbs88Ls6L8vlt8rNXBI+jwdOUR9vCW3zOjDbtZ2kf7tOuIXW1cuNw/jX+7Vux01JpvJ2b/LX51b+Kn3Gn+dZiHY0Qb5vlqeX7J7NGnzR0OaurF2mKImxFi+9J826siaz/AHgld5Ei/iWN/mautbSXaPZM6n+Gs/8Asl51dHeMBVbbXRRlA8PGYOXPzM4C11IzXDed8pb5k/urW5o+seYyo9+wH8Fcku9pN7vmTf8AJtT71La6k8fz71xX2EJfyn5BKPL8R6xpuvcFPOVlZF3+Y+3ctdz4f8Q2ybZkmZo2b5WWvDNC1nYyeT8/95pH+9/s13Gh+KnXc/n4ST76q/3WqK2Il8I6cep7HpmvJdSK+y4cs+3y4327f9qt/T7xNrJcozOzfw/xV5Z4f1y2lX/XNlX2/K1dZo+tM0eyG5YOrr5sleTiMRPWCPcwtP4ZHb2mzkzQs5mX+Fv9Wq1fhazaNd75ZnVU/i3Vzel6k74hS/8A3bff2/xV0ujzukZd3X5v4V/hrw8RufUYXXc1Ft0aGO2eTe7ff8xflWrs32beuEZF/hbZ/wCg1UgkEcYdH2/N8zbPvUv9oeYuxH8xl+Xatcso80PdPToylza/CZWpx2crN9mTj+Ld97/gVc3eW9nawsiTTZV/4vmZv/sa6HUrzyYVm3xn+KX/AL6rB1a8hVXmmlVmkl2p8v8AF/CtEf7x206kYGVdM8OLnzl3Mnz7vlZV/wBqszUJJ5rX5J98X92tS8+xpvmT55pPlf5t3/Aa56+kmkjdESRdv/LNaz66HqUcV7OZ2ukBIvARCsSq2koy3turgIJJmtTcwOvzP8m77u2u+0qRH+H7OCxX7HNyep+9XmU0bvtRH3fP97f92v3rxnhKeQ8Npf8AQJD/ANJpnVPMZYRp9zoLFprGFn+3rs8pWZtm1a1rWOG4zJvyyv8ALtf7q/3q5/TZvtMiQv8AMipt2tW5Z/uLlLx32wr/AAtX4DKM+blOCtmntomlYyPCG3jeqv8AIv3VZa0VsUmZ/nZWkT5P97/dqlZzQ3C+dC+NqbmVl/8AHau2sc0g2O6yLIny7k+Vf/sqXN7usjy5VOaRPb27x7NjruX+L+H/AIFSHT0m2wwpJhW2qrP8q/8AfVXrDTLm3gRNi/K3977y1qw2dtcRibZHvZP4v4a56dYytzfZMRtI+1Mr+TIFb7zb/wDx2thtLtmjj+zI2xflT5PutWla6L5bRvCilWfdtWtKGzkuLcW03yfP/q2f/wBmqeaMmk5Fxo+5zSObXTZvsYWZ/MG7ay7PvU29s4dNs2d9sSKrPLN975dtdCump88MKK6N9xWf7teeftReLD4F+Hd48Lq8twqwRRr/AHm/+xruoQlWxEYHNi5Rw+FlUfRHkln4k/4Trx7rGpbFW3tbVktfOuNy7dv3q+dfGmoTaB48a5SZQs0zI7Rvt+Wux8D+NE0PWNVsIEWJLi3VZZF+Zo683+L0iXX+nwo2/czK1fpdGn7OjGkfj1WpPEV5TcjM1jVJLy4vNGm+XvAv8O2sHS9chjmbTbybake7ZVG41aa8VLxOXX5XXd/drnteuJluH1KB8Kz7tu+tRR/lL/i6NzcS3ML7g38NcfdTPdKUf5TH9yt268Rf2lpOxNu9fuVzl1Huk3pu2f7VHvyHEv6PdJGrQ3L5Rk+6v8NVJLhIZsmZvl/hqKNvs7B9/wAtMnBlcuE20FlopDcfvkf5f7tVriNFZvKTaGqOKV4n21Z+1QyITs5quYPhKW35s05PvClk+VmSm1RXxCP901ImSy1G/wB008fIyf3anlCRr6e0a437l/vMtWZFh+dN7f738NVdNb5dm/duq1cyiSEp0VU/hrOUp/CZ+5zkEkkbbtn3qj85Gbp/wH+9SPOnl/u3yf8AZpi7Np+fbVFD92z7gxUVxGjSNIU/3/npzbNuzp/tU/anlsjv/wAC/vUE/aKkzfvOOPk/76qu/wB01Zmjzl/u7aqfwfjTj7pcYj7VtswOauTWs0kn3/vVTsXzcL8ma6RYEmtg+za+3alVGMxS5UYFwkiybJui1C0bL9zpWtdWqeWibMP/ABLVDy3iIx12/PUC5j27/gnAhH7W3h8t1+yX3/pLLXr3/BS34y/Fj4b/ABZ0LSvh/wDEbWNGtpvDommt9Ov3iR38+VdxCnk4AGfavI/+CcygftbeHc9fsl9/6SS12v8AwVjx/wALr8OFv+hWX/0pmr9zyfE4nBeCWLqUJuEvrS1i2nqqXVanFNqWNV+xyHwH/aF/aE8V+MV0/VfjJ4iuIlhZmSXVpSCdvpmvY7/4rfGmwjW5HjnVJUQZfF6/6814H+yppqXmvX135O6OG1Xd/e+Zq+gVs4W2xp92RG2K38VfiWN4mz6ElbGVf/Bk/wDM48Xyqpoj7g/4J8+LvCfxP8G2R8ceH4NSvA5jke5t1keRgvQ5HWvtfw98CvgvqGnxNc/CnQmnfh0TTo+P0r8q/wBiv4np8M/HEWgvNNFb3E+9I/u/N/F81fpz4b+PHw38G/De58VeP/GWn6Pp1nBuuta1KXbFbr/Erf3m/wBmvWwHFedTw1p4qpdf9PJf5nl1FUlU5YFzxx8B/g7pMLsPhxosA2ZVVsEDH9K+U/2v/jt+yr+yjo6X3xNk0iwvJ4WMGg29sr30hH3Ssajcqt/eavCv21v+C6Wr+Mrq/wDhj+xtYf2fZSRNa3XxC1aBvtd0v3Wayhb/AFa/7
TfNX5V+O/FPibxX4uvdf8X+I7zV9Qmnbz7/AFG4aWWX/eZ6ipxNn1aVli6v/gyX+Z6uCy+75qjPrXx//wAFNdc+IurzHwjp0fhjSYnUWtvCA1zIP7zyDp/u1na/+0n428W6BHeWnxV1fTNQj5R7HVJFikX/AG0zw1fIO98ZD45qwup38YCJdvt/368+pmXEPtLxx1X/AMGT/wAz1FhoxleKPoXT/wBoD9pnWIT/AGD448VXmDhpIb2Zv61U1D47ftgC4a1tfE/jHOcKVmmb+teLab8RPGujqE0rxNdWwX7vlS7avD41fFvem34haplfu/6W3y1pDNs/hvjKv/gyf+Ztyp/ZX3H6PfCm58Z+KP2LTN45vLy41m88Makt1LfMTMzEzqu4nvt2j6Yr4+8KfB+2haKF7m1kmZv9THPG0i/7TKtfVXwJ1DX/ABH/AME+0vtTvZ7rULnwhqwM0jkyO2bkLz69BXxj8JfBOq+BfElt4qv3k+0w/N5e77395Wr9W8YfaV8myCc5Xk8LFtvVtuMLtvuzz6FRU/aWetz7S/Yv/Zfv/Fvi6yeHSvtFusvzbW+Zm/2a2P8AgsdqFtdftDeGP2afDvmNpfwt8Mq10u7cralefvG3f7Sx7Vr7X/4JY6H8Pbr4Tv8AtCaqkNto+j6XNf6lJt+WFbeNpJPm/wCA1+cnijXtT+NHxC8U/HLxI7Pf+MvENxq0rSfeWORv3Uf/AAGPbX4rTj7OhdnHTqPnlVkeN3ngWa+0/fCjKY9uxq4vUvPtbiazmdkVX2vu/ir6D1DSYVXYHZEX5UjrkfFXwoTxpH5EP7u5bau5fu7f71clbBxxf+I9LD42cPi+E4Dw7dQ2tv52yRP4fubq+uP2Q/Deg+JtBks33STSKrL8m3d/s18neNvh54q+Gd5BZ+IYG8u4T9xIv3GX/wCKr2z9iX4tJofxJ02zvJo5LJZW82OT5dvy/wDoNfF59g8RTpuB9TlGKoVKsXL4TU/ai/ZN8aeIvGH9o/D3Qbi+Zkbda26Mzf8AAa5T4M/8E5f2iviZ4nt7b/hANS03TWlX7RqWoReUsa/xMu771fob4V1azj0C38VaVcRm+a4ZZfsqfL97cvlt/u1u+LP2irDQ47zXviR4wkt9I03Tftk8zRfu4VVfur/tNXJgs1n7ONOEfePpa2V5fOXtec+Bf+Cxnhv4afsxeK/hf+zZ8DfDFnpl54b8KLq/iDWIVX7TdXlx8v7xv+A7q+M/iF8Y/in8ULK203x38QdU1WztW3W9pcXTNFH/ALq16B+1H8adb/ae+OPiT4161JIF1S4WLS4bj/WQ2cfyxL/3z83/AAKvJ5rU28hfZuTb8lfo+Gw0XShOovePhsRjKsas6dGTUH0Es7jawRH4/wB2r7SPJGA7b3/g21mQq8M2/d8rP86tVy3uETMz/Kn/ACy2128vunnSkS28myb98i71/i30s10NrP5NUdUZ4ZEvPlZPuvt/hqBbnaQjzMf760uYZr2snmSbH3K/91q9l+Degp/Zb6rcpt+Vdn+1Ximh3KXEyJMfuv8Adr3v4e+Rb+FxMJtz/wB5qfLzGcpFLxhZpDdvsRpUXdvjVvmriPi8ws/hvJZ/KryXC/Ktd74mk8yMvt2p/e/2q80+Nl0kfh6KL5t32hV3N91v71Eub7IoL3zgLKKHT7Vf3eX27qZNdJcfP/C3+1UdxIjRqibm3Lu/3aqx3AZv92o983j8RbhkRlZ9m0fx1qWN1ux5U3y/e/3aw47j+NHbG+r0Nwn2dE3baA97nOw0HXLm3uAEmZVb5dtdz4f1yFdsaRqTv2srfNXk+m3STTbAjBdv+sb7q1q/8JhDplwn2BGeRf8Al43fLuqvdKPZ9P8AFlz8P9Qj8Q23iG60qeGXzYri1naOT/gKr96uv1T/AIKkftSyaOmg+HvG1udq+V/al1YK9zt+7Xy42sXOpahLf6lfyS3DN/rJHrc8O2v22ZX+81R7GlKV2VRxOIofBKx6Bq37Q/7Q/iaZtS1740+JJZW/5537RLu/3Vr7L+BHxF8SW/7PmlfE3xHdTazqNjp095K17MS1yYZJCqMx5wQgXJ7V8H6tdWtisNtBcqWk++q19qfB9Fh/Y6jWZyqjw5qG44GVGZ8/lX6/4NU4wzbHJf8AQNU/9KgfacH4qtWxtdyk3+7lv6xOQuP2zvDFn8ZJPip4b8NzRLeRRxPZ3EXzW7fxf71dH8WP+Cgm3QWtbDclxNE29tnyt8vytXytJ4i0qzXZpqb9vG5vvf71UL64sNcVvt9t5is+394+2vyH2cYnwlaXtqvPLWRzPxi+PHiHx5qjzXl9JJ5m7f8AvflrzS5vri+m86R2J/hr2K8+EvgnWbX5JpLQqm3dH81YF18C9Y0+YPYOt7C3+qWNNrVUY+8HwwOGsdNubhRv+b5vvVtaf4f8yRcv92u20f4SaqrLbJpUxO/7qr91q1tN+EevPcNbx2DBv71b+zI9p/McTFpsNra/Ii4V6z9UmRV+R1WvTrj4FeP75fLsNKZg393+9WQ37NPxguZFR/BkjIzfPN5qrtX+9S5BRrRkedXMe1TvfLN/eqFVTn/vnbXsln+yD4tkk/4mviTSbBNqv5lxeq23/eq237N/wx0dvtPiH4tQv95mjsbfd93/AGqy5YfCXzfaR4psc/3dq/LV2zt3WP54WU/7Ve1aX8KfgCyqltealfTNKrRL5qqrR/xfL/er1P4f/sm+HvHV8mm+DPgteTNcS7Eur64ZlX5fmZv4VX/eq40+Y55Yjl3PlLSLe5hbf5GTv+9XW2du81uj/edl/hb71ffXh39mf9mb4Q6PPbeLfhjpvibxJ9n8q1jVma0s22/eb+81YWg/s2/DTVNY+2ar4YV3ZVaXTbOLYka/7P8As1XLTM/b1f5T4l+yvaxrv3Dc1MjiRbrY8O4/d3fw1+iUP7PPwKtbo2z/AAl02K3hZW3KjeY3y/MrV5t8WvgV8H/Mlu/CvgOFIlbbLIr0/Zlyrcvu8p8V6hYoreckLNt/u15t45mM15sHRa+7dD/Zl03XN7p4VWNPmZZvmVWrTu/2KfgtY2Ym13wxb3N20W54YWb5qXLHlCFbll8J+dWk2JurgJiui8QWM9loax7P9lttfeOk/sE/CvVNUhmTwfDZwSL/AM92Rfl/2q67w7+xP8AdJ86HUvAcepvv3RW9xKzL93/x6nCMf5gljJSlpE/NDw1pH2i6R50wiv8AN/s139xZXjWYs9MtriZ1X5Vt4mbdX6KWXwf+FHhqFE0H4N+H7OX5lVZLJZG/3vmrpPDPwjub6H7Xc6bp9nDbory+XbwxxW6/xMzKv3aUuWOpP1iVSZ+V0fwp+KniC8CaV8OteuVk+55OlyNu/wDHa+ov2Vv+CFf/AAUc/an0yPX/AIf/AACvrLTJuftmq3C26/8Aj1fof/wSY+Cz/wDBRb9py/8AD2l+cnwt8Cyq+rXkabV1KRW/1at/dZlr97vC3hHw94K8P23hnwtpMFlY2cQjtbWBNqRr6CuiNShQjdwvITWJxWkHaPc/nY+EH/Bo
d+2jqzw3fxL+LvhnRIpP9bHHK0zxr/wGvp/4ff8ABoF8GbfT4/8AhYf7S+sveNFtnfS9OVl/4D5lfs1swPljH4GuR+HPi9/G8Op+I4Jt1m2qTWthhfl8uFtrNu/2m3Vnic2nTpuUYRj6L/O4U8opuXNUnKXz/wArH5zfB3/g1P8A2Dvhx4ng13xx468UeKoIJAyafcNHbLJ/10ZPmavvX4Ffsn/sw/sv6RHpHwK+Cvh7w3DCm0XFjYL5zfWVvm/8er0jc3c1ka1bzXnyIjbPvNXxONzrEyd4HuYfC0o+6WrrxVZKjGGZWEf3m3VxGv8Axshs5pIIbmNpFbair/eri/j78QrfwLobusjIkcTO23+HbXyav7V3/CG/b/GetnfbNKrQW7JuaT+LatfPVsdmGIu3I+nweWUILmlHmPtPWPjVcaP4fXWtV1FbTzpFiiVm5aT/AGf71fkV8L/iI/w8/wCCiF58Rb66jZrLxrrU0srnarlvtQP0BLfrXtmm/tbeAPj94ulv/i1qt94ZttPv1ntfMRl/4DHXy7d3/hu0/ae1zUb7Fxpa+ItUfLHIkj3T7Sf0NfvHgxUqzyHiNT/6BJ/+k1D6zJ8NTo0a6jFK8X+TLn7fX7bt54k8VXdsl5NbPay/PCu7arN93/8Aar4q8YfFabWGW8m1Jre9mf8Ail+VvlrR/bi8babq3jee5s9RZ0kddkkdxuZVX7q7v9mvmjVvGk0l0yJc5C/d3V+M08LSlC6PJWIqUpcszsPG3jrWPFXljWLyQXML+Vu+6rL/ALX96uKuPEGvWEjQxw+dubbEyv8AMrUxfEH2hvJd1DyP8rSV9mfsd/8ABHzxd+0P+z4f2tviz8aNB+GHw4S5YWviTxLbtLPfhflb7NAv3lVvl3NW0MLSStI9KWMoRjF31Pnbw74R+Iug6LZ/EL+29N0prN1uLCS4v1Z/l+b5o619H/aY1X4xeJtXPj/xlcX+q3Uu9Fkl3Ky/d2r/AHa+mr79i/8A4I1+HrBf+E2/bk+InjL7KzLOug6dDaW03+7u3Mq1xnj74I/8EmvDscWt/BaHxRDeKrfZb661xpG8z+FmVa5JQy2Xxz947q1XHxpRUIWj5nI/Cfxd4q8I+KUfw9DIf4W8lNv3q/RD9iHXtNsdcitvjxpv9sahMm+10u8bbHa7vuSN/e+X+GvyQ1T4gal8NfGe+bW5r+2Vma1uN33l3fLur6r+Ff7cXhXxVr2meMUmaz1lbCO11L7ROqpIsa/Ky15mKwtOPvRReFzD937Pn/xH7YfDWz0nwt4rstW/4QzSr/QZv3U9g8XmNbq3/LRd33q1f2k/2RfAPxH00/EP4f2dr9ssoma40m9TbBeQsv7yP5f9mvkH9kP/AIKC/Ca/8PC78R+KYb4WaM0sfn7YlVfvbmb+Kpf2af8Agof4hvv2ltV8H+Jri9vfCeu6jI2mwPLtW3t9vyrH/erLC46UI8jiRmWUyrVVVpz+z9/kfAOuX2ifCX4i3nwZ1vWI47631SZ9Gs9nl7rVpG2qv97b92um0u8t1VIf4tzMjM38VeWf8HFfh/w5Z/t+WNt8Lbq609v+EZtdRtVtfla3aSRv/iab+zn4i8bap8MtMvPHepefcqyq8yrtZl/vNX6LhZVqmDjKfU/Mak6dLGTproz2qLU5+P8ASVk8xP3qrF92ry3k1qoPksHk+V137VWuVs9W+zzboSu3Zu8tU+8v97/ZrX026uZLrfvaRG2ruX+Fv9qrlHl1O+nW+wdXp+9o2h+0szfL93+L/Zp+5JJAknnRJJuZFb7y7W/iqhpa3ir5T7Y1jbc/8Tbf/iq2Psr3G3fPvX725krlqS5Tvp1PsmNrGmw3EMyb+G/iWuRurKSzuXDmT5mPDnvntXeTWv2WN9m3Kt8ism1V3fxNXL+LorWK9C2YITc2cSblzx92v0rwYd/EXB+lT/01M9vI3J5lTv5/kzxr4i6Ib7U7uEkoHLEqDgcn71cJceHUtWebfkM33W/hr3TxV4VN7a+a0bASKHJ/2d1ef+JvCqNM3+hqkf8AeZ6/L+J6kpcQ4yP/AE9qf+ls1q/xpX7v8zgE0lGV32fOr/M3/stWbXT7mE+ds3rtXcu//wBBrebQUMizTJ86ru+X+Jakj0Gbzt/k/Lt3ba+ejU5dDhqR5veQ3SdJhuFebyNrtt2fP96rS+H/AC1WZoVmC/dZX+9WvoOhpayOk3ludu5V/u1uaf4TMcRhm01Sjfw7tu3+LdSlV5ocxtTpy/lPOrzw/wCTF5zo22RvuqvzLVJtBuZJNj/M+z7zN95a9R1Dwj8yySIzBdzfN91qzpPCaNIkiIxH3dqp93+9TjWnL3WRKjLnsonCLodyrjy+qv8AxVdXTUt2CTQ5RV27o/mbdXSyeHYYwiP8zMu3b/E1WbPw7IFWb7S25V+dmSsZSh8Uj0sNTlzcpyseizSQtCiMq7/+Wi/d3VlXWl2cbO7/ACqv8Lfe213WqaXeMuya5XYvypI38Vc7q1vctJNtmjKqq72/2qzp1OY9yjHlloYJXy408v8Ah+ZP722pGuJo5tn2WRxs3fL96mX023Z/Ei/M7L/eqH7Z5x+020i/L9xd3zba1jKR69GnGQjTf6OEKSB/vbZP4Vpk1mkK/wDLNmb50/2qYs1tNCNiZWNNqbqTzIfOyj/dXav8TNWtP4jjx1PqeXarYPY3Don8Kfe2bWrFmkdt8aDB/wCedepeJvCvmKs+xV/i+X71cdrPhSazk85Nv7z/AGa+kp4jofi+Iw0o6nPaXcTWbb4UY/Jt2/71dZouoXTRh9+7au3aqf8AfVZEemzRMu/aGZa29JsZo5UtYUx91mb7v+9RUrHNRpcsveOt8PyQxqtzM7fN8u1W+7XbaTqWJPkdn3J97+KuA0uxeGNnjfzv4krq9Nvfs0Pnb8PH99VrzqnvS5uY9nCx9n7x3ei301nt3orf32rr9C1hLhV/1aLs/i+WvLrHUoZI0S28xTvVvmetqz16Xar3Lw7N+1t3yyM38NcVSjKUz1KOI5T0tdWtpoW8lJBHu+VlTau6o7rVEWb55tiN9xl/vLXJaX4gmaN3d22fdRo/mXdUOra69vdb/tPzfwNG/wAv+7trGOHn3O6OK5o+8dBq19Gsn75+W+VG+9urIvrx5DseZif7sf3d1Y114mc5+07W/hRf4qo/29bTb5EdnKv8m37q1UqMuX3zT65SjLQ0by++aWGb7yt87fd2rWRdXkMz537Sy/eZ/wCKoNS8QW0K7/Ok+b5fLb/0Ksu8vpmb7iu2/d83y0/qvuxKlmUUeq6C0k/w43eblmspsOQP9rmvMVjdmEMKLj5VlX+7XpPhlwPhaJC5IFhOST1/jrzyx3tKPkV0/ut8tfuPjGpLIuHEl/zCQ/8ASaZ3ZpiEqVCT6xT/ACNXS1+zqET5nZvmZvm+X+Gt2xsz5becjCON8o2/duasfS3RYfM+0qkv/PNvvNWrDLCsyQu7O0L/ADf3dzV/Pcqkuc82GK5ocpsaT+8keHpJHtZ/O+626tuzCXClPJZJWT+F/lWubtb
ya3VU8jO5tyVehuJPJ/4+WZ1b/drmrU/3vOduH5pSlI6G1uob6PZ50Z3ffVvvLtrWiurZdttNbbH3/wAXy/w1yEN+8kkWy227f4fu/NWtZ6s65TyVfb8qbn+7XFKnKPvROyn7szrLW4hk2wj5Ek/hZvu/7VasLW0zb4PnSNNv95q4iHVkhZnM+f73ybm/75rVtdVdXR4V4Xav/AacYx57m3N/MdPH8reS8ypGu7bJInzV8n/t6ePEvPE2m+A0dcWe28umVPm3fw19G6n4kTTrW5uby+VGWJnRvK+VdtfBHxa8YXnjDxpqnieaZm+2XTfe/hVflVf92vqeHsL7TF+1l9k+T4rx31fAxpR+2clpmvFfE1xamZYkuLdk3NWD4uuHkhe2fkxqqvVbxRffYNUS8875f9ml1TVIdQt/tj/P5y/Mtfe8v2j82+E88m1AafcPDcrwrttVflpl3B/aFr5Py7WX7y1L4s06PdK6bUXf8n+1WNp946/upvl2/KlPlQSlMz7q3urHd2RmpkkjzbpFdvmrW1S3+1Wp2P8AN975awnieBz6Uy4/CEgKBlxTJJE28VMpWQh/mP8AvVHPCit/8TSlIqPmR7fMzv8AvUwllb56e33zs+7Ss26P56UTQbTWV2bpTqKctgCpGj2hNn/fVR1LJJuVS6YK0yJblzT1/ds+/dt/hq55iLjZDlf4kqlZyJ5Owp81WtrrIQj/AHan4jIim+Vm/wDHaZJIm3Y7sp/u0sjOzb6Yzbmx/F/eqTQfHJwqJ/6DToIYWVqjSR4+j/dqTzn8obP+BVfoZy+KxHcbVjLtxWezbqs3zlhhvl/2aqnk5NL4jWmPgO2ZCP71dhb28clur7MfLXGhtrq5/hrs9JndrON0m3bk+61UTWKl1b7lyn/fVZslt97ZBuNdFcwusfmPDtrLuFfcdibaDH4fiPXP+CdUZj/aw0BXAP8Ao19tI/69Za7H/gq+qn40eHGIJx4YHA/6+Zq5X/gnnE6/tX+H3f8A59L7/wBJZa7L/gqmm/40eHe3/FMD5v8At4mr9lwH/Ji8X/2FL8qRg/8AfI+n+Zy37H+jvJoesar9mXYssaRSN97+9X0P4J03R7q087yfPuPvJ/D5deYfsg+A7aT4Nw6lczTRNqGqSbWjib94qr/er2XS9JttHX7NDYSSt5W7dJ/6Dur8FxFGUqtzgrc0q7scb401a50PxMr2G2B4/nRlb7tRftBfHzxz8btH0rwl4k1DZomiwK0Wkx/6uaZfvTSf3mqb4jaK8MKarqVmyNcM33lrz/Wr5I0SGzTG77y1hToe7bmLpxieeeMYbOC1mv0tlTy03LtTbXhV3I01y8rvuLOx3ete0/FRrm18Pzb5vmk++u+vFXh8v79ejh48sD1MPyqmMeNVUU5Y3kpT3/2fWpbeGRmDhcf+zVudHMyHyXLKnZq9E+DfwdvPHN8LmaBvs0LfOzL97/ZrO+Gvw51Lx54mttEs7OZ1kl3SyRr8qx/xNX2T8Jfgrf8AiDULb4dfDfT22RyxxXEirubb/eXb95qiXPKPLE4sZiZUz1v4W6Bb6T+yxH4d0dBGseg3sUKp0ViZun4mvk6TQX+2fYIUwscux5N25t275q+5fE3w/f4O/DrUPAcSSCTSdFlGJvvFzCZDn8WNfLnwt+GviHxprS2em6bIqM677pvlVf8Aar9m8V4t5Rw9f/oFj/6TTPKjU0cj6w+G/wAYNX+E/wDwSK8T/CLRHkTUfiF4th0G3bzV3R2O3zLuRf8AZ2qq/wDAq8Ek0m2sdL8iFFhWNFSLy0/hWvUvjF/YOg6L4Y+Hugozw+HdNk+0XDJu+0XUn3pP/Za80uI7zXJPJeFj8/3fu7mr8WjH20vdL9p0exz39izX14+yNnaR1Wu68N/DvTfDGi/8JJ4kh8lm/wBV5n97+81dJ8O/hfZ2Ni/iLW91vEvy26/7tcR8dviw8zP4b0p1XduX5f8Ax6lWxEMLS5Y/EKXK/Q8x+N3ii28a3klhDbedbR/OjbP/AEGvGLvUtS+H/ihP7Bm/eKm9f92vUraz8y6+0/Nll27ttcD8RNFe28QLfww7oZk2oypXku2I/ivmOqjWnT96J6B4I/4KKXvw68KDwn8QtE1K/aDdPZrZ3Hlp523au5q8p+Lv7Z3xU/aI+yaP4huY7LSrX5V0+z3L9ob+9K38Vcl8QNHS+0+Qwwtvj+ZN38X+7XncUz2dxt3421tgcqyylL2lOFpHtRx2Kr0uVyPT1ukn8v5GP+0qfLWdd2aNC29PmrH0PXJtvzu2G+XdW3HcJMrQ9vvbq9nmOSXPGZhXSPbr5LorfxJUc2yFU2btjf3a1dQhQR7HmVqzZInm/c+XsZXpy5hx194lhkmmj+yzJ8jfLWXNvs7h4XnUlW27v9mrLB47hfOf5VenapbvfW/nBF8yH/x6iWxcSx4XZFut7oq/PX0N4XkL+C7a6+0svmLuaNk+61fOHhu4f7ZG+z5m+XbX0Bodx5fgdZnfd5fzRR0RlymVSJpXkL6hYyo7r+7Xd9371eOfHiTy9Jtrbfs/f7mhr0iPxM62YT+KRN25a8Z+M2sXmoalFHN9xXZkpSJw8feOSjvJmjKO9RvJ82xxTKOCKfMjq5R63DxR7A9Wobh5I/nm2p/s1QTp+NPaT+D+7THyo1G1J/uJ8sO37qUsNw8u1Efhv4f4qzF+Y/J96r1jNNHJ+5g8x2/u/wB6szLlOkhj03TbUXl47E/wr/E1amk+Orm482HQ9HVVX5WauRnjaE79avMOv/LFW3NUkPirWBYvpWnzfZrVv9bHH/FV/CHxHb3V9pWiyJf+JL3zb+SLclrCu7yf96vt74LaiLz9hldSMW0N4W1Ntuc9Dcfma/OW11D5lcI3/oVfoZ8B3B/4J9I4XA/4RHVsAf71zX654N/8jjHf9g1T/wBKgfYcEpLGYhL/AJ9S/NHxXpusPdXzoj8yfc21ryLpmn7H1jW22N96GFNzLXAJqdyrDyXk/efKixpuauw0JvCvguGLW/HkK395J/qNB3/Krf3pm/8AZa/IvhPi+U6nwrpOuasq3Oiab9mtPNVP7S1K42r/ALy13eoX3wl+HLQ2Gq+Kpta1hdz3Cr+7trdf7qr/AMtK8M8RfFTxb4w1KOa/vMW1u6/YrGH5YrdV+6qrXOX2ralcXz3N5eM8kjbmap5plfF8R9J2v7QHgy3meG2hj+zq25o1+8zVUvP2sNH0Ni+m+G7eVm/ik+avnGK6mjjLo/zM1Ps7O81OZHSGRi38WynyzlpJhywPbNe/bE8Z3CyW2lTtbJJ91Y/l/wCA1xV98dPiLr0vkvqsy7vv7X/8dp3gn4E+PPGd5HZ6bokxaTbt+Wvqr9nH/gl74n8TTRX/AIw8vTrdZVaXzvm3L/Ftq/Y296RjKtTpy92J8v8AhvRfiX8QrxNKsIby7Mj42x7m3bq+n/2e/wDglb8afikyX+vaDdafbLt837VE25q+8/hD+zT8Af2S/Bdz4217+y7e2s7ffLeahtjkb5vvLur5o/bE/wCCziWsN54D/Zmf7H
FNuin1Bn8zzP8Aajp81KHwmcVUre9ex6Rpf7Gv7LX7Mc1s/wATtY0+81WS33Raasqs/wB77rN/DXfah4g0258OjRPDFtZ6PZXHzxR6XF96Nl+6zfxV+Xvw5+IniTxZ8QLn4heOdZuL+9m+Z5rqVpK99b9pzXVsYLC8muES1i2xSK3y1HPVI9j7x9dQ+BfhFpOi/wBveKvFUKf3odu6Vv8AgVcr4m/aM+Ang2NU8Nw3V3NuVNzbf++v92vib4tftRa3qEkltYXmfk2xSM3/ALLXEWnjrX/Fmool5eMfM+bc396sv38pGvs48p90t+098H9SuHFzY3lt95Uh/d/vP4vlqfxB+0l+zfqGivYJo91EFiXz1aJd3/Af73zV8cW+nvfN/wAtMbfvK1al5pdh4d0s3Mzx/c2p5jfxVpH2sY3bJ5YylyntV5+014DXW7iz8JeHry20tdyxSXnyu3/AaJP2lvDcMcKeH9NkaeFtss00W7/K182trH9sXvlaVN/s7v71dd4R+G/iTxBNDbfvNjN8/wAv3t3+1R7OUve5glyU9j27/hfmpapH9ms4VQru3Rxr/rNzVt+E9Q8beJmltrCFlmkXduk3MsfzVH8N/wBnn+zbUfbLDY6qru275lWvfvAeh+G9Ls/s1hFbonlR/wCkN97dWkacacfiMZVPabI5X4ffBvWL6N9b1t/KVZVV5JPmaT+9trwD/gqB+1Y+gIv7HvwguFgn1JY5/Ft9Zj97a2/8Nv8A7zfeavo79qf9pLQfgD8HNT+J1/rEYuLVNul2MMH/AB9XDfLHGv8A7NX5l/AHQ9Y+Knxqi8VeObuafUNY1uO41Sb/AGpJPu/7q7ttRDkky6dH2ceaR/TV/wAG737KWnfsyf8ABO7w5evp3k6n4ukbVL12TDNH92Ef98/N+NfeJJOMV5t+ypYWHhv4BeEvDFmipFp+g28Cqv8AsxrXo5mRByRV1+f2judWFlTVBWMD4r+JIvBfww8Q+LWmMX9n6NcXCyL/AAssbFf/AB7Fct+zZpLeHv2fvClrdERzyaJHdXW7/npN+8Zv++mrj/8AgpR8QJfAX7CfxT8UaXtknsfB9xIqbu33f8a/L/xj+3n+3H8WvAOg+GPBetR6FpS6Xaosml3X79Y/JVVX/gVeHnFSVPDKNviPUy+nTxdVx57WP198X/HD4QfD6DzvGXxD0qw/uCa9VS1fI/7Tf/BeP9kz4La23gnwPb33ivVt7JIthF+4hZf7zV+bt98HfGHiC4+3/GP4tahNbx7mltbi8Zm8yuT03xl+yj8LZLzVZvAd14n1j7R8+5WjRf73/Aq+Tti37spRXotfvPo8PgcvpyvJSl+CPp/4lf8ABSP4oftD3R1O+0GLR9Ml3fZbGA/M392uE8VfGqaHw69tbaIuoXUkqqkc33lb+9/31Xzf46/a+8c65rESeCfhXZ6RayOsEUP3njb7qt/3zXo/g3xlf+GPCt9q/iqa1h1VYIWt45vn2xt/FURw0YbHqwxMKn7uJYsfG3jDxBrmoQ+LbaGGJUjfzpk2xw/7rV5n8evHmofD7w14j8c+FdRjuJbWSRra6kbCzI8uwsT/ALSsefeuW1L4u63481zxP4J0bW7i6abbO8ccvzbW+XatdL4p+EGq/ETwNcfBy0jMd3cWi2uy5l2FWiwSGY9PuHOa/ePBqEHkvEdtnhJf+k1D6DLZtUa1t1F/qfJnwr8A/EL9sP44aH8H/BOlySaz4q1dbPTreP8A1bSN95pG/hVV+Zm/2a+5vjb/AMENf2VP2ara20P45ftT+M9f1w26/wBo2PgTwzC9vYt/Eu5m3SbW+XdXhf7HPgv4i/sD/tYWXxc8S3Nm9vpuh6klncWsqyNa3EkLLE23+Jqwfjd+3V4w+KHjy38eX/iS8iuVsI4vmuNqqy/e/wB7c33q/F6mIlh4eypRPHwuEo1H9YxMv+3T17wb/wAEj/8Agnf8UL6Sztf+CkHiDRrlv+XDWvBsKyR7v4fvfer374//ALbnwr8A+Ebj9kPwlqMepeHvhjpen6To1v8AZVjguI1j/eTeX/eZvmr80r74669qnio6xDeSRSq6sm19qs1c78Xviff+LPFUnip9v2+6iWK9k3f6zavy7q4KtXGYmPJPb+tz0o1snw0ZTo/F/e/Q7H9qTxH8Or7W5db8H6Ja6bNMzNLHZptX5v4dv3a8cj8TX8iukMzJ8m1dr/M1Mg0/VdfmSGYKjbt3zLXT+HfgD8SPFFu15pRhwv8Ae+X+KuqjBOHLPc+YxWZYipVk18Jm6B4F8beLozczQTLZ7lTzpPurXs3wh/ZHspIf7e1LWFuIo4v3sK/drj9N+CvxR0XUIvC+rePIdN8xt/ls25W/utX0/wDsG/sTyftIeMtY+Hmv/tE6xpt5ZwbUutLVfL8xl+Xd/u1x4xVoJy5oqJlhac8RU9xSuY/xSm8GfD34f2WjeErOx8P20MWy6mbdumb+9u/3q1P2Rf2rIdN8bWXizxpq9nFo/huLeuqK27azf7P+1X2f8M/2SfgJ+wva6d4c+KOveGfG+p61p10mt6l8QLJXgtY925bhVZv3bKqtX5a/tofGD4Y/Gz9qrxn4n+COh6bp3g9bpdO0aHTbXyILiOH5WuFj/wBpt22uLJcLHNcTOlf4ftG+PzbGZTaUv/ATf/a2+Pk37c37X2u/Hia28q0uorew0aHbtZrO3+VWb/aZt1el+F7pNN023htodsUcGxFj/h214Z8GRZ2t41zPtaaNP3W5Pu163oN9H5azI6rt+Xy1/vV+mU6P1eEYI+Kp4iWKryqz3kegaTvureL9+qps2zqsX3v+BV1mlTPblEtoWVPupt+6tcB4f1qOJ0R7lYV+95ddto+qblE6bWaRP3rLL8u2s6kOvMehTrHbaTC7XG93Zt3313/L/vVtwt9nsxv+9t+Td/drldL1hJrcpM+14/7r/wDoVXjrXmb0e2aJF27FZ9yyVwVI856dHEc0NDQuLn9ym+2Vzt/esv8Ae/hWuV8YrslhGQSWkJwMckittdSRWdHTG751jVvl/wB6sLxbc/aDbndkgOchMDGRX6P4MXj4l4NPtV/9NTPpuHpKWYQt5/kyafRjf6LAix5Vo0Z2rldc8PpdJsSFU2/Kzb91dtpk62+lQlZWwYhvRl9v4ar6hp7pdfZoZlf5V8plTb/wGvyriv8A5KTGX/5+1P8A0tnRzXqzXm/zPNrvwfZyTM6bWb/2X+9TY9Bfb50KSEb9v3PvV6ND4bRrhv3Kt5m3fI38NOfw7PHMr20LLt/2PvV8pUxHNLlbOiNE5PRfC9sk3nQo0x/jVovlWuls9F+1W6O8Kqn3dzfK1bGj+G9rK8tzJtX5W+fctdfpvh+GS3+zfZY32vuRpP4fl+7WMq0Y7nZGjLocC3gndCdnlhG+bzPvL/wGs/UPClztaaaFkdf4lT5a9X/seHyV8m2V/LT/AFa/LVW48NpNH5m+N42f5/LesJ4qXwhUwsYnjV54XdWZEs4Wf/llI3/sq1QuNJmWPzvs67W/8er1XVPDtt9oYpZrh
V2vIvzbmrkfEelvbsyI/wAi/fatadbmlymtOjKMOaRw99Z+ZI0KJH+7+aVWT5V/+KrjtatUDTIkKqG3NL5cW2u/1eZ41+R9kSv8y7PmauM8SfaW8wJwJP4mT7rV20ZHVh5csveOE1aO2hw/ysyttT5m+b/gNZrRusz5RdjfLub+GtrWEmjtGd4t7N8q7flWueW4eNvLwrfO21lfctdq5pR909ijJe6TfIsbQu6j/a27dtSwzPNGJlhXZGnzSb/utVSTzpl/ffKi/K+3+KrdvvaFNiR/e/hq483UjFRjKMjqdY8PoY5JHhkT/gG7c392uR1jwrDJsT5d7fLtr1TWNNjbfbJcyIvm7lVW3LXOaxos0bMnk+YkPzfu/wD2aumjWlL4j8wxGHhc8xuPDsMjB0+YK+2Xcn92ren28McK7NoZn+Rm+8y10l7p81xJ8+52jTbL+621Xt7OGGYwvbLjflW/irT6x7vxHH9XlGr7pDZ2Mit+5h3hvm21pWtjNuXfFkN9/a/yrU6x+TZhETa0fzfL/Fu/vVJBNMLeI/Zmf+H7lKNTmjZGkafL8RNHshbyYdv7xv7v3aFnhhYI83y7927+7Wa3nRy+TDOx+f72/wC7RcXX753R1dP49v3du2tKcb+8pGcqkTej1Z49vkps/wBrzflZaoalrlyoZ5pmZf4d1YP9oTN8kyM8rIq/K/y/7NF5ceSwT7T935fu1tGMY6GUqkuX4i+2sT3jM81z8v8ACu/duapI9SeRXuXuVTy/uq38Vc62pJHMfLdaZ/aiKrwzeW8W1XSOtJU+aFjm9tOMjdk1J1X7U94vzN+6jk2/N/s1BHMl1I9zNMzOv3tr1lzXiXEiTXK703fuvl3bWqeG6SOQvvVU+Vfl/hrKpyqBEcRKUdT3Pwgqf8KlRYxgfYJwMfV685t1dUXUk2v8/wC6jZdqrXongxv+LQo/X/QLg8fV68xhu4ZP3szsy71X92tfsHjK5/2Jw64/9Akf/SaZ9fnDTw+Fv/IvyR0+nybpo7bZmVvl2yf+hbquyXSLN5LzLu37lrmLfUn+1jyZmlbYy/N8q1fm1D5v3Ykyv8O3dX87Vuf29+XQ5KHJKJuw6m7r53lLGy/L8z1I1x9oVIUuVRtn73zH+ZlrmZNQ8yYJ9+Jm+ba+1lq39rmZv3x3rH/49WXLKUeY9WnW5fdR09vq1tDDCnzSv/e/vf7VWrHXIYX37d+75dv97/armY9Q2xxJ9xv4G3/Kv96pLfWkh/c3LeUG2sjf7X/Aay9jPp8J2RqX6nUtrky/JZwxqv3vMVvmWrWna08bF0mUhovmZvmrkI7rzJv3j7wy/My/KtWo9T8m4CIn8H8NbqnzbE88lIk+Nnjx9B8A3lzDeNHLJB5SNH8zfNXxtrV08MjfOzp975vvV7J+0J40m1DWk8PQv5dvbxbpdv3v96vGdWZJGbuq7v8AeZf9qvvcjw/1fC80vtH5pxLjPreO5Y7ROV8WxfaLUps37l+7XL6PrG1WsJpGUfd2/wB2un15d2770q+Vt3fd21wfiCOa3vGuIX5/javaj7x4FP3SzrUz3jGF+K5a8t3gm+TcwrdtbxNThVPmR1/i/vVT1axm8tkR927/AL6olErmM61vk8wQu+f9mpdQt0ulD2yKrf7NZMy3MM3z8H+7UllfeSzbn6/3qfwj5SOSOW3kKFv96nrsmj+T5WWrkkCXy70dfuVmyRvby7G6rU/EUPlhEZ2VE/3jU8bJPD84+ZagZXR/n+Wq5Rx3Eoo8zeaKocRkf3x9anm56dqiC7WWpJt7S+1TyhIu2EZjkTZt+b79XJF2qdnX/wAdas+zDsv3K0RvZN833f7tTAkgmkfazyJ81VfM+b5Eqe+Xg/O2P7rPUKqnlr8n/fVP+8T8Q9XT7mz7tO8xF3b3/wBxagj2LN9/+CpJmhxvzupBIq3Tb2AFRs3lr70s33/wqP761US4/COrr/C8if2fEjov3PvNXHq2eDXV+E2RtPEaP8/+1T5kFQ05pvMZvn+VvlqjNDvZsfKPu1dmVI5/nG7bVSdvN27H3bd1RL3dDH3JHrv/AAT4iCftW6G+/cTbXv8A6SyV2H/BU0MfjR4dIUn/AIpkdP8Ar4mrkv8Agn2f+MqtBHX/AEW95/7dZK7f/gp1bC9+Nnhy1Emxn8ORorf711KK/asu/wCTGYv/ALCl+VI5J/72vQ9U/Z58Pw6L8C/D1mEuE8y189o5P9quj1DULbTVed03Iy/xPu3f7tPi/wCJT4V07R7OZW+y6Xb2/lr8vzLGu6uL8dahc6fp+HdmmmfZuX/lnX4HUlOUpNHn+9KfMcx8QPFd5rmoNDCGeGH5UaR//Za5ePSUt7eS/vNv+wsj/erUu5kt1k+0vtdm/u0uk+Bdc8XTp+5mwybkX+FqujRn8ZtGpyv3jxD42NH5aWcybGml3bV/hWvL9Ut0VTs+Xc9eiftBW/2f4mT6Jbah5v2GCOJ9v3Vk2/NXAXVjeTR7ztdl/hWu2nGfKenT+Ey1QtWx4d0G81u+hs7CGR5Zm2RL/eb+6tVrHS5vMCOrKzfd+Svt/wD4Jw/sl3Orf8Xv8VWCtBb3HlaNayRbt0n/AD2rojR5jPFYj2MTW/ZV/ZD8SaTo9ho9hYSS67qksf2hV+bbH/zz/wDiq/WP4K/sP/D39kP4Hv4w8YQxjV7iJn3Kv+rbb5jKrf7Ndl/wTz/YRtvBOk/8Lo+LVh9nubhGewhvItrRx7fl/wC+q8p/4KVftUal8SvFMPwz8Hzf8S2ziki3WbKqxt91v++q0rcuGhdfEfPSqSxHvTPm74n67a+Ptf1jWTGTBqDSDbI2SU27OT9BXHQ2KaTp/wDZulafDbwtAqu0aqu7b/tVsW9tFaaZ9nDllWM5Zuc9c1xPjLXftFydN0e/k/eJudmT5Vr9Z8WIueS8Pt/9AsP/AEmB1RheJzMnnaheN50Nw8sm6N2+98u6u9+G3wuh+XWNbTZEvywLI3zf981B8P8Awe8khurrcWjbbtb5f++f7y1r/ETxtZ+D9JbyZleZovkWP7y1+J1sRSwVK4SOf/aS+IVh4b0u30HSrlo5mfZKsdfNl5Nc6tNJ5yMXb52krqfHHiy68TapLqV5czM0kv3W+7HXMyR+ZJvR2RN25o1r52piPrEuaQ+WUoxGTf8AEo0z7a/yiRNqN/C396vO/GGvQ+c0Oxs/wKr1v/ELxgkcaaVpiSOzfKq7vlWvPtaZAzO+7zm+bbvq6Me5tD4TJuLf7RM6O+5pN3ys1cB4/wBBfStU81E+ST+Ff4Wr0SLyvtnnTblH8FYPim6ttShmtnTO5Pkb+7XqYeUoSO+jLl944Kxu3t22/wB2t2x1TzF8nr/Fu31zdxH9mlZD/C9W7G4/g3/NXqfYOw6aW6hk3R/eb+JVqBpHbbvHP3ty1V+1P1R03qn3tlWrX5UV3dWf/drQzj7pG0b3HyOjD+41EPnN987uzf7tWmj+X7/Kv/DTZLPbcGbewb/ZqdfhHKXUr6bZvY6wsKchvmir2/R7zyPh6jv823au7Z/s15NNpPnW
cV+isz2/y7l+9tr0m1kh/wCFftHMinbKuxv7rU/fIkY11fT28Oyb7uyvKviBM82ulN/3Vr0PVL5P3rzPuG/5d38NeXeILh7rWbiV33fPt3VBpRKVFFIrbqr4jYFXApaKKoCRY0Xl3x/srU6ahc7fs9nujVuy1X8z5XcnJ/2qWO6mVfkpfELlRKLG8lk3ujf9dGqRlhhUpczMf9mOq8l5cyffmZh/dqJiWbdml/dFyls6htXybZML/F/tV+iX7PTNN/wTojPUnwdq+P8Avq5r85beF7iQIn/Amr9HP2f0WD/gnWiK/C+DtX+b/gVzX7B4O/8AI3x3/YNU/wDSoH2HBkUsZXt/z6l+cT4GtZodFhZ4Zle62/NJ/wA8/wDdrNlvXuJDNNNudvmdmf71QtM7D7/C0kbJJ8ju3zfcr8dmfFe/9o3NJ+aze8mh2hvlWo7WxudSvFhh+Z5H+7VqSFI9Hhtkmw/8a16Z+zn4M0S68SR3/iRI0gh+d2kfau2nGIjY+Af7F/xF+L18JtN8N3D26/62bym2r/tNX0Ev7Mv7OXwJhhtvH/xCsbrUlVfNsbfayxt/dZq439oD9ve/8I+B3+HXwfmj0mO43JLJYuys0O35VavkiTxtr2tak+pXmpSPPI/zzMzNuolUlKPukSp825+kHw/+OnwD+HccM2m2sN1cNLul27dqrWp4u/4Kg+Hvh3o7zeGNNjiuG3M9rIisq/3a/OJvG1xptiYYbmRX+8+2uY1nX7y/kV5ppC/97f8Aw1lKM6m8hxpwUT239p79ub4tftEapN/wlXjC+ubbzWX7PJLtRV/hXateP6WlzfXS/LisiGHzm2Abmauo8N6b5MyNMjKG+WtIxjEfuRPTvA7WtnpG/ftK/f8Al+9TvEHiy/WMw280ixfd3bvl21V0Ev5K20L79v3Nv8NXrHwjqWs3z2yQsxk+9troMvf5zC03RNS1S63+W0hZ/k3fNXrXgn4bPb26zXW0Ns3bv7tdR8Kf2f7mGz/t7VbPEUarsZnrttc0fStH02TY8YMabVXZU80Sebm+E4q3hs9JhFy8O5I13M275mb/AGa8v8eeKL/xVrn2LTd3l72/d10fxK8XfaJ/sdttHzbdsbVT+H+g6Da3D63r1zHiT5kj+981R7TmJjRnH3j0P9nv4Pw+IFhl1VIbdYf3sv2ivprwfpfgDwjpkUO+H7Zubzd3/jtfJN58eLDw+zw2dyscX3d3+z/drB1T9pPWLqRUfWG8hX3bt+1qy9p9k19jzR94/QlfGHhtpt6bYopPlRfN+78v3v8AdrA8UfETStOZ7mw1ViYWVkVZflX5fvV+f2oftVa3bvvtNYmYxp/z1rDvP2ovG2oRyo+pSbG3ebt+61KPNLRkxo8p1n7aXxO1P4v/ABWtPB8OqtPp+h/v5Y1ZvLa4k/8AiVrqP2P7Gz034jaVf+R80OpW+5WXd8u75mrwrwGz65NLqty6vNNO0r7q91+CrfYdSS8s/leGWNk2tt3Mtc1Sp7OrE58R/Kf0x/sl/tFWGp/DvSLO9uWdo7VV87d95dtez33xt8N2dqlzM+5W4VVb5mr8nP2M/jxef8Ivav8AbNm2Jf3ay/dWvpi1+Jl5qkYs/t7NF5XySL8u6vbpyhUhdxPM5p0/d5juf+CiPxPs/ij+xx8XPBPh6xk2XHgHUAkjJ96ZY933v+A1+SnwB+OtnN8G9A1u5v4UH9g26SqrbpGZV2/NX6Z3lm/irw/rfhjUrzzINU0a6snVn3LJ50LLu2/8Cr8Avhv8QvEPhXQ9S+F2pTNDd+GdevNLuo1+XcsczKtfO8Rx5sMpRXwn0XDtaNKrI+vfiB8etKufOhhud+75/Mb73+9Xifi74kW2rTzXNntiaSX/AIE1ebar4qvL682fatm5dyfPXO33iS4t7r7T/aSokKbdqpur4WVacj7KnjIy0Pa/C98+rXiarretxxtHu+98q7a4342fHy51LxE9homqsbaO3WBfm+9XmmqfELXplWztr+OFG+//AHq53Ut8zM9y7Ft3ztv+9WtOVWUbSIljIxj7h75+wL4ihs/jxeXOsfZ2ims1l86T7u5W+7Xvnxh8ZjTtL13x1AuAZpLlVRuAHkzjPp835V+f+l+INe8N6out6DfyWlzD8rtvb94v91q+wfiXrMkn7LTa5ekO82g2UkpPcuYs/qa/e/ByMlk3Eb6fVJ/+k1D6XhnG06mFxClvGN36WZ4j4y+LHifxZJse/mKKvyeZL/D/AHa5+38B+Cdd8C6xqupaxdQ67avHLpFvb7fLkX/losm7/gNYl14ks5I9iTbFX7yr95ql0W6triORJpv9Z8yLH/8AFV+IfBq5HL7alWlrK5w2pXlzY3DGF/49u1vvbqfpcmpahJ5E1nvf73y/xNXT+NPh/o6TLeaPqXnSsm+eFv4WrL0fXE8O3EdzNb7drr95K6ZKlOHu+8ePUUva8sixcW+uaTb/AGx9BvNv/PRbdm2/8BroPDf7Qn/CO2/9lfb5Eb+7cIy17T+zz8fvDf8Ab1tD4h0e3lRn8po5ol/eLXvWvSfsZ6feRXfjz4XaTqVtJ8zqsSxNG38O1lrzY1MJU9ypeMkbU8PWTvB80T458NzeLfjlrlrB4EeTU9RkfZFb2aM7f981+mH/AATC/wCCdH/BQH4RTXHxG1n4J6e9tqEvm2q3mvQwSs395v8AZr2//gln8Uv2Z/Dnjy3+Hvw7+Hnh3RzfJJcNeWtnD5/lqv8AFI3zfer7rn+Knhu1uv7Nt4Y2j3fLJCqqq1yYh4Tlt0PbwuGxeElzw3Pxn/4OCfhD+0l8HPAXgnx18XfFuiyv48164sL7R9DRvKsYYYd0cPm/xN/6FX5h6LCkLGCBNkW9dsa/w1+qP/Bzr+2J4P8AiX4q+H37HPhKaO4ufCt7J4h8UNCys1vI0flxQt/tMvzV+VOn3kDats+XEn8Sr/47X2mQYOhhMDH2ceW+vqfnud1KtTMJqcuY9G+HupTabDLB5ylmfdub+7XceHfFybfk3LtT7zN8rNXk2m6hJaqyIm1WT7y/w1f8M+MMRpveQur/AHl/hWvaqfAedR933T6H0XxVDuSZ3Xatvt+ZN3/Aq63T/Eb2scU32lVfZu2qvyr/AHa8F0LxIt1dIiTfMu35Weu20fxnN5LJ57bl+V91c0eY64ylGR7PY+IIV3+VuDSLulZW2qzVqw61Db27v9tkzH/qoZJdy15N4b8VI9uYZvk/6afwtXR2+sp8n77O5FrGUftHXh6x31hrlzbzF3m+6v8AF97c1Lql3HdJEVO1lB3Rf3Olcxa63N9+Z13t97/ZrT0+6a7DydQDgMepxkV+i+Dcb+I+Db7Vf/TUz67hef8AwqQj6/kzo7K7lNpG8gZhEgXy1bkr7VdjmtpE+0+dIrsn8XzN/vVzNpq9ukjQRSEGP75WTGW/u1a03XEYzJDPHu3/AHWf+L/er8p4tt/b+M5/+ftT/wBLZ0KpKOKm13f5nV2dun2MpDN975v96r0drNHh/wDWN8qvCzfKtY1jrCWqql593Z87L83zVr2
uoJ+5SEs4m+avh61OXNzcp7OHrcxu6bY2s376G2U7v96tnT7e5jj+RF+ZmbdJ/C3+1XPQ6sm1PJjYN/d3/LWhDrx+0JbQzKGZWZ7ff/7NXnSlKU+WR3+05jaaJ1mFzCn8Py/3Vaqt8u3dMgUbk/u7arTa4kLMiOpdvuqvzbaz9S162tY2mubloyz7WaR/++ajl5dipVOb3WQaps+xzXmyRPMXY6r/ABVx+uSPNC1t/AsStWvrNxc3CunnSOPKV3Xzf4q5m+1J03ec+G/5ZMr/APoVdMYy925cZfZOX8TbJJlhxIxb+Jv71cV4ksZmhkeaZv3fzO1d9qUcxm3p87/wf/FVxHiaSF43h8n5t3zfN96vVo/vdjGM+WZ534hZ5N+x2Td8yL96sKS3mhDwj+FN27Z8u6up1uBJbpdiNCqpt/vbqx7uOOP93Cm0/wAfl1304np08Ry6lGO3ePdPN5hP+1/FWhp+n+cf9Svy/wB1vu/7VNWR4YQ6WzKn+18ytWjpMfltD8+yKZPnbb8kn/Aq09/4i62JjytHockKQ4hSdZWb/lo1ZGoQySM9s+5m27vM2V0l1YuJGj+4n3t1ZOo2ryW7vchjt+5tb7tT7sZe6fE1PeOYuNNe4VXebLN/yzX5vlqhFp80amYOreZ/d+8tbC21wrIlzNMsW/5ZNvzMtFro8LXUPk2EmJEYsu/5l/3lrblhGXvHPKPu3iUrOzmuI/kRdyv+93fNVn+zXkhim3r/ALS/drasdBe3jldH3MzfN5abVWra+H4bqY74Y/l+8zS+Wq1nzRfwmdSjKJyVxpaRrJ+5be3/AC33/LurBvNPeGGP5M7VbYv8LV3+oaDDGfOSGRiyN/qW+Vq5TVLN41be7A/M3zP81dtPljDQ4KkeX4jkbyZ4WXztqLu2/L/DtqlqWouq/uXz/Cm5v4as6sba3aWaZJG3J/vVzepalNCzJsUt/Bu+7XVTjKUos8mtU5fdLv25E2uk3K/Nu3/eok1KFwdgZpNv3lrnm1rbv2JtT7u3ZV+3uplm2O+dybt23+H+7XdKny+8c/NKRr2+oOtvseFtq/6pVqza3UNqXlf5U+81ZtrJIrJNH5ixs33f7rVfh03z5Bvl3N83yt93dXHUjCUrMr35aH0F4GlEnwSjlkPH9mXPPTjMnP5V5at08d0mzd5W/cjf3vlr1PwOxPwURmiVf+JbcfIg4AzJxXmNrY7k+R921NyNH81frHjRLlyPh1f9Qkf/AEmmfbZvrQwn/Xtfkixbt5cm/equ0X3W+7UqxzPh3Zf+A/Nupmnw+ZCN6b9z/d21Z2+ZtdIliX7u1W+9X8+xnJx9w4I+7GJDJbzQ22+F1R/4f/iqns5kb7k3yqn3du1d1RyQozPjbH5e5WVm+anx2rw26PBM2P4Vk+as51JSjy9jrp1JR90kkmufL875W/iT593/AHzUtrqDzNEnkso+/tb7q/8AAapN/o/3DIrfd+X7tSWm/ck3nM6r8z7qco80IxO2NaMYaG3askkDzPud9+5VX+FqmupnsrV7l7xVWOJnfc33WrOW8RG+R2V2+/8AN8tYXxW8SRaTov8AZUMqr5yN/tV34XDe2nGETnxmMjQw8ps8j+I2uQ6xr1/qtzC37xPvN/FXm99qaW8jvHMw/wCBV0HiyZ2V4U+cK7bJGevPNSu3eb55l3f3q+9pU4xpcsT8vqT9pVlN7li8vkkZvvb/AOJW+7WFqMCXXm/w/wC1/darElxHLJsR9pjXc3z0tts8tt7/ADM/z1vymXunJ3Vvc2N9+5fipY9aQt5Nzyf9muh1TSIREZvJ5b/vmuHvrhLe+l2HaVb+5T+2OPvF3UrOzumEyQ/O393+KsS6tZIpG/dbQtalnqyN/rguV/8AHasTWcN4vyP96l8RfwmBBczW3yDpVmZDfL52V/8AZqs32iuql0f7v3/krMVnhk+R6kv4gIeFsZpXm81drJ83rVvEOoxAqwEu3G31qpPC8LlHGMUAMf7xprLnkU5m3UgbdzQWLHhm4NKykmmKu2lrQCzZt83rWhbs6/I7sQyVm25KkfdrRRt1vn2qYmMiO5kf+4p/2qgLSN8vZf4qmkjRW+/j/Z2VUkYLnY+P4fmqZS5vdFykjNs+5Iuf9qmtJGFZGT7v3KZCybmR0zQ8if8A2NPlgWQyfN8+ykoop/ZKiFdR4LXzNPZEG5t9csxwOK6TwT++tXtv9ujmFU+E3bpfLj/cyL81ULqHZ8+f4P4av6mqW8ImTd9/bt2VQmk8tc9C38NHMYSPX/8AgnuGX9qjRAX58i93L7/ZZK9Z/bp0BfEf7VHgXS5Y1McunW6yMy5wBdTH+leU/wDBPuNP+GqNCl3Ek2t71/69pK9+/aY0yLVf2t/CaSSOBb+GfPdU/i2zy7f/AB6v2jAP/jRmL/7Cl+VI4qq/2len+Z2OqalbRyXDp5ezd8it95dtcJ46l/tC8FrNMrMvz7pH+7W3c/6l3MPz/wC1L95mri4rXUvGHiZLdJt0K/K3l/Nur8Gp0+afKcXtJRlzGl8Pvhjc+ONcie23SwtLsVdjNu/2v92vuf8AZ1/YMhm+HWreNvFX7nSdJ0i6v7242fLHHHG0jLu/u7VrL/4J+/sn3/jnXLCyTTbrbcSr5q7dvlx19vf8FZJdB/ZK/wCCRfxY17w9M1vcXHhePSIm+6/nXTeSu3/gLNXv08PGjhbnLF/WMZFH8wvinXE8VeKdV8QRzM4vtUuJ4mZv+WbSNt/8d21nxx3KyKidP49taGjaO5tIS+07YlH/AAGrnkwwzDemAzbdypXGfSc1jpPgd8Kdb+KXjTTvA2iWEj3mrXUdrZqq/ekkbatf0rfsFf8ABN/RtL0nQbnxVoNrFpvhfSbe3+WD91cXCr+8kVf96vzA/wCDaT9l3RPj1+3NpureJ7BrjTPCekzavIpT920i/LGrf8Cav6CvjJ4ts/Cfha40Dw3bJbwKnziH5fM/2VrsoyjGJ4eOqTrVf7p88ft0fHt/CPhW88JeFbxbG12bHaNdu5VXbtWvy48c6ii6lePM7YkuN67n3N81fW37ZHizUtSkENtdSPEr73Vl/vfer498YWc17q8t5bRrsb7zL/FXNWj7SV5GMfgM6eV5tFmkVTuMD4BGDnBrj/DPh+FZPOd5LmX5l+5u+au503Sbi4EWjsS0k77BvOSS54/nWr4q8O2Hwst4rB5F+0yMyxRt8zK38Nfr3i7Up0MjyCUumFj/AOkwNOZLQ47WtYh8G2rTXkK/a2Xavz/Nt/u7a8W8deKNS1q+e5vLnarfLFCv8NejeLmudUhuHmmWW4kdm87b8qr/AHf96vNvEmmW1n5r3Nzt27W3L/FX86YiU8RLml8I5S5vhOWvoHkX532/w7W/i/2q4/xV4qmVja6bu86Pcu2P7q1qeKvESX15JbaVu2/daT7tcffN/AjyMzf3vvVxU/5Xsa05Rj7sjEvZnW6d33O7J93+7WLfiFVLybXb/wAeX/ard1Q7oykdsyLs+eT+9XD+PNfsNFtHgtXxK33mr1KEZOR004zqe6Zmu+
KIdPjOyZX/ANquR1PxPPLlLPdjP3mrNv8AU7jUpjJK5x/CKgZscCvZp0Yx3PTp0VAGZ5GZ35NSRvtPXH+1TKK6OU0kbOn3SLGEHzf71aELPJD86NXPWMgjkXY9dHpqm8XG/lf4aOUktRK6ts37v7tacVvJIqv94/3m/hpun6akki/Jkr8ybkrYktPKVJoXXay7WWnze6YS+L3g0G1S4hlsn2v5iMu6P+GtuS1ez8AvZ3PmZjlX7v3vlrO8GyQrrH2Z9qbl/i+7urpvG32ZvCMt5Ci75Jfm2v8AdpRIkeY+JNQSOxld32t/drgJH3ylg9dJ4w1FJYfLR/vfermQDnLVJ00/hBhkcUKu2looNQopGOBxS0AFFFFVygFLHhm4NCx7l+em8sPSnHYmRaiuEjj2Inzfx1+iX7PmD/wTkT38G6x/6FdV+ce75sV+jn7Pv/KOJP8AsTNY/wDQrqv13wb/AORvjv8AsGn/AOlQPseDV/tlf/r1L84n50xk7tnf+9uqxDIkd0oyvy1T3t605ZnXPz1+Pnxso8xuTaw7TL/s/LV+L4gaxYw/Y7O5ZNyVySyOn8dL5zsfnfijluHKy1faveahM8tzNvbd95qktbpLZV/vVQbZjilE0i9GoCUTRuNRnui6Oyr/ALtJa2fmSCGY/wDAqqQLDK338VZW6VF+dvlV6v4RcvuGzpOmoI1m2LuXd8tdHosyKqF5vl/u1wn9pzRt8kzfLVjTb52z515Mw/uq9EdjLlmet6HfbrpfJvI02t/FXsvw18R/DTwCn9q+NvE9ncTL86wxv96vlttQ0q3tWmuZrpfk+SP7Rt+asDUdViumzskd1+6zS7qzlGXQrlj1PtLxt+214CgD6boN4qRqnyL/ABV5r4w/au/4SBX8m/bDJ91flr5wS4ST/XQ7v71Ss1mql0jVWap9n7oRjE9Sb4oaM159tub/AHDbu+/96mXnxSs7iZPs1+sQb7+1/vf7NeTNePHI3yL/AHals3825Akfcn3sGrgOUTuNe8dPeXAh3rs/urWReeJPM3/v2X+H71YU198v8P8AwGo/Odl42/N81Ll/mJj7ppnVHbKbtr/epJNSmjt2S2mbe1UIrpyu/f8A8CqW1vPnZH2tTCXunrXwhjabw/Dsdd6/e/hr2z4c3X2dvM+Y+Wm99qbtq14H8FdSddLkskmYlbj5VZv4a9r+Hd463ywpuCTLsb+GvLrR9/3jz8RH3j7x/Y58ZPJDZwojSIr7FVV2tur7g8ByTXlv/plyqIy/Jt+9ur8z/wBlvxIlrqDw+cybvL+WOVvmZW/u193/AAl+LiQ6ampWEO9422SrJ8yr/tba9LA1vdtM8qpTlL4T2zwPpOsf29CsNyyQrcf6xn+8v+1X4W/tqeCL/wCEP7eHxg8GeT5cbeL5L23VflVobj94tftp4d+L81nqx/sG5hieaJt9xJ8ywsy/3a/M/wD4KsfC1Na/aw/4Wp52+HXPD9vFcXjRbfOmh+Xd/vbanNPZVsM4HpZTKdPERTPj/UtSv9vzp5RV9u7d96s+4k/cu77Q7fMPk+9Xd3ngHUNQuBYaPA1zMzf3P7tc78RvDmp/DB9Li8d6NdaU+u2bXmjNfWrJ9qt1ba0kO77y7v4q+Jll9WpG8In1Uq3s5ayOcvI5reUvs+Vk3fNVNm2sN6b1hXc7N8qtXG+LPjpZ6XI1no9q1w6/K8kn3a838Q+PvE3iOeRrzUpFjkP+pjbatdmFyWvUj7/uoyliP5T03xJ8WtB0e4ltoU+2T7tu2F9yr/wKvtn4mXIuP2LlvHULv8K6c+30JEBxX5j6VuOoR4Xdlq/TH4s7rf8AYdIjxlfCWnAflBX9BeE+CoYbJc9jHrhpf+kzPpeGqknhMe3/AM+n+TPiOz8aTNceS9tlFl+WRq0rPxwin76oN+3bXC/PDcNs8xPn/ibctXoWdpNiP/tV+OyyvDVN4nx9PGV6fwSPRbfxNNeL+5maV/4/9mqlxffapPs3nfMrf3v4qw/D+pXNvKqCH5G/5aVZ1iz8mZrm2mwu/wCasKeS0KctDSpmVWUfekdn4A8J+M/FmoJp/gLR7rVb9tzxWtn80n/Aa0tY1D4x2M7+Htb0HWra6t5dz291YSb93/fNcx8M/iN4k+F+uWHjzw9eSQzabeRzqyvs+7/u19p33/BQ8+MPDFlr1neTXVzdf8fVrbwK0kn+zuZflWvXwPC2VZi7TfLI8XH8SZnltpUo3izK/wCCd/7QVj8F/ii/i34u6DeaUsem+UuqX1u0UTKzbtys1fTf7Vn/AAXI+Hvwz+Hd7o/wQ1ux8VeMb61b+wbfT/3lrp7f89p5P9n+Ff71fFnxq+LXj/41aLcaV4h1W1s7a+t9iabY2+75d3yr81fPHiX4d6x4Lwtz4emtrTbuVvIZV21lmfh7Qy+rHEKfNB/ZO7L/ABEx2YUvq8klIdrXjDxh468Ua18SPiJr02s+IvEF011q2pXTbpJpv/ZV/wBmqMbzQ6kroihG/hb+9SRxPGreS8bPv3f8BqPd/piQoiu9aRhy+7EzlOc580jo7hvJ0+W5dG3bf4q5fRdceC7ZN+5d/wDC3zVv3myaw3o/3l+fbXCrcwrfMj/K+9vl+7tp/EaUdz1Tw74k/eJDI/y/3m+9Xb+H/Ej42CZWeb5v7teLaHqnkyBw+3/a+9XZaPrUqtsdGdtn3v4aylR5jo5uU9e0vXHuIUm+aJWdv93bXV6D4kd4US2mjRG+bzJH/h/iryXR/EMMMiohYMybnb7y7q6PR9QgaFPPfJZPnVX+VazlR/mHGp72h6rY65cySCF5o3Vm/wC+v7tdp4MvzfJckk/Ky/J2Xr0rxvT9ciWZNkylFRlb5a9P+EMolsbt1nDqzoUx2HzV+geDtOS8RsHLyqf+mpn1/CdTnzqn/wBvf+ksl1C6a31e4isZ1QGbfcFV+YjPNaunatuuEdNuxXbdI23/AIDXMa1Kttrt3LNtIaZwGZ+nPTbVq11S2jmX/Vh22s7bP++Vr8x4rp+2z7Fxf/P2p/6Wy3W9njpv+8/zO5tdV+0XDv525m2t5i/8tK14dUhtLj9zC277yNv/APHa4OHWnjYPbPslmdt7L81WYdam2pHDfyF403N867mr4zFUfd5Ynp4fGe8d7b+IGhuInR97bP8AU/e3VpW+tzMxhjm3bvlfy/8Ax1a85t751VJppv3X3maT71a+k6oiyeZC7Mv8NePXw7lVPVp4qMtUdlJrVzMu93VJV+Xav96qt9qUMcfnTfN/e8z5tzVi3F0i3m/ZvCoreZv+8v8AdqGS+eONdu3+981Ycvs+Y2lU5pFjWNQeNvOTy1Zv4Wb5l/3a5241KaaUP5y7d3zNUt/qSfZ5ZJn3bW3IzfN97+GsDUr6GOHY6KvzMj7V+6v96tKPNLcunUjEmvNUjhjcu8aDzflZX/hrm9evbaGxdH275Pm8tvm+aodV1yGON4bN2x/Hub5t397bXJeINcmvpjZpeRh1+ZpGf5m217GFoy5jmxGI5eUr69ND5zwQ+X+7Rfu/Kv8A9lWPJH9ou
HTdkt91l+9VbUNagupC6bf3fy+Wr7mWqf8AbTo0XzfumfdE0f8Aer144fmjGxH9ocupuwzJDb/fYoqf99f7ta+m3CR2apMm6JfuR/3a5eC6eO4KO64k+bd/drWtb6OTa7vJ83y/NVSo8vwkVMzjOWp7hcWc0cju+0/N/vfNWbqmmpdW/wAkPHlfvdvy7a2lZxcP9m8yFW3Inzfw1JDb+dK6eTuRlVX3fd3V50pSjqZxlCRx3/CMWat9pSSR/nX5qn0/w6i3Hmw+d8v3m27mbd/erp20/ddeT5Me5m/hrS0vRdzNv+R12/Ky/LJSlKIcq5eWJiW/htI2/veZ/eX5t26rj+GhZt52xcM/zbq6eGzRdjmRS2759zfdqWTTpreGSZ7aMvN8rrG+5VrQipT5TgdQ0Xy4dn2bZ87b1ri/EmizKsvkptT5vlX/AOKr1PWh9njWGGFVZk+6zbt3+1XEeIoXmjdPJ5b5vl+61XCc46nn1ox10PGddtPszKjovzfN8rfLXD63ZvNcPNav8sb/AD/P96vSfFVi6q/ksqCNvkbyvmrktU0lJG8t/wByW+438Ne5h48sOZnzleO5xMNu8f79PmG7czSfw1oafcXUm77TGzL/ALX8VWptNmWTyblFYNS2Mc0jvbTOuF+ZW3V3y5ZHDL3eU0LG1875NjRIybt275VbdW5odpc3UyQud6t/y0rN0+3DKba5dj/fX+Fa6rRbHy1S2hZW2/d2p92vPqctM76fvHsXhG1nt/hGlrIxaQafOCT3OXrzq3tYbWNfs0G12RW27NrV6f4ZTyfh0kcZ+7aTAZPu1eeXkUN1dDzplj/dfKzJ95q/W/GOi6mS8PSte2Ej/wCk0z7LN5RjhcL/AIF+SH6TshbyURiWb5W27VWmx2pt8Q7Msu7aqp97/eo+0Qt8kM2x2/h2/NU8Pk2wKJNJuZ1bdtr8C+rxjUPMp1I8vLKIq6bNNtd0V327kZUqG6jLwjfOzxsnzqvy7a0lj8uOWG26/K3zN96s273xsYUuYyW+9Ht27aylQ5feKjWjTK0kjwyJ86o2z7u/dVaPVolYpCjff3f7zVV1K8CSMIUX5trP8vy1lw3ztM2xF2M3/LOuqnRjL0I+uSjLQ3o7raxuUkVE3fdb+Fv/AGWvNfiZ4k+2axtd8RKjKu1PlZq6nUtSh0/TZZt8jMyfIrJ81eG+NvF1zcXU1zNKy/3Pmr6HKcHGnKUjws6xk6lJQKmt3TzedNbTZ+bbtrhda3rI3zrvV62rHWHaN33s27+H+7WJr0yTS+d/6FXvQPmZGb53+3y1Tw3SRyBN/Lfe21n3Fwkcfyc1TivEkmZxuDr8u3dQUavijXktdNeKGZt7LXBzSSSSNMXyzVratcSTN/u1msjt1StBxlcrZOeXY1Zs9VubOZXSRtq/w1C0Pl7eflamsh27kWpkafEbsOuJeK6TIuGqhq2n+SouYU+RvustUPnUVpaPqkKt9mv/AJ0b5V3fw1IuWW5mxO8Mnmd60murbULEo8aiZfuNT9R8OTAfarSZHik+ZdtZcgmtpNjqytQP4huHVvnopWbd2prNjgVfMiwUYWlooqAHwLubIq9bsnl799Uo1/5Zn+KpoWRW2P8Awv8AxUTMZE8kjK3/AI7UEyoy79n8dLNJtb5JN1I0u77+3/gNARIoup+tNkG3kvk0rYx8lMf7xoKEooooNBH+6a6T4fyJFJNvT/gVc2/3TXRfD+VFu33pVx2M5fAdTqFv5kO9H3fw1k3Vu/nP/s/N9yujms0ht12bfm/iWsi6hdm3u7ZpfEc56t/wT+jK/tSaAw2qDa3vyr/16yV9O/HpYNO+OEXiG5jUiPwnFBGf4gz3M3Svmj9gGJ/+GntCfbwLW85/7dpK+i/2tL/7F48sI1KqZtJjBO7BIWWU4/Wv2XB+74E4v/sKX5UjixH8dehgnXLm+mM00PyLEzvGyV7L+wP+znrHxe8ZQpJpUnk30qv+7Ta0a7q+cobua4ki01LlovOlWKWRX+ZV3V+1H/BG/wDZR03/AIQ3S/F1h5awwzxw/f8Amb+KvxvLqVKVTnkeViXJU+WG59ofsb/sX6B8FfBNtf3kMYnmtV807fm21+WX/B2d+0XBq/wO8N/B/wAI3Ey2OqeL4kuvLuP3U32dWb7tfsp+0p8X9E+FXw51C3h1SGG5WxYBWfDKv3a/mq/4OAPiJ4Y8WfF74b+A/DepXReGyutUv7WS682NZGbbGy/71d1ScqtPnn8jvw+Fp0K8Yw6fEfBun2M0caP5KudnzVKghkmCX9hu+b5dtaVnZeYq/N8rfw/3qsx6LukCI7b91cX2z0JW+E/bD/g028Gy2E3xW8XxQxxQNolna+dt3SRs0jNt3V+iP7R3i5LfNnZzKFj+RGX7tfnJ/wAG0PxUtvBfgX4teCby8hRrrS7HUUZfvK0bNGy/7vzV9gfFL4oWGrXks1tC0vnL8i+V/wCPV20fePDxMeXlPDP2gpvtVnNqt7M32nfsVv4VWvEdB+Eut+LNW8mzs5EST7snlbo46+i7vw3qvxA1SZHsNsTf6pl+Wovjd8Sfh1+yX4PhtoJIZvEF9Fs07T12yTs2370n+zU4ipSpwvMy5XzaSPkrx3oy/DX4ny6XcwmUaVcW7yR8DfhEcj8a888ZeNte8VeIpvE+sCNrmZ22Rt/yxX+7XQeJfFuteObm+8X+IJS95emSSYsPqAPwAA/CvOtU1CG1jm/fZX7rfP8ANX6P4z1H/YXD774WP/pNM1+ymyO8WG3Vrm5uYUXbu27/AJa8O+KHjD7drDw2c2yPbtWFX3LurpviR48RbR9K0253vs2quz5dv97/AHq80j02a+unuXVvm+bctfz1GtKvsXGXNuZt2yXErbLrG5t33Pmps2ivGz3OpXKt5f3P4f8AvqtSaztrWF5rmFTt/wDHa4jx14uKwulteRpDv+eT+9/s1vGjE2jT5pmP8SPF1nZRultMoVvmfb91a8R8R65c61fPLJMzIG+TdWh438X3OuXjwxTN5StXPKMDFe9haHs4XZ7VGl7OI1V3U5V20Ku2hW3V2cqOgFXbS0UURAkVvmyP4a3PDt1I0gT/AGq5/cfu1e0e+eGZE3/xUpRIlE9Z8O25WFfuhvvL8n3atX1h+73iHd8vzSfw1neC9U3xo833W+Wusks4bhfvsibfkqvscpjKJx1nvt9W+0oigq+5GWuj8aXv2fwSXTbhn3PJv+ZflrD1axfTZvMhh+VX3basa1Nc6t4BvLOGHbtt2dt38O2oJ9n7545qN295cM+eP4ahjh82THrTWOFr0r9k7wn4S8dfHXQvB/jOwkubC+uGSeON9u75WoqS5Y8x1xjzaRPNmBXgiivsX4lf8E6NB1Oea++GPiGSwLXDCKx1D5olX/erwjxj+yR8b/BzO914PmuoVf5prH94u3+9XNSxmHq7SNp4WvS3ieY0Vf1Hw7rWmStDf6ZNC6/eWSJl/wDQqqfZbnbu8lv++a6ozRhcjop3kuv3kxTeAKQuZCsfmz6UlFFBQrNur9Gf2fP+UcSf9iZr
H/oV1X5y1+jX7Pn/ACjiT/sTNY/9Cuq/X/Bv/kcY7/sGqf8ApUD7Dg3/AHyv/wBe5fmj85k+8KGG00lKx3GvyL3T48Siil3fLtqSeZCUHPeiigdkSK3ylD96mD5l2fw0csv0o+98iD5qBRHrzt/2vStKyVLWHf8ALtV/mqhHCjf71TXV4qxeVC/P8VVzESVxNSvjeXG/HyDov92qrOQ386WRm3mmVIy1bs8ak72pl1Ih27P7tQq2T8v8NDNu7VXxFcrF2D1NWbf93C29P4Kgjbc3z/eanzSbcIjttqSZIduRVxu3UrNlV2cn71Vy20kUsTMrbw/NAFqbfCn38j/ZqLzAv3Hx/fqNpy2Pm6U3zB/cH50Adf8ADXxA+l6wltv2pI/8X8Ve9+CNatm1S28l2c7/AO592vly2umguUuU3Eq33lr234X+LIdUhhm+04dU2uu/5lrjxlPmic9anzQPrv4L65c6Lr9lfvMu2OX5W2/LX1V4N+LFta2ahLnaWdvmZvlkX+LbXwT4J+IlhCI3v9VjhRf70qqq13cX7XfwF8AWq3/ibxrFd3EL7fsNs25v/Ha8b2uIheMInlexq8vwn2w3xo+15fTUkb/Zjf8AirnNU/Zr1v8AbmuLn4Y6V4wh0bxxHpdxP4Ih1BP3WpX0a7ltWZvu+Yvy7v71fGXij/gsB8OPD6TWvwy+Gt3Kyrtgnm2pHt/usrV5P8Qf+CtX7SHjG4SbwTBZeGp45d9reaeWaeFv4WVv4Wrow9HH1JxlKBtSw+JjNTXun3/+w/8A8E6/ivD8QbzxP+0/pWpeCfD3g+Ka7+Jeva5b+Ra6TYw/NLGrN8skkm3av+9XwF/wVJ/bw1r/AIKA/tm6n8adA03+zvBmg28eh/D3Rdm1bPRbf93D8v8Aek/1jf71WP2pf+Cq3/BRT9sX4b2HwS/aP/ar17XvDdjbx/atFjSO0ivmX7rXLRqv2ll/6aV4HDEjQ/PCq/Jt217kIxjK6iepzSUPi1OV8V/NqDzJ0Z6y62fFUSRXHl7MCsatTSnIuaDG8mrQqn96v0m+Odwtn+wnNPKdoTwppuSO3NuK/OLwbZ/atchR3xtfdX6JftJyGD/gn/eyKenhTTO/+3b1+veGH/Ipzz/sGl/6TM+v4Y1wmPj/ANOn+Uj4WWZLiH5HXbVmxjjmkZEfdtri7PXpIVMPr91j/DW5pesfdSN/95l/ir8dPipROphRFwmdyrWza2qX2nvbTf63duRq53T75LhhMnH9+uhtbh1nWG25Vk+8rVpT3I5oX5ZEcMf7uXSb+HI2fe317j+y5+xz+1T4806O58LfDe9bRr5mlg1CC3Zo9qqzbty/d+VWrxq7s+ft9sm14/4f4f8Aeav3K/4N7/8Agqh+w/4c+Alp+zN8bfENt4V8ZPc/2fK+qhVs72Nt3lMsjfd3bq6cPjfqVWNSx52PwksbS5Iux+Qmv/tTeAPh2raR8P8AwaPEGt2dzifULlP3Uckbf3f4vu1+1v7J/wAPv2IP2zf+CW/jLx/8btM8Pt4gtPAd9qN79h2pc6XD9lZlby/vKyyKa+Mf2TP+CWi/DL/gqB4ttfj54GhvPhvdeJLi5t9Us4Fls2t5rhvL/e/dX5WXb81fVX/Bej9mz4Z/8E7v2XNc+NP7Jvh6405/iFpyeCtUt7eTNpZ290dxuN277zKrKq0sfmlfMa0Vz/CceCy+hgY88Yb733Pwc0FU/su1/wBJmcNEzeZ/eX+Gq02oPF4khttn+si+fbWja6Wmm2KQ9oYtm5n/ALtcbpOqTap463o+7a2xPn+7WcfePZR6T/rNNJ+9t/u159eLt1B3TdnftavQ7dvMsf3L7tytv2pXGXGmus0yb87Zdzt96spbG9GPvO5DY3j+YUT5GX726uw0PV4FjVJpm/3o65SOF4mRNnzrWlpsvk3B+7/wGtYxHU909B0nVE2q8L7i25dtdHo+oTSMqI8bIz/e3fNXnljqvlwhERmP8ddBo99DIqwsnlbfut/DUyp/aM41OWVj0Wz1gOyfvtu379ez/s8zRz2epvHISC8J2kY28PXzrZ6ttkZH2srfd/2a91/ZRuFm07WVA5EsBY7s5yHr9B8IqfL4gYR+VT/01M+u4Pqc3EFJf4v/AElkuvarAfFl9aTP928kC5X7vzGrH9uWklqnkuqP9193zNtrhfHWvmx+IGrRm5XZ9vnBU/7xrMXx4kcfyTr/AHdrV+a8T4WU8+xTj/z9n/6UznxOLgsZUj/el+bPSrfxA8PmbH+Rl2o0f8NXLfxZbxqEeaNGX5V+626vK5PG32iGKf7Sp/2Vaj/hLv3b7IV+V1+6q18tUwfNGXMa4fGcp7Pa69C3lvNNGV+9LGv8Na0fiKS1d4d/3fm2xv8ANXiOl+LplLbzJ8z/AHWbd/wGtuP4gJb3D6g9z9odk27m+XbXjVsHyn0OHxkJQjKx63/wkieWfkkiaHavlt825f71VdS8cWyyM8J2Bd3lRs25tv8AtV5n/wAJ07Ls+0sf3XzbX/h/iqje+NvMRJhMpSNdr7vvba5I4Pl956nd9eh0O/1DxY/lvczXKoNm7y/4v93bWPqviJ2jR3mYq3zOy/K1cNeeLnjkFtbOvzfL9371Zd94svW3Qo6u/wB7dv8A9WtaUcDUlyyPPqY/llY6PX/EE2353w+7564nVPFFzud98OPu7f4t1UtW8SujN51yu9v7r1yl9qzztLDDMqn7ySfe+9XvYXCy92MkcGIzE2pNc3SNc3L4Xf8Ad3fxU/T7pLiNn3qvzfeb/wBlrkpr2ZW2QuqKvzMv3t1bGm3k0j/I/mps2o2zbXrewjTieX/aHNI7OG+huoVT7Mvy/fVv4lq/a6gkeZHTytvzbWTcq1zelyTeX++uW3b9v3K1luLqTcnVt38X92s6lGEYF/XJc3NI+kIdQeSRHTdsV/7n3lrQhkST95cou2Nlbdv2rXDWviCaOEO/mOjPsSRm/eLWzp/iAyTfutzRrxukf7zf7S15VbByPbw+Mpcp2NjdObjzvJhd5v8Almvy7V/2a2LWRLdmk35bb8y7N23/AGmrkNP1q5dv+PlVWNvvN/DWjY6lbLIs2xk27v49u5qwjhPetLY6Y4vqzrIblLqM3CQxu6p/47/C1VppJWZzbNhmVm/efdZqrafqELfvPtmw/wDLWqV9qz3Tf6HtUSfdaZPmpRw3vPlFLF+7qUfEW+FWzMrlov8Alp8vl/7NefaxJ5Nr5kk292TZ5i/w112qXjtepO/l+XH/AAt/y0rn9WgSWGSOF/MCuvyx7Vruo4flPNrYiMpSZwesWsMl0z+Y37yL5N38X/Aa5rWNOQRlIbZijbmdv4d38S13Wp2e2Ty327V+Xy2+9/wGufvtPnWTYifOzs37z7telGjGR5NaS2kcLfWqeXl+GX5tv3WqmbflEhRc7d3mL93dXT61p/2xm2Qxuyt97Z96s0aX9on/AH0PzRv8+19u1q7Y0Tz/AGkuYk8O26bd95w6/wDj1dVo8CQzfvkY+Z/t7flrJ0fT5ftHzpy3zbfvMtdNp8KW7Ze
bJZsrtT5drVjWw/NO5tTrRien6CVT4a5hQ4FjMVU/8Crzu786Vvtlzt2/KrtIny7v9mvR/DP/ACT5MNn/AEWXlvq3WuN/s3zMJC6hfv7mT5a/X/Fmk3kuQX6YWP8A6TA+yz+rbC4N96a/JGVHap5i+cjfL8y/JtVv/sasLN/pMW+bdGyfLGv3d1LNYTNMtz9pVnVNvzfe/wB2mLb21r8m/wCVvk2/7W6vxGpheXU+a+uTHTfafLmlRGQLt2M33W/vVR1C+hWH7MHb5X3KzfK26rF95253SFX2vtl3P/D/ALNZWrSwq+x0Zzt/5af8s6qnhf7opYozbr7ZIpSGZdv3fm+6y1HY2s00n2Ysyfxbf7tTxxwwxvsmX+86tVm10+2kk89ptkzRb9rfdrf6qowI+tHMeOmgs7H7Nv3vJuVPn+avAvHGmvZ6hPbdXb7+2vZfih4mtrbXrPRPOjQR7mdpP4mrzL4hx2010LxJty/eZY678HTjTieRjcQ61TlPOILx7Wb+L+78z1T1qZ5GD722t/CtWNZmh8xnRPu1lX115kY2Phdv8Kferq5eU54lO4m8z50+U7qpSSPv379u75qmvN67kfd8v+zVCQ+Wdm9tqtSFH+Ukm2Mx2Ozbf4abHEWX/b/utUDSN5Z2fK38fz1Pp7edJ9/5v9qnGRUv7pXkXaNjpwr0zenmBNny1b1W0eNVd3/2flrP+dGHyUviCJJJb7t3l/NVdkZD861btZOf3xxVqS1SaFUoDm5SrpOsTWVwm58p/ErVtalp+la1Z/abOZUk+81c/eWZtmHerOl3H7l4XnxVRlylSj9pFCaF4pWhz92ihv8AXH+KijmNApsnanUMN3WpAVWx9/mpF+Zh3FRqu72pyBOfn7f3aCZbkzK6MPN5Vvu1EXOA3y4oZn27KYx+bPpQSDH5s+lNf7ppzL/t0BS1BoD/AHjSUUjHA4oAWt7wDIV1FzvxtXdurB74re+H7/8AE48sorGRNvzUES0iehra7bOLhtrP96qV9bpHNvG3C/w1oyXUMdr5OzezP8i/3ayb5nf7/wAzM9Bzx00PVf2EFiT9qHQvKxg217nH/XtJXu37ZMhh+IOlzl0GNGAjyuTuMsleA/sF3DyftU6ChdTm0vdwXt/o0te8/tooJPH+lh1ZlGjjAX1M0lfseGbj4D4u3/QUvypHBiJ/7RfyOF+HSwXPiK0e5+ZvN3bdu77tf0Of8EmNF8WP+wRF46+GTWf9t3sszWrasVSFpFXbGq/3a/ni0OH+w7q2uXfyp2dd0bPtVVr9ef8AgkZ+1Lovw5+DlnpX7Rmq3lh4It5ZLiy1KO48uO3ul+Zl2r975a/FKWIq0o+5ExjThKr755/8QP8Agop+0V8XNU174aeJ/A1jd6w3iCazv7e6vGVrdoW2su6vyP8A20virJ8YP2zvEusJCttbaWy6Xa28b7ljWNfm2t/vbq/XHxB40/YM0z44ax8e9N8bX1zDea9qWoy2MkWzdGysytX4h2mtW3i74j6/4th3GLU9burqBpPvKskzMv8A47XoSceSJ14eNRc0pHTxrlvsybXP95VqSNblbxETckX3tzfxVNaxvGq/PlW++1S6bp/2zUmkfbu/2nqfdLlGMT9LP+CEeqXLfFDxLo8MO3+0PAcyyqv3ZFWZfm+Wv0R/4V3c6lceYnmBJIvmj2fdavz+/wCDeNbCH9ojW7C/v42iX4fag3k7vu/vFavr39p79udPDX2zwB8FvLmvIf3V5qkafu7fcv8AC38TVvLERpxPGrx980f2hv2mvAH7MmkvonhLTbfWvFs0G2K137Y7P/ppNXwX448ZeJPHXiS48YeMNYk1XU7p5HuL6R9yx7v+Wcf91a1PFFxeXuoXGpalqsl5f3G55by4ZmaRm/2mrzjxJ4iNnILCwm82Xdu+98sa/wB7/ar53FVp1J3kClGXwnS/axD4WlvmXaI7WR8DtgE188+MviBqupXFxbWyKieay7o5f9Zur3eOaeT4a3MzMryHTrg5HQnD182yxpYyHzkV32bmj/h3fxV+veNMZSyPhtL/AKBI/wDpNMdpOSsVF0r7Urpf3O2JdzPJC275qp6tfW1vG3kusSLEy+X/ABNS63ryWy744eG3bI9//j1c3q11NJbx3+pIrK3yo2/btr8Lj7vum0Y8xl+KtecWro77Itm7az/M1eF/Evx0+r3j2NmyhF+Vttbnxg+I3nu+n6bdNu+622vMCWLfMcmvYwOFly88z1cLh+WN5BSMueRS0V6vKd4UjLnkUtFSA2L79OpFXbS1XxAFPt5HjkD/AO392mUm4qwokB3Xg3XHjm3u+dv3FWvTdP1B7yEb33btv/Av9mvDvD999nuNm/FeqeDNWeaNETafn/ielH3TmqROh1TRzqlk3+h5K/xf+y1k6bCn2O50yb5PMRo/ufw12tjJItizvt+b+Gub1zT3t77fZ/IG+Zt38S/7NXL+6KmeAataGw1Oe06eXKy177/wTr+H2s+K/jta63bR4ttLs5rq4btt27a8d8bacbnxncw2y8SOrbq/QX/glD8GrCL4S678S5kbfeaothYfL8skca7pPm/3q8/Mq31fCyZ6ODUZ143PRpPDMdrbx74WYb13sv8ADVy3sZrSGSZJt6/3VT+9XoWoeDXa+GYY1XZ/D93/AL6rOm8P2dqpt0tmMi/LEv3lr42o5z5bH2mHnCx5V4k+HngnxJH9m1Lwlpt5u+aVri3Vmrz3Xv2T/gVqrb4fAclqzbt7W9wyt/3zXvl/oLqqbLZUSN9u1v7v96ua1rTYYbh0O6Ir/DG27d/drSjjMQrxUth1MDhanvOJ8zeJv2E/hdqf7nRPEmoWMn8XmKrrXlnjL9hnx7pe+fwxPb6lEqfOqPtkZt3y7Vr7M1DTUt5GjdF/22X+Kov7Ff8AcokLI+zd5n97dXVRzTEwd5S0PNqZJhpS9z3T84vFnwk8eeDLw2fiHwreWx7eZbttrBk0yeFtkylTuxX6dT6PBHIXvLX7RuVfluolf/0KuW8Qfs//AAl8XGVNb+HVmC3zy3FunlSM27+8texRzilKPvHm1Mkr/YZ+dL27xln2fdr9F/2fQf8Ah3GgPX/hDNY/9Cuq838YfsGfDfVpJpvCWvXmnPu/1Nwu9F+X/vqvbfA/gC58C/sXXfgBrlJpLbwrqkQkj+628TsP/QhX7h4K4ujic3x/I/8AmGqf+lQPe4TwtfD4yv7Rf8u5fmj8vyMd/wAqGXdir13ot5azPbTQtvjfa2KgksblIw7wt/wKvyc+GUkQUU9oXU/PTSpWgsb/ABr9acu/HFGxvSjlTQT8Qd/nzQp2t8lJTQS2aCiTzvL+6cUskzSSM/8AepjDd1ooFyoVvmYmkopeWNBAsmVfikX5fn2Zpu75sU7+D8ar3TQX5Nu9P1pJm3/O4o/hLUYVvvmjmARVfvT9ybt1MBxyKEVzUi3Qr/eNCt/B0/2qMfwfxUHIBWq+EXMxY1zz61d0nWtY0kv/AGbctGZPvtVEsW60cqakSjzF/UtZ8Q3Ehh1DUpnPdTJWfu
2nd8lSaRBt7SeY74NWLXUJoW3+c23+7UTQzMN/3qU27r8nf/AGqr/EBuWHih0Ub03D/arb0/xG8jNCm1d1cRHG7fJsartr5sbLNsb/dpc3KTI7LzPtDb9iq33dtT2VruzNlX/wBmsfR7xPIX52P+9W3p94gbyUT738S1rzc0TE0/Dun/AG64+zbNnzqq7v7tdf8AED+0o9JttEtrWQJDFulb/wBBrm/Cd3DHqUKXO1Pm2uzf71eqXzaa2jtqqQ/bC0Sqys9PmI5o854Rr032e1l+TCbPmXbXnEzFpi+zG6vc/H8fhvVrVLaGwktpGXd5f3q8e8ReH5tKuN6Qt5TfNUfD8RtTlcyqVWfdspKdCqCRd+6g6TX8O2vmP8g5/iq14wmEccNmlzuRfm21P4YtkjUzO642bvu1h65fPeX7vvVlZ/vLS+2Y8vNMpk+Y1aOnzTx252P81Z8Y/eYetnSVhWNhs/4E1QEi9pd472phd97tTdSmubfds+cVFbqnmHyX+Zal1L99OIUdt+z/AIDQR8PvIr6fq1zcN5P3f4fmq/dXiW8bPs2/w7lqO3sYbWMyVHfMkkJhfk/e+WtBc3vjrfUt0iu8y4b+GtixTdDLNC7FFrnbfT3umXCfdrqtBmezt3R0UIyfMq0B8RmyX32X3VvvV96/Gtt/7AxdCBnwhpZH/kvXwpqljbM29I2279v3a+7PjXb7v2CDbK2P+KS0pQfxt6/X/DHXKc7/AOwaX/pMz7LhR3weP/69P8pHwGusJ5nlu+dv96tGHe8P2npu+5WZb6SjNs+Ulfv/AN2tGO4e2jCPtx93bX5DKMYyufG/ZJbfevM0O8f3qkkWFmR3TBk+Wq011OJN6Ju3f3f4aiWaaS4SF0YBv4qQSjyli4t/9tg23/vmq8av5e/zmI/jp/2h1uPv/dfbTG+VVTfuT+81L3ugvd5hJGfOx5lP/stZ2qKqyfc3bf4lrRkt0uN2zctMbQ55ov4f71EolnNzWbtJ8nA/vVCbGZ12bN3y/wANdP8A8I/5Kqj/APA6kbwn5S74X4b7v+zTjEz5uU5C006a4uvISNifate80+bT1jieHYy/f3Vu/D/Rba68eQ2Tur/Oqv8A3a9C/ah1rwR4i1fw94a8B+A4dGHh/S2ttW1D7b5rapcM27zP9lVX7q1nKITqX5TA8G2v/EjV/vf31/urWnazzaTcLqtm+2aP/VN/dqv4NV5ND2bMbX2tuq9cRyRtKfJ+X7u5a3p7GNSMZaH2D+xb+3NqVjJZ+D/G2vNaTQp/oGpebtbd/Cq12/8AwVO0HVfjj8O2+MupaU02o6TErPNbxf66Pb95mr4A0+6vNJukubZ2/c/Mu371fUfwN/bAfxF8M9S+Dnj+/jZ7rTWt4rq63bWX/a/2q1jWnS0+ycFXDxn7/U+R/L8ldnzff/iSvq7/AIJzNnRvFKkYIns8/wDfM1fMOs6b9h1i5tra5V0hnZFZX3Ky7q+n/wDgnTn+yvFmQB/pNn0/3Zq/RvCb/kvML6VP/Tcz7bgdy/1ho3/vf+kSPG/jkUT42+KlMTBv7duWV/X94a5qO385ld41zv3bv71dL8dlQ/GfxSWbaf7eufm9P3hrnY1/eNs+6v8Adevjs8/5HeK/6+T/APSmfO5jJf2hW/xy/NiBXW4EMnyt/dX5ql2pI3k7GLbfu79u2ntC7R74H27fu0ohLQ7P3j7fvfJ8zV4/2DkjKW8iNdm5k37fm+9UCypCVRPm+fazbNy1buFQ7odm1tm5Vaq7RC3dnfzAI/4mquZBH4rC+Z5f+kiH5lptoySKuz+9/FUUzbf3Xk//ABNSbdtwyJJt3JtrGR3x2LMNw/2dkeDf821G3/dqeOaHaPn+X+CqTFNqu7t8y7akVkaFpEdW/uVly/aOqPOaX2tI4fJ6bl/herNjL5kaTP8AN8rbt1ZUf+qTfyV+bdVyN/JZXR2X5Nv+zS5UKUpGtbSecud/3f4f71WLdtyvNMija/y7X3Vm2s27Y7/LuT5NtXbObazO5VGb5mVnreJx1uXoaFjMVVdj7n2bVjX5aZcrBsKOjMZPlpkbAyIkKYXZu3Sfw0jMis2987fl8tfu/wC9V/4Tg92XunGX1r5LtM+7bu/v1LYrvj87yVbd9z/aWrGqRp882/I2fdp1vHtVY1Tjb8jKlR7GfKe3Rlyk9rbpM2Xh/h/75rVtWeFl/fbl/u1ShjeO3/cup/2a0bVfm2eTtG2vPrUZRPawtT/wIu2Me5Vab/lp/Ey/MtaOm3HnS7PJj+X7q/d3L/8AFVm29ykbF96/991dtZEutjwpsMnzfc+XdXj4inL7J7NOtKUuVyOg02R45g+xSzP/ABfwrXRabfJHCiI+1tm7d/8AFVyVi32eOKa5Crt3KzK33mrXjvNuJpLnai/NuVP9mvJqe9I9Gm4xOxtbra2938zcyq67/wCGug0vUPs00XnQ7k3bn3VxOl655bLv/wBIWRP3rK+1l+X5Vrd0nUplji2PDjeq7ZG+7XLKPKd8akTvtJ1GG3YvN0/ur/tVpLq1hG0iJtfy4vnk3bf4ttcTp+uOZGdEVjGjNt3/ADNWjJqkMLI7p/rE3fe+Wrw/PGRjiJQkaeuXEP2hEuZtiNKyfN93/erntQ1K2hm8mN8Ov8Wz5WqXWL+Z42hfaRvVkZvm/wDHqwdUvEW382ZtzKny7vvV7uFre57x8xi6fNJyPSdHS4vvAnlWjhpZbSVYiG/iO4Dn615V4p+E3xVutFns9F0dTLcjbMXvIhuHrktVnwz8UfEPhG1XSozHdCeTdELndtiz1AwRgd8Vz/iH9srxRp2uTadpmhaVJDDN5bSSCTJx94jD8iv6Mnn/AIZ8WZNl9PNqtenVw9KNO0EraJJu/LK9+W620eup62MxvDWY4SgsbOcZU4qPur08n2OftP2YvjQlwh/sC3jVW6m/iPHpw1dHqfwG+J4tvI07w/Gw27SrXsQ4/wC+qvWX7VPjCWyS6utA0zdIcqsaScD8XqjqX7YPi+0lkih8O6XlO8iyf/F1z/UfBiMr/WsT9y/+VnlQocD30rVfw/8AkTlLz9mH44OS0XhuMgggKmpQjb+b9K5PWP2Nv2h7243x+DoWG/duOrW4/wDZ66rVv2//AIj6fN5UPg/Q5CPvLtmyP/IlYdx/wUp+J0RITwR4f47Ms/8A8cpfUfBff61ifuX/AMrGqPA3/P2r9y/+ROef9iP9pBFaRPA0DMynC/2xbfL/AOP1jX37Bf7UupzNLP4KtVI+4TrNsf8A2euwX/gpn8WiQT4E8OYPQ7Lj/wCO0y6/4KefFS3GR4F8On5f7k/3v+/lV9T8GJe99axP3L/5WEcNwN0q1fw/+ROGb9gD9qcWjQjwBaFy2Q39tWv/AMcqhP8A8E8P2ry+6P4f2h/7jlr/APHK78f8FQ/jGyGT/hAPDIA6gpcZP/kWqkv/AAVV+MiqHT4eeGcHqClxkf8AkWo+oeC//QVifuX/AMrNPYcEW/i1fu/+1OKj/wCCdv7WET7ovAVoM/8AUbtfl/8AIlbuhfsHftRwZGo
eA7RQRgkazbE/pJWmf+CrnxnySvw88LlR32XP/wAdqWD/AIKpfGl0WWb4deGVRjgER3H/AMdp/UfBeOv1rE/cv/lZLocDf8/av3f/AGpR1X9gH9oS6QmHwbakj7v/ABNLcZ/8frmr/wD4JzftTOxMPge0ceg1q2H83r1jwb/wUs8f+IZxbaj4O0GFs4IQTc/nJXW6l+2t8Tre386x8MaDJu+6WSbA+v7yksD4LS0WKxP3L/5WQqPA1P3vbVfuX/yJ85r/AME6f2smTa3gG0GemdctTt/8iUi/8E5f2slBA8CWgz6a5a//AByvVPEP/BSP446KzLH8PvDL7Wxylx/8drAk/wCCrnxpiQs/w78Lgjtsuf8A47Q8v8F4/wDMVifuX/ys0jS4HltVq/d/9qcYn/BOj9q8gq/gG2A3ZGNctf8A45SJ/wAE6f2s1BUeAbTP95tctf8A45XZD/grD8Zud3w58MDH+xcf/HaVv+Cr/wAZVfb/AMK88L/98XP/AMdo+o+C9v8AesT9y/8AlZXsOCf+ftX7l/8AInIJ/wAE7v2shFg+A7UMPu41u1/+OVLD/wAE8v2r1H7zwHabv7w1u1/+OV1p/wCCrXxmwGHw98L4P+xc/wDx2g/8FW/jKOB8PPDBPpsuP/jtH9n+C/8A0FYn7l/8rEqHBEdqtX7l/wDInP2f7AH7VEGC/gW1POcf21a//HK6HT/2JP2l4bcRT+B7ZMDGI9Zt/wD4uprT/gqh8Y7ghH+HfhoMfRLj/wCO1dP/AAVD+KkcZkn8DeGxt64W45/8i01gfBf/AKCsT9y/+Vk+w4H/AOftX7l/8icxqf7Bv7UF1K0ieBrZjuyrDWbYf+1Kji/YE/acEarJ4HthtGcLrNt97/vuunt/+CoPxjuX/d/D/wANY7jZcZ/9G1dP/BTL4rKGZ/BHhtQq5O5Ljn2/1vWj6j4L2/3rE/cv/lZLocC/8/av3L/5E8w1T/gnV+1bcXHnQeArQ7upGt2o/nJVX/h3J+1r/wBE+tP/AAeWv/xyvSj/AMFRvjC0vlxfDzw37FluMf8Ao2lvf+CoHxnjtzPZfD/wyxX7yOlxn/0bS+p+C8v+YrE/cv8A5Waxo8E/8/av3f8A2p5r/wAO4/2tP+hAtP8AweWv/wAco/4dx/taf9CBaf8Ag8tf/jldr/w9j+NP/ROfC/8A3xc//HaP+Hsfxp/6Jz4X/wC+Ln/47VfUfBj/AKCsT9y/+Vl+w4K/5+1fuX/yJxcf/BOb9rQct8PrP/weWv8A8cq7pn/BOb9qKS7jXUPBNtFGWXfINatjt/APXoHhL/gpj8evF+qx6Rpvw28MvI/UrHcYH/kWvXtG/av+ItzGBqfh3RVkC/P5SSgE+gy5rKdDwUpL3sXifuX/AMrOar/qJD3ZVqv3f/ann/hf9jP4teGNJj0y08K2o2jMji/h+Zv++q11/Zg+M0Uonh8NQqw6bdQhx/6FXZN+1Z4xTIbQdK3B9pBEn/xdMm/au8dJsRfDOlbmGcEydP8Avqud4bwQlvi8T9y/+VnO6PAL3rVfuX/yJ1nw18AeONC0RtM1/SFVpBkk3EbAH8GroYvBuqrIZ2i+cH5DuXgfnXMfCv47+KPHN3NbapoVqoiAO+zjfHP+8xrsZfGOpqp2WkJbOVVsjK/3utS8B4HdcXifuX/ys5o4bw8V0q1b7l/8gcpqHw18bx3sk2nxKyOrLhJlThvqaSDwL8QbcqsmkrIsabV2XSLn9a27/wCJup2dzHGLCBkkGMhWyD+dE3xN1NWXyLO2fPVBu3fhzWNTKvA7ri8V9y/+VFPCeH0d61b7l/8AIHOyeAfiXI27+yo1bbgMLiPj/wAeqpJ8OvinNGI20CDcFI3tdx//ABVdM/xS12P5ZbSyRiMpuD4P/j1D/Fy5t4Q86WjOekcatz+OamOWeBdv97xX3L/5UDwnh7/z+rfcv/kDO+Gfws8WeHfG0HiTxAwZIg4yJlOMxlegPqa539qj4NfEb4neMdN1PwXo8dxbQab5M8pu442VvMc4w5GeCK7zwf8AEzUfEniaHRrjT4EhmD4eIMWBCFuTnA6VoeP9R+KOnTrH8PfClpqCG33NJd3CpiTJ+XBde2Ofev1nJci4FzXw1r4HLnia2D9veXLByre0Sg7KMab91Llb917vUaoeHqldVqv3L/5A+PviL+xb+0Z4hhtrHSvBkEkUcu+RpdXtwT+b0ul/sU/tFWcYR/CNsNn3R/atv/8AF17rcfED9usayLa2/Z+0A2eObhtXhz+X2rP6VqWvjL9sJ3H2r4M6Ii98alHn/wBKK+fo+HXA8YWjh8x+dCf/AMpOlU+Abfx6n4f/ACJ4Kv7Gn7QDgmTwvbKSMfLqUHyj/vurQ/Y0+NJCRt4TtgqptyNRhz/6FXuq+Lv2tyxDfB/RQAeD/aEfI/8AAipovFf7Ve0mb4S6PnPAW/j6f9/60/4h3wT/ANA+Yf8Agif/AMpE4cAPevV/D/5E+fj+xj8b443SDwpD935M6nB/8XWP4y/Yn/aP1TQJ7TS/B1u08yBdp1e3GM9eS9fTg8VftS7xn4T6Tgrk/wCnx8H0/wBfWV4x8cftoWGkmfwZ8DdEvrzeAIZ9UhVcdzk3K/zo/wCIecE2/wB3zD/wRP8A+UlRhwCmrV6n4f8AyJ8Z/wDDuP8Aa0/6EC0/8Hlr/wDHKP8Ah3H+1p/0IFp/4PLX/wCOV9P/APC0/wDgpZ/0a14X/wDB7b//ACbR/wALT/4KWf8ARrXhf/we2/8A8m0v+Ie8E/8AQPmP/gif/wApOj/jBP8An/U/D/5E+YP+Hcf7Wn/QgWn/AIPLX/45Qf8AgnH+1oeD4AtP/B5a/wDxyvp//haf/BS3/o1rwv8A+D23/wDk2j/haf8AwUs/6Na8L/8Ag9t//k2n/wAQ94J/6B8x/wDBE/8A5SH/ABgn/P8Aqfh/8idj8HvhP448JfsVr8G9d0yOLxAPDOpWZtFuUZfOlM/lrvBK8715zgZ5r4tb/gnF+1qeR8P7T/weWv8A8cr72tvHXxI8Pfs66h8Tfil4TtNK8S6VoF9f3+kwTCWGN4FldF3JI+4MqIThyfmPQ8D5A/4ex/Gn/onPhf8A74uf/jtfQcf4DgOGEy2hndStT5KKjTUVaXIlFfvE4NqWiurKzvoZYahwLeTp1qr112/+ROK/4dx/taf9E/tP/B5a/wDxyk/4dyfta/8ARPrT/wAHlr/8crtv+Hsfxp/6Jz4X/wC+Ln/47R/w9j+NP/ROvC//AHxc/wDx2vzf6j4Lv/mKxP3L/wCVnV7Dgr/n7U/r/t04r/h3H+1p/wBCBaf+Dy1/+OU3/h3D+1r1PgC0P/cdtf8A45Xb/wDD2P40/wDROfC//fFz/wDHaP8Ah7H8af8AonPhf/vi5/8AjtV9R8GP+grE/cv/AJWHsOCv+ftX7l/8icV/w7j/AGtP+hAtP/B5a/8Axyj/AIdx/taf9CBaf+Dy1/8Ajldr/wAPY/jT/wBE58L/APfFz/8AHaP+Hsfxp/6Jz4X/AO+Ln/47S+peC/8A0F
Yn7l/8rD2HBX/P2r9y/wDkTiv+Hcf7Wn/QgWn/AIPLX/45R/w7j/a0/wChAtP/AAeWv/xyu0b/AIKyfGkDP/CufC//AHxc/wDx2hv+CsnxpAz/AMK58L/98XP/AMdp/UfBj/oKxP3L/wCVh7Dgr/n7V+5f/InF/wDDuP8Aa0/6EC0/8Hlr/wDHKP8Ah3H+1p/0IFp/4PLX/wCOV2v/AA9j+NP/AETnwv8A98XP/wAdo/4ex/Gn/onPhf8A74uf/jtH1HwY/wCgrE/cv/lYew4K/wCftX7l/wDInE/8O4v2tN27/hAbT/weWv8A8cpf+Hcf7Wn/AEIFp/4PLX/45XpGjf8ABUn4u63GbWDwJ4ZS8P8AqY3S42yH0B83rVC4/wCCrHxvtpmt5vhv4XR0bayslz/8dpfU/Bf/AKCsT9y/+Vi9hwX/AM/av4f/ACJw8f8AwTj/AGsiw8z4f2gHf/ieWv8A8cr7Z/4JvfAy8/Zo0HV9Y+JNvb2mr6gUt47aPbMRCRl2Lx5GcgcV87+Af+Clnx48eeI7bw7p3wz8OSS3MojRYorjJY9uZa+9/D/hS3v7Czk1K6cTyWiNeCIbVjlK7iozk4rxc6oeBEKPssVjMUk+yV//AE0z18nwHC9XEe0w85trvt/6Sj0TTPi74GtExLfyE55PkPkj8q1bP41/DJZWF1qjsrY+ZrSTt9Fri9I+EmgX8ImuNRvFHcIU5/8AHa6LTP2dfCN64WXWdSXK5XDxjn/vmvjHlv0a5R1x2N+5f/KT7hUsO4rc6OL46fCKJ939uyHnAP2GXgf981ZuPj58GpQXj12QF2GV/s+XgHr/AA1n237JPgqeHzz4i1XA7Bosn/xyrUf7H3gGRjjxRqxA7AxZH/jlZrL/AKNMf+Y7G/cv/lJoqNGPulS++NvwqeV2t9dkI2lVb7DJu29v4awdU+KvgK7RWg1hg38ebST5v/Ha6GX9kTwaCUh1/VmY/c5ix+PyVzOofs9+GLOZoF8QXuVJG5gmAR2Py9a2p5b9G37OOxv3L/5SRUpUOXW5zOr+KvC95OXTUJHGSQTAw/pXH6syXzq8NxsVRt2qnOM5rrNd+HWj6ZOI7bUpnUHErMV+X9K5q+0tbZ3FuzPtkKYbg5FejQy76O9PWONxnzS/+UnlVsLlc7qTl/XyOQ1XRNbmDC3t1kOc7vMALt/eOaxNT8GeMLmdwLZGiKfuhFIibW9+ea6XVvEOoabIyJaI+DgYycn0+tcnqfxi16wLL/ZVrlBkhg33f++q97DYDwHfvQxeK+aX/wAqPm8VhuGHJqpOfy//AGTL1X4Z+PrjIs9LQEr95rpMZ9etY0/wY+J1w29tMiVmfc7LdRnH+7k1b1D9pjxJaMUTSNNBVcvv8zj/AMerPH7Vvi3d5baBpeexUSEf+hV7NLL/AAV5fdxWJ+5f/Kzx6tDgm3vVav3L/wCRF/4Ut8TyVjXRYlj/AIl+2R5/PdXqPhjw5q9h8MB4ZvoFS8+wzxGPzAwDMXxyOO4rzSD9qHxbI6o3h/TTnrt8z/4qrtv+0j4klG+TQ7HGcYUSE5/76r6fhrOPCfhnFVa2ExFZupB03zRuuVtN2tBa6Lv6HTleO4MyitOpRq1G5RcXzK+jt2itdChdfAv4iGPFpYwLuXDBrpcr+Oa808dfsl/HzxH4lS+stDsjbxwldzahGCT9M16v4g/aa8RaLa/aV8NWLARlizyuAMV45bf8FNPHl74kvNHtfhzoxhtmISVriXLY9ea+fjl/gnusViPu/wDuZxww3AnLpVqfd/8Aannmv/8ABPD9p+/unktfDmnMrNkZ1iIf1rHb/gmv+1WTn/hF9M/8HUP+Nehar/wVU+JdhI8cfwy0FinUNPMP/Zqpf8PaPigeR8K9A/8AAif/AOKq/qPgp/0FYj7n/wDKzWGG4H6Van9f9unO6J/wTr/agtYXiu/DGmKD0H9sRH+tR3H/AATo/amlfJ8N6a3zZ3DWYR/Wu60n/gqT8TNRT5/hloSv6Ceb/Gi//wCCpHxNsjtHwy0Nj6edP/8AFVH1PwT/AOgrEfc//lZDocDc2tWp93/2p52P+CbX7U3O/wAL6Yc/9RqH/GlH/BNz9qf/AKFXSh9NZh/xru1/4KqfE9ip/wCFZeH8H7x+0T8f+PVKn/BU34myjKfDPQfu5/183/xVH1HwT/6CsR9z/wDlZXseBv8An7U+7/7U4P8A4dwftR5x/wAItpeP+wzF/jUh/wCCb/7TZw58N6duH/UZi/xrtx/wVO+JaDfP8NNAVT91/tE2D+tRt/wVX+JQGV+GGhdcczzf40/7P8FP+grEfd/9zF7HgaP/AC9qfd/9qceP+CcX7TOf+Ra00D21iL/GlT/gnV+1FGqxDwzppVf+ozD/AI11b/8ABV74lBii/DHQQf8Aanm/+KpU/wCCrXxPZtrfC7QR/wBt5/8A4qmsD4Kf9BWI+5//ACsaocD9KtT7v/tTnIP+Ce37TsChV8Madgdv7Yh/xrQtv2B/2loAWHhnTQx6/wDE2i/xrXX/AIKr/Ekjc3wx0PHtPN/8VV7Tv+CoHxGu5Ak/wy0ZQ33Cs03zfrT+o+Cn/QViPuf/AMrJ+r8Df8/an3f/AGpT0/8AYZ+P0TB5/DlgrbcM39qRn+tdT4a/Ze/aH0mOS3u9As3jP3VGqR4P61d8O/t/+O9bIWTwJpCE9Assv+Ndhp37VPxC1BFmPhPSYomztlkkk28fjSWB8E+mKxH3P/5WZyw/AcdHVqfd/wDamG37KXiy/iB1HwXYCXfw63qDav8Ad4Nc1rv7B/i/V/Nh/si0CSAgH7avFel3H7V2t2G2O80HTncpuYwzOV/nViH9p7xDPp7XqeHLFSMHa0j9D361p9R8F9vrWI+7/wC5h7LgP/n9U+7/AO1Pk/Xv+CaX7SsWpyroeh6bNb7v3UjatEpx7gmoLT/gm5+1OkgE/hbTNobOf7ah/wAa+hPGn7cHxE0ATnR/AmkzCEZ3TSy4I/A159B/wVG+KjTGGb4V6GpHpPN/8VUPL/BTrisR9z/+VmkafAvL/Gqfd/8AanNr/wAE/P2l7fT3hh8NaY0hTaudWi/xrnp/+Cbn7VLS+ZH4V0zHp/bUP+NelXf/AAVI+IUFwbdPhvoeV++Wnm4/WoJP+CpfxQCmSL4Y6CVHVjPN/wDFUfUfBT/oKxH3P/5WP2PAv/P2p93/ANqeeL/wTY/ao3iQ+GNMyGz/AMhqH/GtCL/gnX+0+iHd4X03J7LrMX+NdY//AAVV+KkbfP8AC7QMeouJ/wD4qr0P/BUP4lSQea3wz0IfS5m/xpPA+CnXFYj7n/8AKwdHgbrVqfd/9qcAf+CdX7U4kJHhjTto+7/xOof8asRf8E8v2pduZfDWmA/9hiL/ABrtl/4KifEpsEfDLRBnsZ5v8aQ/8FQ/iiEDn4YaFjGT+/m4/Wn9R8FI/wDMViPuf/ysPY8DSf8AFqfd/wDanJQf8E9/2mo1O/w3p59F/teH/Gqk3/BPD9qWWUlfDGmKD1xrMP8AjXbD/gqP8Tdm9vhjoQ9B9om5/
WlP/BUn4mbcj4XaHk/d/wBIm+b9aj6j4J/9BWI+5/8AysPq/A0f+XtT7v8A7U5PTv8Agnv+03asHfw1p2QMc6vEf61r2/7Bf7Q4t2iuPDlhknII1WL/ABrYtf8AgqD8T7iLzT8MNDX2M83+NWB/wU88fjHm/DjRhn0mm/xo+o+Cf/QViPuf/wArF9X4G/5+1Pu/+1Obuf2CP2jnO+PQLHOM4GrRfe/OvqL4nfDLxh4k/ZSb4WaVp6Ta1/YFja/Z/tCqpliMO8b2IXA2NznnFfP3/Dz/AOIJGV+HOife/wCe83T161E//BTn4rTz7bX4daBGgHJladifycV9FkuceEvDuGxVLC4ms1iIOnK8W3Zpr3fcVnq97+h6OAxnB+W0qsKVWbVSPK7ro77e6tdTlk/YP/acjYlfBVr0x/yF7b/4ukf9g/8Aab2/L4EtCf8AsMW3/wAXXYJ/wUr+LAjDT+AvDwJ9Fn6f9/Ken/BSj4rlA58A6Bg8nCz8D1/1lfOfUfBn/oKxP3L/AOVnlfVuB/8An7V/D/5E4qP9gv8AadVh/wAUVajHrrFt/wDF0+T9gv8AaXaZXHgq2wv/AFGLb/4uu1/4eS/FLdtHgjw8SBlsJPx/5Epo/wCClHxRMXmHwR4eHttn/wDjlT9R8F/+grE/cv8A5WDo8DL/AJe1fuX/AMicdH+wX+0kUKz+B7Y4+7/xOLb/AOLpI/2Cv2lUO4eDbUD+6dXt/wD4uuxH/BSb4stEZh4D8PADswnyf/IlKv8AwUi+Lhj81vA/hwBl3KAlx/8AHKSwPgt0xWJ+5f8Aysf1fgjl/i1fuX/yJyqfsJftGiPL+CLVn/7C1v8A/F0L+w3+0qpYnwJbEEYx/bFt/wDF11K/8FJviyzbT4F8PAj7wKT/APxyh/8AgpL8WowrnwL4eKlsfLHcf/Haby/wY+J4rE/cv/lYex4Ij/y9q/cv/kTlh+w5+0vIY9/gG0XH3ydYtj/7PWh/wxF+0N9mMf8Awhlru9f7Vt//AIutpP8AgpN8VS7B/A/h0DOFG2fJ/wDIlXV/4KJfFA2T3h8G+HhsjLY2z9v+2lNYPwYlLTFYn7l/8rJlh+ButWr9y/8AkTh/A37CH7R2k+I7jVNW8GW0SnPkumr25z+T1qax+w78fLstLF4Rt5ZC+4FtUgH/ALPXQeA/+Ci3xV8V209ze+CPD0YjbagiE/Pp1kNa2oft8/Eq1hD2/gvQ3IOHY+dgf+P1DwPgv/0FYn7l/wDKwlhuBuZXq1fuX/yJyvh/9jP9oCxsXt7rwhboWkzj+1IDn8nqxP8AsdfHtovLj8J25P8AeOpwf/F10+nft5fEW8gSWXwnoa7unyzcj/v5Sz/t3/EqJiB4O0TAGcFZs/8AoyrjgfBi3+9Yn7l/8rM5YfgWUuZ1qv3L/wCROPk/Yx+Pi/6vwhA3GOdUg/8Ai6gP7GP7RSsWi8IW6jbjaNXg/wDi660ft+fE8u0f/CH6BlRlhib/AOOVH/w8D+J+WU+DNBBHTib/AOOVp9S8GtvrOJ+5f/KzH6rwDzfxqv3L/wCROUH7Ff7QxVI38HW+M5b/AImtv/8AF17z+x18G/Hvwg07XrTxzpUdqb2a3a18u4jk3BRIGzsJx94da8zX/goF8TGQk+DNCDDsVm/+OVDe/t/fE+6spbW38LaPBLLEypLHFKWjJGNwzJjI6816+QZl4TcNZpDMcJiK8qkOaylG6d4uL2guj01Wp6WW4ngrJsXHE0atRyjeya01TX8q79zg/jjIg+NvipHG7/ieXPHp+8Nc9b/vG6qn99tn3qq3WqX+qajNqmoX73F1dStJLPMS0kjk5LMTySTzmpbeTy1/fPvVf4tlfjGOxMcbj6uItZTlKSXa7b/U/PcVWWIxM5r7Tb+93Lp+zLgumz5Pvb6W6VPLEyf3P4XqD7QjNveFWRvl3MlRXVw7Rs6Pg/7P3dtcXvfaM/djSJbi73SfIkY/hRv4ttQXEaQ4HzbG/vNSec7M8Lop+T5G/vVHJNMsafI2F/hZaUpS+FBTp83vC/aEnymxTt+Z1amfaEjZv3O7+61OuJIYo1R0Xc3zVXa42/PMm5vvIq/xVjLY76dvtEse+SQzJ8n/AEzZvvVOvkNDs/h/2ap/aIZF3v8AeqZZkbaiD51/8dqDcvQt9nXeNqr/ABrvq5bzSRyb3TerfLtas6FvtDbLqFfm/iq/FJsk+zTbWVvuVO0+YUv7pdguizG2+zK4Xa27f/FWhbSJHMCiMrN/Cq/NurNh7b/lbd93ZVyO481vvsn8Tt1rWMebc4K0pRloaq3CeSqeTlf4l/ipitP+72TbG/g+Xa23/aqKznk3Nvfeuzcn+zTmuIfOSaZNzr8r7XrXl5fhOTm94//Z\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [], - "image/jpeg": { - "width": 600 - } - }, - "execution_count": 38 + "Model Summary: 213 layers, 7225885 parameters, 0 gradients\n", + "image 1/2 /content/yolov5/data/images/bus.jpg: 640x480 4 persons, 1 bus, Done. (0.007s)\n", + "image 2/2 /content/yolov5/data/images/zidane.jpg: 384x640 2 persons, 1 tie, Done. (0.007s)\n", + "Speed: 0.5ms pre-process, 6.9ms inference, 1.3ms NMS per image at shape (1, 3, 640, 640)\n", + "Results saved to \u001b[1mruns/detect/exp\u001b[0m\n" + ] } ] }, + { + "cell_type": "markdown", + "metadata": { + "id": "hkAzDWJ7cWTr" + }, + "source": [ + "        \n", + "" + ] + }, { "cell_type": "markdown", "metadata": { "id": "0eq1SMWl6Sfn" }, "source": [ - "# 2. Test\n", - "Test a model's accuracy on [COCO](https://cocodataset.org/#home) val or test-dev datasets. Models are downloaded automatically from the [latest YOLOv5 release](https://github.com/ultralytics/yolov5/releases). To show results by class use the `--verbose` flag. 
Note that `pycocotools` metrics may be ~1% better than the equivalent repo metrics, as is visible below, due to slight differences in mAP computation." + "# 2. Validate\n", + "Validate a model's accuracy on [COCO](https://cocodataset.org/#home) val or test-dev datasets. Models are downloaded automatically from the [latest YOLOv5 release](https://github.com/ultralytics/yolov5/releases). To show results by class use the `--verbose` flag. Note that `pycocotools` metrics may be ~1% better than the equivalent repo metrics, as is visible below, due to slight differences in mAP computation." ] }, { @@ -652,7 +504,7 @@ "id": "eyTZYGgRjnMc" }, "source": [ - "## COCO val2017\n", + "## COCO val\n", "Download [COCO val 2017](https://github.com/ultralytics/yolov5/blob/74b34872fdf41941cddcf243951cdb090fbac17b/data/coco.yaml#L14) dataset (1GB - 5000 images), and test model accuracy." ] }, @@ -662,24 +514,27 @@ "id": "WQPtK1QYVaD_", "colab": { "base_uri": "https://localhost:8080/", - "height": 65, + "height": 48, "referenced_widgets": [ - "8815626359d84416a2f44a95500580a4", - "3b85609c4ce94a74823f2cfe141ce68e", - "876609753c2946248890344722963d44", - "8abfdd8778e44b7ca0d29881cb1ada05", - "78c6c3d97c484916b8ee167c63556800", - "9dd0f182db5d45378ceafb855e486eb8", - "a3dab28b45c247089a3d1b8b09f327de", - "32451332b7a94ba9aacddeaa6ac94d50" + "eb95db7cae194218b3fcefb439b6352f", + "769ecde6f2e64bacb596ce972f8d3d2d", + "384a001876054c93b0af45cd1e960bfe", + "dded0aeae74440f7ba2ffa0beb8dd612", + "5296d28be75740b2892ae421bbec3657", + "9f09facb2a6c4a7096810d327c8b551c", + "25621cff5d16448cb7260e839fd0f543", + "0ce7164fc0c74bb9a2b5c7037375a727", + "c4c4593c10904cb5b8a5724d60c7e181", + "473371611126476c88d5d42ec7031ed6", + "65efdfd0d26c46e79c8c5ff3b77126cc" ] }, - "outputId": "81521192-cf67-4a47-a4cc-434cb0ebc363" + "outputId": "bcf9a448-1f9b-4a41-ad49-12f181faf05a" }, "source": [ - "# Download COCO val2017\n", - "torch.hub.download_url_to_file('https://github.com/ultralytics/yolov5/releases/download/v1.0/coco2017val.zip', 'tmp.zip')\n", - "!unzip -q tmp.zip -d ../ && rm tmp.zip" + "# Download COCO val\n", + "torch.hub.download_url_to_file('https://ultralytics.com/assets/coco2017val.zip', 'tmp.zip')\n", + "!unzip -q tmp.zip -d ../datasets && rm tmp.zip" ], "execution_count": null, "outputs": [ @@ -687,24 +542,15 @@ "output_type": "display_data", "data": { "application/vnd.jupyter.widget-view+json": { - "model_id": "8815626359d84416a2f44a95500580a4", + "model_id": "eb95db7cae194218b3fcefb439b6352f", "version_minor": 0, "version_major": 2 }, "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, max=819257867.0), HTML(value='')))" + " 0%| | 0.00/780M [00:00

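Editor's note: once the COCO val images are unzipped as shown in this cell, the validation described in Section 2 reduces to a single `val.py` call. A minimal sketch; the flags mirror the `val.py` commands that appear in the Reproduce cell later in this notebook, and `yolov5s.pt` is simply the smallest pretrained checkpoint:

```python
# Validate pretrained YOLOv5s on COCO val (sketch; flags mirror the Reproduce cell below)
!python val.py --weights yolov5s.pt --data coco.yaml --img 640 --conf 0.001 --iou 0.65
```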
\n", + "Close the active learning loop by sampling images from your inference conditions with the `roboflow` pip package\n", + "

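Editor's note: closing the active-learning loop programmatically might look like the sketch below. The workspace/project names and the `upload` call are illustrative assumptions about the `roboflow` pip package, not part of this diff:

```python
# Hypothetical sketch of the active-learning upload step (API usage is an assumption)
from roboflow import Roboflow

rf = Roboflow(api_key="YOUR_API_KEY")                          # assumed: your Roboflow API key
project = rf.workspace("my-workspace").project("my-project")   # assumed workspace/project names
project.upload("runs/detect/exp/hard_example.jpg")             # an image sampled from inference
```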
\n", + "\n", + "Train a YOLOv5s model on the [COCO128](https://www.kaggle.com/ultralytics/coco128) dataset with `--data coco128.yaml`, starting from pretrained `--weights yolov5s.pt`, or from randomly initialized `--weights '' --cfg yolov5s.yaml`.\n", + "\n", + "- **Pretrained [Models](https://github.com/ultralytics/yolov5/tree/master/models)** are downloaded\n", + "automatically from the [latest YOLOv5 release](https://github.com/ultralytics/yolov5/releases)\n", + "- **[Datasets](https://github.com/ultralytics/yolov5/tree/master/data)** available for autodownload include: [COCO](https://github.com/ultralytics/yolov5/blob/master/data/coco.yaml), [COCO128](https://github.com/ultralytics/yolov5/blob/master/data/coco128.yaml), [VOC](https://github.com/ultralytics/yolov5/blob/master/data/VOC.yaml), [Argoverse](https://github.com/ultralytics/yolov5/blob/master/data/Argoverse.yaml), [VisDrone](https://github.com/ultralytics/yolov5/blob/master/data/VisDrone.yaml), [GlobalWheat](https://github.com/ultralytics/yolov5/blob/master/data/GlobalWheat2020.yaml), [xView](https://github.com/ultralytics/yolov5/blob/master/data/xView.yaml), [Objects365](https://github.com/ultralytics/yolov5/blob/master/data/Objects365.yaml), [SKU-110K](https://github.com/ultralytics/yolov5/blob/master/data/SKU-110K.yaml).\n", + "- **Training Results** are saved to `runs/train/` with incrementing run directories, i.e. `runs/train/exp2`, `runs/train/exp3` etc.\n", + "

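Editor's note: the two starting points described above correspond to the following commands; a sketch using the same flags as the training cell below:

```python
# Start from pretrained weights (typical for small datasets)
!python train.py --img 640 --batch 16 --epochs 3 --data coco128.yaml --weights yolov5s.pt
# Start from randomly initialized weights (train from scratch)
!python train.py --img 640 --batch 16 --epochs 3 --data coco128.yaml --weights '' --cfg yolov5s.yaml
```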
\n", "\n", - "All training results are saved to `runs/train/` with incrementing run directories, i.e. `runs/train/exp2`, `runs/train/exp3` etc.\n" + "## Train on Custom Data with Roboflow 🌟 NEW\n", + "\n", + "[Roboflow](https://roboflow.com/?ref=ultralytics) enables you to easily **organize, label, and prepare** a high quality dataset with your own custom data. Roboflow also makes it easy to establish an active learning pipeline, collaborate with your team on dataset improvement, and integrate directly into your model building workflow with the `roboflow` pip package.\n", + "\n", + "- Custom Training Example: [https://blog.roboflow.com/how-to-train-yolov5-on-a-custom-dataset/](https://blog.roboflow.com/how-to-train-yolov5-on-a-custom-dataset/?ref=ultralytics)\n", + "- Custom Training Notebook: [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/roboflow-ai/yolov5-custom-training-tutorial/blob/main/yolov5-custom-training.ipynb)\n", + "
\n", + "\n", + "

Label images lightning fast (including with model-assisted labeling)" ] }, { @@ -917,37 +719,37 @@ "colab": { "base_uri": "https://localhost:8080/" }, - "outputId": "e715d09c-5d93-4912-a0df-9da0893f2014" + "outputId": "8724d13d-6711-4a12-d96a-1c655e5c3549" }, "source": [ "# Train YOLOv5s on COCO128 for 3 epochs\n", - "!python train.py --img 640 --batch 16 --epochs 3 --data coco128.yaml --weights yolov5s.pt --nosave --cache" + "!python train.py --img 640 --batch 16 --epochs 3 --data coco128.yaml --weights yolov5s.pt --cache" ], "execution_count": null, "outputs": [ { "output_type": "stream", + "name": "stdout", "text": [ + "\u001b[34m\u001b[1mtrain: \u001b[0mweights=yolov5s.pt, cfg=, data=coco128.yaml, hyp=data/hyps/hyp.scratch-low.yaml, epochs=3, batch_size=16, imgsz=640, rect=False, resume=False, nosave=False, noval=False, noautoanchor=False, evolve=None, bucket=, cache=ram, image_weights=False, device=, multi_scale=False, single_cls=False, adam=False, sync_bn=False, workers=8, project=runs/train, name=exp, exist_ok=False, quad=False, linear_lr=False, label_smoothing=0.0, patience=100, freeze=0, save_period=-1, local_rank=-1, entity=None, upload_dataset=False, bbox_interval=-1, artifact_alias=latest\n", "\u001b[34m\u001b[1mgithub: \u001b[0mup to date with https://github.com/ultralytics/yolov5 βœ…\n", - "YOLOv5 πŸš€ v5.0-2-g54d6516 torch 1.8.1+cu101 CUDA:0 (Tesla V100-SXM2-16GB, 16160.5MB)\n", + "YOLOv5 πŸš€ v6.0-48-g84a8099 torch 1.10.0+cu102 CUDA:0 (Tesla V100-SXM2-16GB, 16160MiB)\n", "\n", - "Namespace(adam=False, artifact_alias='latest', batch_size=16, bbox_interval=-1, bucket='', cache_images=True, cfg='', data='./data/coco128.yaml', device='', entity=None, epochs=3, evolve=False, exist_ok=False, global_rank=-1, hyp='data/hyp.scratch.yaml', image_weights=False, img_size=[640, 640], label_smoothing=0.0, linear_lr=False, local_rank=-1, multi_scale=False, name='exp', noautoanchor=False, nosave=True, notest=False, project='runs/train', quad=False, rect=False, resume=False, save_dir='runs/train/exp', save_period=-1, single_cls=False, sync_bn=False, total_batch_size=16, upload_dataset=False, weights='yolov5s.pt', workers=8, world_size=1)\n", - "\u001b[34m\u001b[1mtensorboard: \u001b[0mStart with 'tensorboard --logdir runs/train', view at http://localhost:6006/\n", - "2021-04-12 10:29:58.539457: I tensorflow/stream_executor/platform/default/dso_loader.cc:49] Successfully opened dynamic library libcudart.so.11.0\n", - "\u001b[34m\u001b[1mhyperparameters: \u001b[0mlr0=0.01, lrf=0.2, momentum=0.937, weight_decay=0.0005, warmup_epochs=3.0, warmup_momentum=0.8, warmup_bias_lr=0.1, box=0.05, cls=0.5, cls_pw=1.0, obj=1.0, obj_pw=1.0, iou_t=0.2, anchor_t=4.0, fl_gamma=0.0, hsv_h=0.015, hsv_s=0.7, hsv_v=0.4, degrees=0.0, translate=0.1, scale=0.5, shear=0.0, perspective=0.0, flipud=0.0, fliplr=0.5, mosaic=1.0, mixup=0.0\n", - "\u001b[34m\u001b[1mwandb: \u001b[0mInstall Weights & Biases for YOLOv5 logging with 'pip install wandb' (recommended)\n", + "\u001b[34m\u001b[1mhyperparameters: \u001b[0mlr0=0.01, lrf=0.1, momentum=0.937, weight_decay=0.0005, warmup_epochs=3.0, warmup_momentum=0.8, warmup_bias_lr=0.1, box=0.05, cls=0.5, cls_pw=1.0, obj=1.0, obj_pw=1.0, iou_t=0.2, anchor_t=4.0, fl_gamma=0.0, hsv_h=0.015, hsv_s=0.7, hsv_v=0.4, degrees=0.0, translate=0.1, scale=0.5, shear=0.0, perspective=0.0, flipud=0.0, fliplr=0.5, mosaic=1.0, mixup=0.0, copy_paste=0.0\n", + "\u001b[34m\u001b[1mWeights & Biases: \u001b[0mrun 'pip install wandb' to automatically track and visualize YOLOv5 πŸš€ runs 
(RECOMMENDED)\n", + "\u001b[34m\u001b[1mTensorBoard: \u001b[0mStart with 'tensorboard --logdir runs/train', view at http://localhost:6006/\n", "\n", " from n params module arguments \n", - " 0 -1 1 3520 models.common.Focus [3, 32, 3] \n", + " 0 -1 1 3520 models.common.Conv [3, 32, 6, 2, 2] \n", " 1 -1 1 18560 models.common.Conv [32, 64, 3, 2] \n", " 2 -1 1 18816 models.common.C3 [64, 64, 1] \n", " 3 -1 1 73984 models.common.Conv [64, 128, 3, 2] \n", - " 4 -1 1 156928 models.common.C3 [128, 128, 3] \n", + " 4 -1 2 115712 models.common.C3 [128, 128, 2] \n", " 5 -1 1 295424 models.common.Conv [128, 256, 3, 2] \n", - " 6 -1 1 625152 models.common.C3 [256, 256, 3] \n", + " 6 -1 3 625152 models.common.C3 [256, 256, 3] \n", " 7 -1 1 1180672 models.common.Conv [256, 512, 3, 2] \n", - " 8 -1 1 656896 models.common.SPP [512, 512, [5, 9, 13]] \n", - " 9 -1 1 1182720 models.common.C3 [512, 512, 1, False] \n", + " 8 -1 1 1182720 models.common.C3 [512, 512, 1] \n", + " 9 -1 1 656896 models.common.SPPF [512, 512, 5] \n", " 10 -1 1 131584 models.common.Conv [512, 256, 1, 1] \n", " 11 -1 1 0 torch.nn.modules.upsampling.Upsample [None, 2, 'nearest'] \n", " 12 [-1, 6] 1 0 models.common.Concat [1] \n", @@ -963,43 +765,121 @@ " 22 [-1, 10] 1 0 models.common.Concat [1] \n", " 23 -1 1 1182720 models.common.C3 [512, 512, 1, False] \n", " 24 [17, 20, 23] 1 229245 models.yolo.Detect [80, [[10, 13, 16, 30, 33, 23], [30, 61, 62, 45, 59, 119], [116, 90, 156, 198, 373, 326]], [128, 256, 512]]\n", - "Model Summary: 283 layers, 7276605 parameters, 7276605 gradients, 17.1 GFLOPS\n", + "Model Summary: 270 layers, 7235389 parameters, 7235389 gradients, 16.5 GFLOPs\n", "\n", - "Transferred 362/362 items from yolov5s.pt\n", + "Transferred 349/349 items from yolov5s.pt\n", "Scaled weight_decay = 0.0005\n", - "Optimizer groups: 62 .bias, 62 conv.weight, 59 other\n", - "\u001b[34m\u001b[1mtrain: \u001b[0mScanning '../coco128/labels/train2017.cache' images and labels... 128 found, 0 missing, 2 empty, 0 corrupted: 100% 128/128 [00:00<00:00, 796544.38it/s]\n", - "\u001b[34m\u001b[1mtrain: \u001b[0mCaching images (0.1GB): 100% 128/128 [00:00<00:00, 176.73it/s]\n", - "\u001b[34m\u001b[1mval: \u001b[0mScanning '../coco128/labels/train2017.cache' images and labels... 128 found, 0 missing, 2 empty, 0 corrupted: 100% 128/128 [00:00<00:00, 500812.42it/s]\n", - "\u001b[34m\u001b[1mval: \u001b[0mCaching images (0.1GB): 100% 128/128 [00:00<00:00, 134.10it/s]\n", + "\u001b[34m\u001b[1moptimizer:\u001b[0m SGD with parameter groups 57 weight, 60 weight (no decay), 60 bias\n", + "\u001b[34m\u001b[1malbumentations: \u001b[0mversion 1.0.3 required by YOLOv5, but version 0.1.12 is currently installed\n", + "\u001b[34m\u001b[1mtrain: \u001b[0mScanning '../datasets/coco128/labels/train2017.cache' images and labels... 128 found, 0 missing, 2 empty, 0 corrupted: 100% 128/128 [00:00" + "

\"Weights

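Editor's note: as the training log above hints ("run 'pip install wandb' to automatically track and visualize YOLOv5 runs"), Weights & Biases logging is opt-in. A minimal sketch of enabling it and re-running the training cell from this notebook:

```python
# Enable Weights & Biases experiment tracking, then re-run training (sketch)
!pip install wandb
!python train.py --img 640 --batch 16 --epochs 3 --data coco128.yaml --weights yolov5s.pt --cache
```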
" ] }, { @@ -1035,67 +915,25 @@ "source": [ "## Local Logging\n", "\n", - "All results are logged by default to `runs/train`, with a new experiment directory created for each new training as `runs/train/exp2`, `runs/train/exp3`, etc. View train and test jpgs to see mosaics, labels, predictions and augmentation effects. Note a **Mosaic Dataloader** is used for training (shown below), a new concept developed by Ultralytics and first featured in [YOLOv4](https://arxiv.org/abs/2004.10934)." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "riPdhraOTCO0" - }, - "source": [ - "Image(filename='runs/train/exp/train_batch0.jpg', width=800) # train batch 0 mosaics and labels\n", - "Image(filename='runs/train/exp/test_batch0_labels.jpg', width=800) # test batch 0 labels\n", - "Image(filename='runs/train/exp/test_batch0_pred.jpg', width=800) # test batch 0 predictions" - ], - "execution_count": null, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "OYG4WFEnTVrI" - }, - "source": [ - "> \n", + "All results are logged by default to `runs/train`, with a new experiment directory created for each new training as `runs/train/exp2`, `runs/train/exp3`, etc. View train and val jpgs to see mosaics, labels, predictions and augmentation effects. Note an Ultralytics **Mosaic Dataloader** is used for training (shown below), which combines 4 images into 1 mosaic during training.\n", + "\n", + "> \n", "`train_batch0.jpg` shows train batch 0 mosaics and labels\n", "\n", - "> \n", - "`test_batch0_labels.jpg` shows test batch 0 labels\n", + "> \n", + "`test_batch0_labels.jpg` shows val batch 0 labels\n", "\n", - "> \n", - "`test_batch0_pred.jpg` shows test batch 0 _predictions_\n" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "7KN5ghjE6ZWh" - }, - "source": [ - "Training losses and performance metrics are also logged to [Tensorboard](https://www.tensorflow.org/tensorboard) and a custom `results.txt` logfile which is plotted as `results.png` (below) after training completes. Here we show YOLOv5s trained on COCO128 to 300 epochs, starting from scratch (blue), and from pretrained `--weights yolov5s.pt` (orange)." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "MDznIqPF7nk3" - }, - "source": [ + "> \n", + "`test_batch0_pred.jpg` shows val batch 0 _predictions_\n", + "\n", + "Training results are automatically logged to [Tensorboard](https://www.tensorflow.org/tensorboard) and [CSV](https://github.com/ultralytics/yolov5/pull/4148) as `results.csv`, which is plotted as `results.png` (below) after training completes. You can also plot any `results.csv` file manually:\n", + "\n", + "```python\n", "from utils.plots import plot_results \n", - "plot_results(save_dir='runs/train/exp') # plot all results*.txt as results.png\n", - "Image(filename='runs/train/exp/results.png', width=800)" - ], - "execution_count": null, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "lfrEegCSW3fK" - }, - "source": [ - "\n" + "plot_results('path/to/results.csv') # plot 'results.csv' as 'results.png'\n", + "```\n", + "\n", + "\"COCO128" ] }, { @@ -1124,7 +962,7 @@ "\n", "![CI CPU testing](https://github.com/ultralytics/yolov5/workflows/CI%20CPU%20testing/badge.svg)\n", "\n", - "If this badge is green, all [YOLOv5 GitHub Actions](https://github.com/ultralytics/yolov5/actions) Continuous Integration (CI) tests are currently passing. 
CI tests verify correct operation of YOLOv5 training ([train.py](https://github.com/ultralytics/yolov5/blob/master/train.py)), testing ([test.py](https://github.com/ultralytics/yolov5/blob/master/test.py)), inference ([detect.py](https://github.com/ultralytics/yolov5/blob/master/detect.py)) and export ([export.py](https://github.com/ultralytics/yolov5/blob/master/models/export.py)) on MacOS, Windows, and Ubuntu every 24 hours and on every commit.\n" + "If this badge is green, all [YOLOv5 GitHub Actions](https://github.com/ultralytics/yolov5/actions) Continuous Integration (CI) tests are currently passing. CI tests verify correct operation of YOLOv5 training ([train.py](https://github.com/ultralytics/yolov5/blob/master/train.py)), testing ([val.py](https://github.com/ultralytics/yolov5/blob/master/val.py)), inference ([detect.py](https://github.com/ultralytics/yolov5/blob/master/detect.py)) and export ([export.py](https://github.com/ultralytics/yolov5/blob/master/export.py)) on MacOS, Windows, and Ubuntu every 24 hours and on every commit.\n" ] }, { @@ -1138,20 +976,6 @@ "Optional extras below. Unit tests validate repo functionality and should be run on any PRs submitted.\n" ] }, - { - "cell_type": "code", - "metadata": { - "id": "gI6NoBev8Ib1" - }, - "source": [ - "# Re-clone repo\n", - "%cd ..\n", - "%rm -rf yolov5 && git clone https://github.com/ultralytics/yolov5\n", - "%cd yolov5" - ], - "execution_count": null, - "outputs": [] - }, { "cell_type": "code", "metadata": { @@ -1159,9 +983,9 @@ }, "source": [ "# Reproduce\n", - "for x in 'yolov5s', 'yolov5m', 'yolov5l', 'yolov5x':\n", - " !python test.py --weights {x}.pt --data coco.yaml --img 640 --conf 0.25 --iou 0.45 # speed\n", - " !python test.py --weights {x}.pt --data coco.yaml --img 640 --conf 0.001 --iou 0.65 # mAP" + "for x in 'yolov5n', 'yolov5s', 'yolov5m', 'yolov5l', 'yolov5x':\n", + " !python val.py --weights {x}.pt --data coco.yaml --img 640 --task speed # speed\n", + " !python val.py --weights {x}.pt --data coco.yaml --img 640 --conf 0.001 --iou 0.65 # mAP" ], "execution_count": null, "outputs": [] @@ -1179,7 +1003,7 @@ "model = torch.hub.load('ultralytics/yolov5', 'yolov5s')\n", "\n", "# Images\n", - "dir = 'https://github.com/ultralytics/yolov5/raw/master/data/images/'\n", + "dir = 'https://ultralytics.com/images/'\n", "imgs = [dir + f for f in ('zidane.jpg', 'bus.jpg')] # batch of images\n", "\n", "# Inference\n", @@ -1195,23 +1019,23 @@ "id": "FGH0ZjkGjejy" }, "source": [ - "# Unit tests\n", + "# CI Checks\n", "%%shell\n", "export PYTHONPATH=\"$PWD\" # to run *.py. 
files in subdirectories\n", - "\n", "rm -rf runs # remove runs/\n", - "for m in yolov5s; do # models\n", - " python train.py --weights $m.pt --epochs 3 --img 320 --device 0 # train pretrained\n", - " python train.py --weights '' --cfg $m.yaml --epochs 3 --img 320 --device 0 # train scratch\n", + "for m in yolov5n; do # models\n", + " python train.py --img 64 --batch 32 --weights $m.pt --epochs 1 --device 0 # train pretrained\n", + " python train.py --img 64 --batch 32 --weights '' --cfg $m.yaml --epochs 1 --device 0 # train scratch\n", " for d in 0 cpu; do # devices\n", + " python val.py --weights $m.pt --device $d # val official\n", + " python val.py --weights runs/train/exp/weights/best.pt --device $d # val custom\n", " python detect.py --weights $m.pt --device $d # detect official\n", " python detect.py --weights runs/train/exp/weights/best.pt --device $d # detect custom\n", - " python test.py --weights $m.pt --device $d # test official\n", - " python test.py --weights runs/train/exp/weights/best.pt --device $d # test custom\n", " done\n", " python hubconf.py # hub\n", - " python models/yolo.py --cfg $m.yaml # inspect\n", - " python models/export.py --weights $m.pt --img 640 --batch 1 # export\n", + " python models/yolo.py --cfg $m.yaml # build PyTorch model\n", + " python models/tf.py --weights $m.pt # build TensorFlow model\n", + " python export.py --img 64 --batch 1 --weights $m.pt --include torchscript onnx # export\n", "done" ], "execution_count": null, @@ -1224,11 +1048,11 @@ }, "source": [ "# Profile\n", - "from utils.torch_utils import profile \n", + "from utils.torch_utils import profile\n", "\n", "m1 = lambda x: x * torch.sigmoid(x)\n", "m2 = torch.nn.SiLU()\n", - "profile(x=torch.randn(16, 3, 640, 640), ops=[m1, m2], n=100)" + "results = profile(input=torch.randn(16, 3, 640, 640), ops=[m1, m2], n=100)" ], "execution_count": null, "outputs": [] @@ -1253,11 +1077,26 @@ }, "source": [ "# VOC\n", - "for b, m in zip([64, 48, 32, 16], ['yolov5s', 'yolov5m', 'yolov5l', 'yolov5x']): # zip(batch_size, model)\n", - " !python train.py --batch {b} --weights {m}.pt --data voc.yaml --epochs 50 --cache --img 512 --nosave --hyp hyp.finetune.yaml --project VOC --name {m}" + "for b, m in zip([64, 64, 32, 16], ['yolov5s', 'yolov5m', 'yolov5l', 'yolov5x']): # zip(batch_size, model)\n", + " !python train.py --batch {b} --weights {m}.pt --data VOC.yaml --epochs 50 --cache --img 512 --nosave --hyp hyp.VOC.yaml --project VOC --name {m}" + ], + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "VTRwsvA9u7ln" + }, + "source": [ + "# TensorRT \n", + "# https://docs.nvidia.com/deeplearning/tensorrt/install-guide/index.html#installing-pip\n", + "!pip install -U nvidia-tensorrt --index-url https://pypi.ngc.nvidia.com # install\n", + "!python export.py --weights yolov5s.pt --include engine --imgsz 640 640 --device 0 # export\n", + "!python detect.py --weights yolov5s.engine --imgsz 640 640 --device 0 # inference" ], "execution_count": null, "outputs": [] } ] -} \ No newline at end of file +} diff --git a/utility.py b/utility.py deleted file mode 100644 index 520ba3d3a576..000000000000 --- a/utility.py +++ /dev/null @@ -1,49 +0,0 @@ -""" -Utility functions for working with the YOLOv3 models. - -################ -Command Help: -usage: utility.py [-h] {strip} ... 
- -Utility functions for working with the YOLOv3 models - -positional arguments: - {strip} - -optional arguments: - -h, --help show this help message and exit - -################ -Strip Command Help: -usage: utility.py strip [-h] weights - -Strip the extra information from a models checkpoint for training from scratch - -positional arguments: - weights weights path - -optional arguments: - -h, --help show this help message and exit -""" - -import argparse - -from utils.general import strip_optimizer - -STRIP_COMMAND = "strip" - -if __name__ == '__main__': - parser = argparse.ArgumentParser(description="Utility functions for working with the YOLOv3 models") - subparsers = parser.add_subparsers(dest="command") - strip_subparser = subparsers.add_parser( - STRIP_COMMAND, - description="Strip the extra information from a models checkpoint for training from scratch", - ) - strip_subparser.add_argument('weights', type=str, help='weights path') - args = parser.parse_args() - - if args.command == STRIP_COMMAND: - print(f"stripping extras from {args.weights}") - strip_optimizer(args.weights) - else: - raise ValueError(f"unknown command given of {args.command}") diff --git a/utils/__init__.py b/utils/__init__.py index e69de29bb2d1..a63c473a4340 100644 --- a/utils/__init__.py +++ b/utils/__init__.py @@ -0,0 +1,36 @@ +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license +""" +utils/initialization +""" + + +def notebook_init(verbose=True): + # Check system software and hardware + print('Checking setup...') + + import os + import shutil + + from utils.general import check_requirements, emojis, is_colab + from utils.torch_utils import select_device # imports + + check_requirements(('psutil', 'IPython')) + import psutil + from IPython import display # to display images and clear console output + + if is_colab(): + shutil.rmtree('/content/sample_data', ignore_errors=True) # remove colab /sample_data directory + + # System info + if verbose: + gb = 1 << 30 # bytes to GiB (1024 ** 3) + ram = psutil.virtual_memory().total + total, used, free = shutil.disk_usage("/") + display.clear_output() + s = f'({os.cpu_count()} CPUs, {ram / gb:.1f} GB RAM, {(total - free) / gb:.1f}/{total / gb:.1f} GB disk)' + else: + s = '' + + select_device(newline=False) + print(emojis(f'Setup complete βœ… {s}')) + return display diff --git a/utils/activations.py b/utils/activations.py index 07e864bcd241..b119d915e54c 100644 --- a/utils/activations.py +++ b/utils/activations.py @@ -1,10 +1,12 @@ -# Activation functions +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license +""" +Activation functions +""" import torch import torch.nn as nn import torch.nn.functional as F - def is_activation(mod, act_types=None): if not act_types: act_types = (nn.ELU, nn.Hardshrink, nn.Hardsigmoid, nn.Hardtanh, nn.Hardswish, nn.LeakyReLU, @@ -23,7 +25,6 @@ def replace_activations(mod, act, act_types=None): else: replace_activations(child, act, act_types) - # SiLU https://arxiv.org/pdf/1606.08415.pdf ---------------------------------------------------------------------------- class SiLU(nn.Module): # export-friendly version of nn.SiLU() @staticmethod @@ -34,8 +35,8 @@ def forward(x): class Hardswish(nn.Module): # export-friendly version of nn.Hardswish() @staticmethod def forward(x): - # return x * F.hardsigmoid(x) # for torchscript and CoreML - return x * F.hardtanh(x + 3, 0., 6.) / 6. 
# for torchscript, CoreML and ONNX + # return x * F.hardsigmoid(x) # for TorchScript and CoreML + return x * F.hardtanh(x + 3, 0.0, 6.0) / 6.0 # for TorchScript, CoreML and ONNX # Mish https://github.com/digantamisra98/Mish -------------------------------------------------------------------------- diff --git a/utils/augmentations.py b/utils/augmentations.py new file mode 100644 index 000000000000..0311b97b63db --- /dev/null +++ b/utils/augmentations.py @@ -0,0 +1,277 @@ +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license +""" +Image augmentation functions +""" + +import math +import random + +import cv2 +import numpy as np + +from utils.general import LOGGER, check_version, colorstr, resample_segments, segment2box +from utils.metrics import bbox_ioa + + +class Albumentations: + # YOLOv5 Albumentations class (optional, only used if package is installed) + def __init__(self): + self.transform = None + try: + import albumentations as A + check_version(A.__version__, '1.0.3', hard=True) # version requirement + + self.transform = A.Compose([ + A.Blur(p=0.01), + A.MedianBlur(p=0.01), + A.ToGray(p=0.01), + A.CLAHE(p=0.01), + A.RandomBrightnessContrast(p=0.0), + A.RandomGamma(p=0.0), + A.ImageCompression(quality_lower=75, p=0.0)], + bbox_params=A.BboxParams(format='yolo', label_fields=['class_labels'])) + + LOGGER.info(colorstr('albumentations: ') + ', '.join(f'{x}' for x in self.transform.transforms if x.p)) + except ImportError: # package not installed, skip + pass + except Exception as e: + LOGGER.info(colorstr('albumentations: ') + f'{e}') + + def __call__(self, im, labels, p=1.0): + if self.transform and random.random() < p: + new = self.transform(image=im, bboxes=labels[:, 1:], class_labels=labels[:, 0]) # transformed + im, labels = new['image'], np.array([[c, *b] for c, b in zip(new['class_labels'], new['bboxes'])]) + return im, labels + + +def augment_hsv(im, hgain=0.5, sgain=0.5, vgain=0.5): + # HSV color-space augmentation + if hgain or sgain or vgain: + r = np.random.uniform(-1, 1, 3) * [hgain, sgain, vgain] + 1 # random gains + hue, sat, val = cv2.split(cv2.cvtColor(im, cv2.COLOR_BGR2HSV)) + dtype = im.dtype # uint8 + + x = np.arange(0, 256, dtype=r.dtype) + lut_hue = ((x * r[0]) % 180).astype(dtype) + lut_sat = np.clip(x * r[1], 0, 255).astype(dtype) + lut_val = np.clip(x * r[2], 0, 255).astype(dtype) + + im_hsv = cv2.merge((cv2.LUT(hue, lut_hue), cv2.LUT(sat, lut_sat), cv2.LUT(val, lut_val))) + cv2.cvtColor(im_hsv, cv2.COLOR_HSV2BGR, dst=im) # no return needed + + +def hist_equalize(im, clahe=True, bgr=False): + # Equalize histogram on BGR image 'im' with im.shape(n,m,3) and range 0-255 + yuv = cv2.cvtColor(im, cv2.COLOR_BGR2YUV if bgr else cv2.COLOR_RGB2YUV) + if clahe: + c = cv2.createCLAHE(clipLimit=2.0, tileGridSize=(8, 8)) + yuv[:, :, 0] = c.apply(yuv[:, :, 0]) + else: + yuv[:, :, 0] = cv2.equalizeHist(yuv[:, :, 0]) # equalize Y channel histogram + return cv2.cvtColor(yuv, cv2.COLOR_YUV2BGR if bgr else cv2.COLOR_YUV2RGB) # convert YUV image to RGB + + +def replicate(im, labels): + # Replicate labels + h, w = im.shape[:2] + boxes = labels[:, 1:].astype(int) + x1, y1, x2, y2 = boxes.T + s = ((x2 - x1) + (y2 - y1)) / 2 # side length (pixels) + for i in s.argsort()[:round(s.size * 0.5)]: # smallest indices + x1b, y1b, x2b, y2b = boxes[i] + bh, bw = y2b - y1b, x2b - x1b + yc, xc = int(random.uniform(0, h - bh)), int(random.uniform(0, w - bw)) # offset x, y + x1a, y1a, x2a, y2a = [xc, yc, xc + bw, yc + bh] + im[y1a:y2a, x1a:x2a] = im[y1b:y2b, x1b:x2b] # im4[ymin:ymax, xmin:xmax] + 
labels = np.append(labels, [[labels[i, 0], x1a, y1a, x2a, y2a]], axis=0) + + return im, labels + + +def letterbox(im, new_shape=(640, 640), color=(114, 114, 114), auto=True, scaleFill=False, scaleup=True, stride=32): + # Resize and pad image while meeting stride-multiple constraints + shape = im.shape[:2] # current shape [height, width] + if isinstance(new_shape, int): + new_shape = (new_shape, new_shape) + + # Scale ratio (new / old) + r = min(new_shape[0] / shape[0], new_shape[1] / shape[1]) + if not scaleup: # only scale down, do not scale up (for better val mAP) + r = min(r, 1.0) + + # Compute padding + ratio = r, r # width, height ratios + new_unpad = int(round(shape[1] * r)), int(round(shape[0] * r)) + dw, dh = new_shape[1] - new_unpad[0], new_shape[0] - new_unpad[1] # wh padding + if auto: # minimum rectangle + dw, dh = np.mod(dw, stride), np.mod(dh, stride) # wh padding + elif scaleFill: # stretch + dw, dh = 0.0, 0.0 + new_unpad = (new_shape[1], new_shape[0]) + ratio = new_shape[1] / shape[1], new_shape[0] / shape[0] # width, height ratios + + dw /= 2 # divide padding into 2 sides + dh /= 2 + + if shape[::-1] != new_unpad: # resize + im = cv2.resize(im, new_unpad, interpolation=cv2.INTER_LINEAR) + top, bottom = int(round(dh - 0.1)), int(round(dh + 0.1)) + left, right = int(round(dw - 0.1)), int(round(dw + 0.1)) + im = cv2.copyMakeBorder(im, top, bottom, left, right, cv2.BORDER_CONSTANT, value=color) # add border + return im, ratio, (dw, dh) + + +def random_perspective(im, targets=(), segments=(), degrees=10, translate=.1, scale=.1, shear=10, perspective=0.0, + border=(0, 0)): + # torchvision.transforms.RandomAffine(degrees=(-10, 10), translate=(0.1, 0.1), scale=(0.9, 1.1), shear=(-10, 10)) + # targets = [cls, xyxy] + + height = im.shape[0] + border[0] * 2 # shape(h,w,c) + width = im.shape[1] + border[1] * 2 + + # Center + C = np.eye(3) + C[0, 2] = -im.shape[1] / 2 # x translation (pixels) + C[1, 2] = -im.shape[0] / 2 # y translation (pixels) + + # Perspective + P = np.eye(3) + P[2, 0] = random.uniform(-perspective, perspective) # x perspective (about y) + P[2, 1] = random.uniform(-perspective, perspective) # y perspective (about x) + + # Rotation and Scale + R = np.eye(3) + a = random.uniform(-degrees, degrees) + # a += random.choice([-180, -90, 0, 90]) # add 90deg rotations to small rotations + s = random.uniform(1 - scale, 1 + scale) + # s = 2 ** random.uniform(-scale, scale) + R[:2] = cv2.getRotationMatrix2D(angle=a, center=(0, 0), scale=s) + + # Shear + S = np.eye(3) + S[0, 1] = math.tan(random.uniform(-shear, shear) * math.pi / 180) # x shear (deg) + S[1, 0] = math.tan(random.uniform(-shear, shear) * math.pi / 180) # y shear (deg) + + # Translation + T = np.eye(3) + T[0, 2] = random.uniform(0.5 - translate, 0.5 + translate) * width # x translation (pixels) + T[1, 2] = random.uniform(0.5 - translate, 0.5 + translate) * height # y translation (pixels) + + # Combined rotation matrix + M = T @ S @ R @ P @ C # order of operations (right to left) is IMPORTANT + if (border[0] != 0) or (border[1] != 0) or (M != np.eye(3)).any(): # image changed + if perspective: + im = cv2.warpPerspective(im, M, dsize=(width, height), borderValue=(114, 114, 114)) + else: # affine + im = cv2.warpAffine(im, M[:2], dsize=(width, height), borderValue=(114, 114, 114)) + + # Visualize + # import matplotlib.pyplot as plt + # ax = plt.subplots(1, 2, figsize=(12, 6))[1].ravel() + # ax[0].imshow(im[:, :, ::-1]) # base + # ax[1].imshow(im2[:, :, ::-1]) # warped + + # Transform label coordinates + n = 
len(targets) + if n: + use_segments = any(x.any() for x in segments) + new = np.zeros((n, 4)) + if use_segments: # warp segments + segments = resample_segments(segments) # upsample + for i, segment in enumerate(segments): + xy = np.ones((len(segment), 3)) + xy[:, :2] = segment + xy = xy @ M.T # transform + xy = xy[:, :2] / xy[:, 2:3] if perspective else xy[:, :2] # perspective rescale or affine + + # clip + new[i] = segment2box(xy, width, height) + + else: # warp boxes + xy = np.ones((n * 4, 3)) + xy[:, :2] = targets[:, [1, 2, 3, 4, 1, 4, 3, 2]].reshape(n * 4, 2) # x1y1, x2y2, x1y2, x2y1 + xy = xy @ M.T # transform + xy = (xy[:, :2] / xy[:, 2:3] if perspective else xy[:, :2]).reshape(n, 8) # perspective rescale or affine + + # create new boxes + x = xy[:, [0, 2, 4, 6]] + y = xy[:, [1, 3, 5, 7]] + new = np.concatenate((x.min(1), y.min(1), x.max(1), y.max(1))).reshape(4, n).T + + # clip + new[:, [0, 2]] = new[:, [0, 2]].clip(0, width) + new[:, [1, 3]] = new[:, [1, 3]].clip(0, height) + + # filter candidates + i = box_candidates(box1=targets[:, 1:5].T * s, box2=new.T, area_thr=0.01 if use_segments else 0.10) + targets = targets[i] + targets[:, 1:5] = new[i] + + return im, targets + + +def copy_paste(im, labels, segments, p=0.5): + # Implement Copy-Paste augmentation https://arxiv.org/abs/2012.07177, labels as nx5 np.array(cls, xyxy) + n = len(segments) + if p and n: + h, w, c = im.shape # height, width, channels + im_new = np.zeros(im.shape, np.uint8) + for j in random.sample(range(n), k=round(p * n)): + l, s = labels[j], segments[j] + box = w - l[3], l[2], w - l[1], l[4] + ioa = bbox_ioa(box, labels[:, 1:5]) # intersection over area + if (ioa < 0.30).all(): # allow 30% obscuration of existing labels + labels = np.concatenate((labels, [[l[0], *box]]), 0) + segments.append(np.concatenate((w - s[:, 0:1], s[:, 1:2]), 1)) + cv2.drawContours(im_new, [segments[j].astype(np.int32)], -1, (255, 255, 255), cv2.FILLED) + + result = cv2.bitwise_and(src1=im, src2=im_new) + result = cv2.flip(result, 1) # augment segments (flip left-right) + i = result > 0 # pixels to replace + # i[:, :] = result.max(2).reshape(h, w, 1) # act over ch + im[i] = result[i] # cv2.imwrite('debug.jpg', im) # debug + + return im, labels, segments + + +def cutout(im, labels, p=0.5): + # Applies image cutout augmentation https://arxiv.org/abs/1708.04552 + if random.random() < p: + h, w = im.shape[:2] + scales = [0.5] * 1 + [0.25] * 2 + [0.125] * 4 + [0.0625] * 8 + [0.03125] * 16 # image size fraction + for s in scales: + mask_h = random.randint(1, int(h * s)) # create random masks + mask_w = random.randint(1, int(w * s)) + + # box + xmin = max(0, random.randint(0, w) - mask_w // 2) + ymin = max(0, random.randint(0, h) - mask_h // 2) + xmax = min(w, xmin + mask_w) + ymax = min(h, ymin + mask_h) + + # apply random color mask + im[ymin:ymax, xmin:xmax] = [random.randint(64, 191) for _ in range(3)] + + # return unobscured labels + if len(labels) and s > 0.03: + box = np.array([xmin, ymin, xmax, ymax], dtype=np.float32) + ioa = bbox_ioa(box, labels[:, 1:5]) # intersection over area + labels = labels[ioa < 0.60] # remove >60% obscured labels + + return labels + + +def mixup(im, labels, im2, labels2): + # Applies MixUp augmentation https://arxiv.org/pdf/1710.09412.pdf + r = np.random.beta(32.0, 32.0) # mixup ratio, alpha=beta=32.0 + im = (im * r + im2 * (1 - r)).astype(np.uint8) + labels = np.concatenate((labels, labels2), 0) + return im, labels + + +def box_candidates(box1, box2, wh_thr=2, ar_thr=100, area_thr=0.1, eps=1e-16): # 
box1(4,n), box2(4,n) + # Compute candidate boxes: box1 before augment, box2 after augment, wh_thr (pixels), aspect_ratio_thr, area_ratio + w1, h1 = box1[2] - box1[0], box1[3] - box1[1] + w2, h2 = box2[2] - box2[0], box2[3] - box2[1] + ar = np.maximum(w2 / (h2 + eps), h2 / (w2 + eps)) # aspect ratio + return (w2 > wh_thr) & (h2 > wh_thr) & (w2 * h2 / (w1 * h1 + eps) > area_thr) & (ar < ar_thr) # candidates diff --git a/utils/autoanchor.py b/utils/autoanchor.py index 87dc394c832e..77518abe9889 100644 --- a/utils/autoanchor.py +++ b/utils/autoanchor.py @@ -1,28 +1,32 @@ -# Auto-anchor utils +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license +""" +AutoAnchor utils +""" + +import random import numpy as np import torch import yaml from tqdm import tqdm -from utils.general import colorstr +from utils.general import LOGGER, colorstr, emojis + +PREFIX = colorstr('AutoAnchor: ') def check_anchor_order(m): # Check anchor order against stride order for YOLOv5 Detect() module m, and correct if necessary - a = m.anchor_grid.prod(-1).view(-1) # anchor area + a = m.anchors.prod(-1).mean(-1).view(-1) # mean anchor area per output layer da = a[-1] - a[0] # delta a ds = m.stride[-1] - m.stride[0] # delta s - if da.sign() != ds.sign(): # same order - print('Reversing anchor order') + if da and (da.sign() != ds.sign()): # same order + LOGGER.info(f'{PREFIX}Reversing anchor order') m.anchors[:] = m.anchors.flip(0) - m.anchor_grid[:] = m.anchor_grid.flip(0) def check_anchors(dataset, model, thr=4.0, imgsz=640): # Check anchor fit to data, recompute if necessary - prefix = colorstr('autoanchor: ') - print(f'\n{prefix}Analyzing anchors... ', end='') m = model.module.model[-1] if hasattr(model, 'module') else model.model[-1] # Detect() shapes = imgsz * dataset.shapes / dataset.shapes.max(1, keepdims=True) scale = np.random.uniform(0.9, 1.1, size=(shapes.shape[0], 1)) # augment scale @@ -30,39 +34,42 @@ def check_anchors(dataset, model, thr=4.0, imgsz=640): def metric(k): # compute metric r = wh[:, None] / k[None] - x = torch.min(r, 1. / r).min(2)[0] # ratio metric + x = torch.min(r, 1 / r).min(2)[0] # ratio metric best = x.max(1)[0] # best_x - aat = (x > 1. / thr).float().sum(1).mean() # anchors above threshold - bpr = (best > 1. / thr).float().mean() # best possible recall + aat = (x > 1 / thr).float().sum(1).mean() # anchors above threshold + bpr = (best > 1 / thr).float().mean() # best possible recall return bpr, aat - anchors = m.anchor_grid.clone().cpu().view(-1, 2) # current anchors - bpr, aat = metric(anchors) - print(f'anchors/target = {aat:.2f}, Best Possible Recall (BPR) = {bpr:.4f}', end='') - if bpr < 0.98: # threshold to recompute - print('. Attempting to improve anchors, please wait...') - na = m.anchor_grid.numel() // 2 # number of anchors + stride = m.stride.to(m.anchors.device).view(-1, 1, 1) # model strides + anchors = m.anchors.clone() * stride # current anchors + bpr, aat = metric(anchors.cpu().view(-1, 2)) + s = f'\n{PREFIX}{aat:.2f} anchors/target, {bpr:.3f} Best Possible Recall (BPR). 
' + if bpr > 0.98: # threshold to recompute + LOGGER.info(emojis(f'{s}Current anchors are a good fit to dataset βœ…')) + else: + LOGGER.info(emojis(f'{s}Anchors are a poor fit to dataset ⚠️, attempting to improve...')) + na = m.anchors.numel() // 2 # number of anchors try: anchors = kmean_anchors(dataset, n=na, img_size=imgsz, thr=thr, gen=1000, verbose=False) except Exception as e: - print(f'{prefix}ERROR: {e}') + LOGGER.info(f'{PREFIX}ERROR: {e}') new_bpr = metric(anchors)[0] if new_bpr > bpr: # replace anchors anchors = torch.tensor(anchors, device=m.anchors.device).type_as(m.anchors) - m.anchor_grid[:] = anchors.clone().view_as(m.anchor_grid) # for inference - m.anchors[:] = anchors.clone().view_as(m.anchors) / m.stride.to(m.anchors.device).view(-1, 1, 1) # loss - check_anchor_order(m) - print(f'{prefix}New anchors saved to model. Update model *.yaml to use these anchors in the future.') + m.anchors[:] = anchors.clone().view_as(m.anchors) + check_anchor_order(m) # must be in pixel-space (not grid-space) + m.anchors /= stride + s = f'{PREFIX}Done βœ… (optional: update model *.yaml to use these anchors in the future)' else: - print(f'{prefix}Original anchors better than new anchors. Proceeding with original anchors.') - print('') # newline + s = f'{PREFIX}Done ⚠️ (original anchors better than new anchors, proceeding with original anchors)' + LOGGER.info(emojis(s)) -def kmean_anchors(path='./data/coco128.yaml', n=9, img_size=640, thr=4.0, gen=1000, verbose=True): +def kmean_anchors(dataset='./data/coco128.yaml', n=9, img_size=640, thr=4.0, gen=1000, verbose=True): """ Creates kmeans-evolved anchors from training dataset Arguments: - path: path to dataset *.yaml, or a loaded dataset + dataset: path to data.yaml, or a loaded dataset n: number of anchors img_size: image size used for training thr: anchor-label wh ratio threshold hyperparameter hyp['anchor_t'] used for training, default=4.0 @@ -77,12 +84,12 @@ def kmean_anchors(path='./data/coco128.yaml', n=9, img_size=640, thr=4.0, gen=10 """ from scipy.cluster.vq import kmeans - thr = 1. / thr - prefix = colorstr('autoanchor: ') + npr = np.random + thr = 1 / thr def metric(k, wh): # compute metrics r = wh[:, None] / k[None] - x = torch.min(r, 1. 
/ r).min(2)[0] # ratio metric + x = torch.min(r, 1 / r).min(2)[0] # ratio metric # x = wh_iou(wh, torch.tensor(k)) # iou metric return x, x.max(1)[0] # x, best_x @@ -90,24 +97,24 @@ def anchor_fitness(k): # mutation fitness _, best = metric(torch.tensor(k, dtype=torch.float32), wh) return (best * (best > thr).float()).mean() # fitness - def print_results(k): + def print_results(k, verbose=True): k = k[np.argsort(k.prod(1))] # sort small to large x, best = metric(k, wh0) bpr, aat = (best > thr).float().mean(), (x > thr).float().mean() * n # best possible recall, anch > thr - print(f'{prefix}thr={thr:.2f}: {bpr:.4f} best possible recall, {aat:.2f} anchors past thr') - print(f'{prefix}n={n}, img_size={img_size}, metric_all={x.mean():.3f}/{best.mean():.3f}-mean/best, ' - f'past_thr={x[x > thr].mean():.3f}-mean: ', end='') + s = f'{PREFIX}thr={thr:.2f}: {bpr:.4f} best possible recall, {aat:.2f} anchors past thr\n' \ + f'{PREFIX}n={n}, img_size={img_size}, metric_all={x.mean():.3f}/{best.mean():.3f}-mean/best, ' \ + f'past_thr={x[x > thr].mean():.3f}-mean: ' for i, x in enumerate(k): - print('%i,%i' % (round(x[0]), round(x[1])), end=', ' if i < len(k) - 1 else '\n') # use in *.cfg + s += '%i,%i, ' % (round(x[0]), round(x[1])) + if verbose: + LOGGER.info(s[:-2]) return k - if isinstance(path, str): # *.yaml file - with open(path) as f: + if isinstance(dataset, str): # *.yaml file + with open(dataset, errors='ignore') as f: data_dict = yaml.safe_load(f) # model dict from utils.datasets import LoadImagesAndLabels dataset = LoadImagesAndLabels(data_dict['train'], augment=True, rect=True) - else: - dataset = path # dataset # Get label wh shapes = img_size * dataset.shapes / dataset.shapes.max(1, keepdims=True) @@ -116,19 +123,22 @@ def print_results(k): # Filter i = (wh0 < 3.0).any(1).sum() if i: - print(f'{prefix}WARNING: Extremely small objects found. 
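# Illustrative sketch (not part of the diff): what anchor_fitness() above rewards. Targets
# whose best ratio falls below the (inverted) threshold contribute zero, so the evolution
# optimizes coverage and tightness at once. The best values are made up.
import torch

thr = 1 / 4.0                                   # inverted threshold, as in kmean_anchors()
best = torch.tensor([0.9, 0.5, 0.2])            # best-anchor ratio metric per target
fitness = (best * (best > thr).float()).mean()  # 0.2 < thr contributes nothing
print(f'{fitness:.4f}')                         # (0.9 + 0.5 + 0.0) / 3 = 0.4667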
{i} of {len(wh0)} labels are < 3 pixels in size.') + LOGGER.info(f'{PREFIX}WARNING: Extremely small objects found: {i} of {len(wh0)} labels are < 3 pixels in size') wh = wh0[(wh0 >= 2.0).any(1)] # filter > 2 pixels - # wh = wh * (np.random.rand(wh.shape[0], 1) * 0.9 + 0.1) # multiply by random scale 0-1 - - # Kmeans calculation - print(f'{prefix}Running kmeans for {n} anchors on {len(wh)} points...') - s = wh.std(0) # sigmas for whitening - k, dist = kmeans(wh / s, n, iter=30) # points, mean distance - assert len(k) == n, print(f'{prefix}ERROR: scipy.cluster.vq.kmeans requested {n} points but returned only {len(k)}') - k *= s - wh = torch.tensor(wh, dtype=torch.float32) # filtered - wh0 = torch.tensor(wh0, dtype=torch.float32) # unfiltered - k = print_results(k) + # wh = wh * (npr.rand(wh.shape[0], 1) * 0.9 + 0.1) # multiply by random scale 0-1 + + # Kmeans init + try: + LOGGER.info(f'{PREFIX}Running kmeans for {n} anchors on {len(wh)} points...') + assert n <= len(wh) # apply overdetermined constraint + s = wh.std(0) # sigmas for whitening + k = kmeans(wh / s, n, iter=30)[0] * s # points + assert n == len(k) # kmeans may return fewer points than requested if wh is insufficient or too similar + except Exception: + LOGGER.warning(f'{PREFIX}WARNING: switching strategies from kmeans to random init') + k = np.sort(npr.rand(n * 2)).reshape(n, 2) * img_size # random init + wh, wh0 = (torch.tensor(x, dtype=torch.float32) for x in (wh, wh0)) + k = print_results(k, verbose=False) # Plot # k, d = [None] * 20, [None] * 20 @@ -143,19 +153,18 @@ def print_results(k): # fig.savefig('wh.png', dpi=200) # Evolve - npr = np.random f, sh, mp, s = anchor_fitness(k), k.shape, 0.9, 0.1 # fitness, generations, mutation prob, sigma - pbar = tqdm(range(gen), desc=f'{prefix}Evolving anchors with Genetic Algorithm:') # progress bar + pbar = tqdm(range(gen), bar_format='{l_bar}{bar:10}{r_bar}{bar:-10b}') # progress bar for _ in pbar: v = np.ones(sh) while (v == 1).all(): # mutate until a change occurs (prevent duplicates) - v = ((npr.random(sh) < mp) * npr.random() * npr.randn(*sh) * s + 1).clip(0.3, 3.0) + v = ((npr.random(sh) < mp) * random.random() * npr.randn(*sh) * s + 1).clip(0.3, 3.0) kg = (k.copy() * v).clip(min=2.0) fg = anchor_fitness(kg) if fg > f: f, k = fg, kg.copy() - pbar.desc = f'{prefix}Evolving anchors with Genetic Algorithm: fitness = {f:.4f}' + pbar.desc = f'{PREFIX}Evolving anchors with Genetic Algorithm: fitness = {f:.4f}' if verbose: - print_results(k) + print_results(k, verbose) return print_results(k) diff --git a/utils/autobatch.py b/utils/autobatch.py new file mode 100644 index 000000000000..e53b4787b87d --- /dev/null +++ b/utils/autobatch.py @@ -0,0 +1,58 @@ +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license +""" +Auto-batch utils +""" + +from copy import deepcopy + +import numpy as np +import torch +from torch.cuda import amp + +from utils.general import LOGGER, colorstr +from utils.torch_utils import profile + + +def check_train_batch_size(model, imgsz=640): + # Check YOLOv5 training batch size + with amp.autocast(): + return autobatch(deepcopy(model).train(), imgsz) # compute optimal batch size + + +def autobatch(model, imgsz=640, fraction=0.9, batch_size=16): + # Automatically estimate best batch size to use `fraction` of available CUDA memory + # Usage: + # import torch + # from utils.autobatch import autobatch + # model = torch.hub.load('ultralytics/yolov5', 'yolov5s', autoshape=False) + # print(autobatch(model)) + + prefix = colorstr('AutoBatch: ') + LOGGER.info(f'{prefix}Computing 
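# Illustrative sketch (not part of the diff): the genetic-algorithm step of kmean_anchors()
# above. Each generation multiplies the anchors by a random mutation and keeps the result
# only if fitness improves; fitness() below is a stand-in for anchor_fitness().
import numpy as np

def fitness(k):                # stand-in objective: prefer anchors near (50, 50)
    return -np.abs(k - 50).mean()

npr = np.random
k = npr.rand(9, 2) * 100       # 9 anchors, random init
f, sh, mp, s = fitness(k), k.shape, 0.9, 0.1  # fitness, shape, mutation prob, sigma
for _ in range(1000):          # gen=1000 as in the default
    v = np.ones(sh)
    while (v == 1).all():      # mutate until a change occurs (prevents duplicates)
        v = ((npr.random(sh) < mp) * npr.random() * npr.randn(*sh) * s + 1).clip(0.3, 3.0)
    kg = (k.copy() * v).clip(min=2.0)  # mutated candidate, sides kept >= 2 pixels
    fg = fitness(kg)
    if fg > f:                 # greedy: keep the mutation only if it improves fitness
        f, k = fg, kg.copy()
print(f'evolved fitness = {f:.4f}')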
optimal batch size for --imgsz {imgsz}') + device = next(model.parameters()).device # get model device + if device.type == 'cpu': + LOGGER.info(f'{prefix}CUDA not detected, using default CPU batch-size {batch_size}') + return batch_size + + gb = 1 << 30 # bytes to GiB (1024 ** 3) + d = str(device).upper() # 'CUDA:0' + properties = torch.cuda.get_device_properties(device) # device properties + t = properties.total_memory / gb # (GiB) + r = torch.cuda.memory_reserved(device) / gb # (GiB) + a = torch.cuda.memory_allocated(device) / gb # (GiB) + f = t - (r + a) # free inside reserved + LOGGER.info(f'{prefix}{d} ({properties.name}) {t:.2f}G total, {r:.2f}G reserved, {a:.2f}G allocated, {f:.2f}G free') + + batch_sizes = [1, 2, 4, 8, 16] + try: + img = [torch.zeros(b, 3, imgsz, imgsz) for b in batch_sizes] + y = profile(img, model, n=3, device=device) + except Exception as e: + LOGGER.warning(f'{prefix}{e}') + + y = [x[2] for x in y if x] # memory [2] + batch_sizes = batch_sizes[:len(y)] + p = np.polyfit(batch_sizes, y, deg=1) # first degree polynomial fit + b = int((f * fraction - p[1]) / p[0]) # y intercept (optimal batch size) + LOGGER.info(f'{prefix}Using batch-size {b} for {d} {t * fraction:.2f}G/{t:.2f}G ({fraction * 100:.0f}%)') + return b diff --git a/utils/aws/resume.py b/utils/aws/resume.py index 4b0d4246b594..b21731c979a1 100644 --- a/utils/aws/resume.py +++ b/utils/aws/resume.py @@ -8,7 +8,10 @@ import torch import yaml -sys.path.append('./') # to run '$ python *.py' files in subdirectories +FILE = Path(__file__).resolve() +ROOT = FILE.parents[2] # YOLOv5 root directory +if str(ROOT) not in sys.path: + sys.path.append(str(ROOT)) # add ROOT to PATH port = 0 # --master_port path = Path('').resolve() @@ -18,7 +21,7 @@ continue # Load opt.yaml - with open(last.parent.parent / 'opt.yaml') as f: + with open(last.parent.parent / 'opt.yaml', errors='ignore') as f: opt = yaml.safe_load(f) # Get device count @@ -28,7 +31,7 @@ if ddp: # multi-GPU port += 1 - cmd = f'python -m torch.distributed.launch --nproc_per_node {nd} --master_port {port} train.py --resume {last}' + cmd = f'python -m torch.distributed.run --nproc_per_node {nd} --master_port {port} train.py --resume {last}' else: # single-GPU cmd = f'python train.py --resume {last}' diff --git a/utils/aws/userdata.sh b/utils/aws/userdata.sh index 890606b76a06..5fc1332ac1b0 100644 --- a/utils/aws/userdata.sh +++ b/utils/aws/userdata.sh @@ -7,9 +7,9 @@ cd home/ubuntu if [ ! -d yolov5 ]; then echo "Running first-time script." # install dependencies, download COCO, pull Docker - git clone https://github.com/ultralytics/yolov5 && sudo chmod -R 777 yolov5 + git clone https://github.com/ultralytics/yolov5 -b master && sudo chmod -R 777 yolov5 cd yolov5 - bash data/scripts/get_coco.sh && echo "Data done." & + bash data/scripts/get_coco.sh && echo "COCO done." & sudo docker pull ultralytics/yolov5:latest && echo "Docker done." & python -m pip install --upgrade pip && pip install -r requirements.txt && python detect.py && echo "Requirements done." & wait && echo "All tasks done." 
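# Illustrative sketch (not part of the diff): the linear fit autobatch() above solves. CUDA
# memory grows roughly linearly with batch size, so a degree-1 polyfit over the profiled
# points lets us solve for the batch size that fills `fraction` of free memory. The profiled
# numbers below are made up.
import numpy as np

batch_sizes = [1, 2, 4, 8, 16]
y = [1.2, 1.9, 3.4, 6.3, 12.1]         # pretend profiled memory per batch size (GiB)
p = np.polyfit(batch_sizes, y, deg=1)  # p[0] ~ GiB per image, p[1] ~ fixed overhead
f, fraction = 40.0, 0.9                # pretend 40 GiB free, target 90% utilization
b = int((f * fraction - p[1]) / p[0])  # solve p[0] * b + p[1] = f * fraction for b
print(f'optimal batch size ~ {b}')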
# finish background tasks diff --git a/utils/benchmarks.py b/utils/benchmarks.py new file mode 100644 index 000000000000..446248c03f68 --- /dev/null +++ b/utils/benchmarks.py @@ -0,0 +1,104 @@ +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license +""" +Run YOLOv5 benchmarks on all supported export formats + +Format | `export.py --include` | Model +--- | --- | --- +PyTorch | - | yolov5s.pt +TorchScript | `torchscript` | yolov5s.torchscript +ONNX | `onnx` | yolov5s.onnx +OpenVINO | `openvino` | yolov5s_openvino_model/ +TensorRT | `engine` | yolov5s.engine +CoreML | `coreml` | yolov5s.mlmodel +TensorFlow SavedModel | `saved_model` | yolov5s_saved_model/ +TensorFlow GraphDef | `pb` | yolov5s.pb +TensorFlow Lite | `tflite` | yolov5s.tflite +TensorFlow Edge TPU | `edgetpu` | yolov5s_edgetpu.tflite +TensorFlow.js | `tfjs` | yolov5s_web_model/ + +Requirements: + $ pip install -r requirements.txt coremltools onnx onnx-simplifier onnxruntime openvino-dev tensorflow-cpu # CPU + $ pip install -r requirements.txt coremltools onnx onnx-simplifier onnxruntime-gpu openvino-dev tensorflow # GPU + $ pip install -U nvidia-tensorrt --index-url https://pypi.ngc.nvidia.com # TensorRT + +Usage: + $ python utils/benchmarks.py --weights yolov5s.pt --img 640 +""" + +import argparse +import sys +import time +from pathlib import Path + +import pandas as pd + +FILE = Path(__file__).resolve() +ROOT = FILE.parents[1] # YOLOv5 root directory +if str(ROOT) not in sys.path: + sys.path.append(str(ROOT)) # add ROOT to PATH +# ROOT = ROOT.relative_to(Path.cwd()) # relative + +import export +import val +from utils import notebook_init +from utils.general import LOGGER, print_args +from utils.torch_utils import select_device + + +def run(weights=ROOT / 'yolov5s.pt', # weights path + imgsz=640, # inference size (pixels) + batch_size=1, # batch size + data=ROOT / 'data/coco128.yaml', # dataset.yaml path + device='', # cuda device, i.e. 
0 or 0,1,2,3 or cpu + half=False, # use FP16 half-precision inference + ): + y, t = [], time.time() + formats = export.export_formats() + device = select_device(device) + for i, (name, f, suffix, gpu) in formats.iterrows(): # index, (name, file, suffix, gpu-capable) + try: + if device.type != 'cpu': + assert gpu, f'{name} inference not supported on GPU' + if f == '-': + w = weights # PyTorch format + else: + w = export.run(weights=weights, imgsz=[imgsz], include=[f], device=device, half=half)[-1] # all others + assert suffix in str(w), 'export failed' + result = val.run(data, w, batch_size, imgsz, plots=False, device=device, task='benchmark', half=half) + metrics = result[0] # metrics (mp, mr, map50, map, *losses(box, obj, cls)) + speeds = result[2] # times (preprocess, inference, postprocess) + y.append([name, round(metrics[3], 4), round(speeds[1], 2)]) # mAP, t_inference + except Exception as e: + LOGGER.warning(f'WARNING: Benchmark failure for {name}: {e}') + y.append([name, None, None]) # mAP, t_inference + + # Print results + LOGGER.info('\n') + parse_opt() + notebook_init() # print system info + py = pd.DataFrame(y, columns=['Format', 'mAP@0.5:0.95', 'Inference time (ms)']) + LOGGER.info(f'\nBenchmarks complete ({time.time() - t:.2f}s)') + LOGGER.info(str(py)) + return py + + +def parse_opt(): + parser = argparse.ArgumentParser() + parser.add_argument('--weights', type=str, default=ROOT / 'yolov5s.pt', help='weights path') + parser.add_argument('--imgsz', '--img', '--img-size', type=int, default=640, help='inference size (pixels)') + parser.add_argument('--batch-size', type=int, default=1, help='batch size') + parser.add_argument('--data', type=str, default=ROOT / 'data/coco128.yaml', help='dataset.yaml path') + parser.add_argument('--device', default='', help='cuda device, i.e. 
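# Illustrative sketch (not part of the diff): calling the benchmark loop above from Python
# rather than the CLI; the weights path is an assumption. run() returns the pandas DataFrame
# it logs, so failed formats can be filtered out afterwards.
from utils.benchmarks import run

df = run(weights='yolov5s.pt', imgsz=640, batch_size=1, device='cpu')
print(df[df['mAP@0.5:0.95'].notna()])  # keep only formats that exported and validated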
0 or 0,1,2,3 or cpu') + parser.add_argument('--half', action='store_true', help='use FP16 half-precision inference') + opt = parser.parse_args() + print_args(FILE.stem, opt) + return opt + + +def main(opt): + run(**vars(opt)) + + +if __name__ == "__main__": + opt = parse_opt() + main(opt) diff --git a/utils/callbacks.py b/utils/callbacks.py new file mode 100644 index 000000000000..c51c268f20d6 --- /dev/null +++ b/utils/callbacks.py @@ -0,0 +1,78 @@ +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license +""" +Callback utils +""" + + +class Callbacks: + """" + Handles all registered callbacks for YOLOv5 Hooks + """ + + def __init__(self): + # Define the available callbacks + self._callbacks = { + 'on_pretrain_routine_start': [], + 'on_pretrain_routine_end': [], + + 'on_train_start': [], + 'on_train_epoch_start': [], + 'on_train_batch_start': [], + 'optimizer_step': [], + 'on_before_zero_grad': [], + 'on_train_batch_end': [], + 'on_train_epoch_end': [], + + 'on_val_start': [], + 'on_val_batch_start': [], + 'on_val_image_end': [], + 'on_val_batch_end': [], + 'on_val_end': [], + + 'on_fit_epoch_end': [], # fit = train + val + 'on_model_save': [], + 'on_train_end': [], + 'on_params_update': [], + 'teardown': [], + } + self.stop_training = False # set True to interrupt training + + def register_action(self, hook, name='', callback=None): + """ + Register a new action to a callback hook + + Args: + hook The callback hook name to register the action to + name The name of the action for later reference + callback The callback to fire + """ + assert hook in self._callbacks, f"hook '{hook}' not found in callbacks {self._callbacks}" + assert callable(callback), f"callback '{callback}' is not callable" + self._callbacks[hook].append({'name': name, 'callback': callback}) + + def get_registered_actions(self, hook=None): + """" + Returns all the registered actions by callback hook + + Args: + hook The name of the hook to check, defaults to all + """ + if hook: + return self._callbacks[hook] + else: + return self._callbacks + + def run(self, hook, *args, **kwargs): + """ + Loop through the registered actions and fire all callbacks + + Args: + hook The name of the hook to check, defaults to all + args Arguments to receive from YOLOv5 + kwargs Keyword Arguments to receive from YOLOv5 + """ + + assert hook in self._callbacks, f"hook '{hook}' not found in callbacks {self._callbacks}" + + for logger in self._callbacks[hook]: + logger['callback'](*args, **kwargs) diff --git a/utils/datasets.py b/utils/datasets.py index 36416b14e138..f212e54633be 100755 --- a/utils/datasets.py +++ b/utils/datasets.py @@ -1,34 +1,45 @@ -# Dataset utils and dataloaders +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license +""" +Dataloaders and dataset utils +""" import glob -import logging +import hashlib +import json import math import os import random import shutil import time from itertools import repeat -from multiprocessing.pool import ThreadPool +from multiprocessing.pool import Pool, ThreadPool from pathlib import Path from threading import Thread +from urllib.parse import urlparse +from zipfile import ZipFile import cv2 import numpy as np import torch import torch.nn.functional as F -from PIL import Image, ExifTags -from torch.utils.data import Dataset +import yaml +from PIL import ExifTags, Image, ImageOps +from torch.utils.data import DataLoader, Dataset, dataloader, distributed from tqdm import tqdm -from utils.general import check_requirements, xyxy2xywh, xywh2xyxy, xywhn2xyxy, xyn2xy, segment2box, segments2boxes, \ - 
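# Illustrative sketch (not part of the diff): registering and firing a hook with the
# Callbacks class above. The hook name must be one of the keys defined in __init__;
# the handler and its arguments are made up.
from utils.callbacks import Callbacks

def log_fitness(epoch, fitness):
    print(f'epoch {epoch} finished, fitness {fitness:.3f}')

callbacks = Callbacks()
callbacks.register_action('on_fit_epoch_end', name='log_fitness', callback=log_fitness)
callbacks.run('on_fit_epoch_end', 3, fitness=0.612)  # fires every action on that hook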
resample_segments, clean_str +from utils.augmentations import Albumentations, augment_hsv, copy_paste, letterbox, mixup, random_perspective +from utils.general import (DATASETS_DIR, LOGGER, NUM_THREADS, check_dataset, check_requirements, check_yaml, clean_str, + segments2boxes, xyn2xy, xywh2xyxy, xywhn2xyxy, xyxy2xywhn) from utils.torch_utils import torch_distributed_zero_first +# Remap +cv2.imread = lambda x: cv2.imdecode(np.fromfile(x, np.uint8), cv2.IMREAD_COLOR) # for Chinese filenames + # Parameters -help_url = 'https://github.com/ultralytics/yolov5/wiki/Train-Custom-Data' -img_formats = ['bmp', 'jpg', 'jpeg', 'png', 'tif', 'tiff', 'dng', 'webp', 'mpo'] # acceptable image suffixes -vid_formats = ['mov', 'avi', 'mp4', 'mpg', 'mpeg', 'm4v', 'wmv', 'mkv'] # acceptable video suffixes -logger = logging.getLogger(__name__) +HELP_URL = 'https://github.com/ultralytics/yolov5/wiki/Train-Custom-Data' +IMG_FORMATS = 'bmp', 'dng', 'jpeg', 'jpg', 'mpo', 'png', 'tif', 'tiff', 'webp' # include image suffixes +VID_FORMATS = 'asf', 'avi', 'gif', 'm4v', 'mkv', 'mov', 'mp4', 'mpeg', 'mpg', 'ts', 'wmv' # include video suffixes +BAR_FORMAT = '{l_bar}{bar:10}{r_bar}{bar:-10b}' # tqdm bar format # Get orientation exif tag for orientation in ExifTags.TAGS.keys(): @@ -36,9 +47,12 @@ break -def get_hash(files): - # Returns a single hash value of a list of files - return sum(os.path.getsize(f) for f in files if os.path.isfile(f)) +def get_hash(paths): + # Returns a single hash value of a list of paths (files or dirs) + size = sum(os.path.getsize(p) for p in paths if os.path.exists(p)) # sizes + h = hashlib.md5(str(size).encode()) # hash sizes + h.update(''.join(paths).encode()) # hash paths + return h.hexdigest() # return hash def exif_size(img): @@ -50,42 +64,70 @@ def exif_size(img): s = (s[1], s[0]) elif rotation == 8: # rotation 90 s = (s[1], s[0]) - except: + except Exception: pass return s -def create_dataloader(path, imgsz, batch_size, stride, opt, hyp=None, augment=False, cache=False, pad=0.0, rect=False, - rank=-1, world_size=1, workers=8, image_weights=False, quad=False, prefix=''): - # Make sure only the first process in DDP process the dataset first, and the following others can use the cache - with torch_distributed_zero_first(rank): +def exif_transpose(image): + """ + Transpose a PIL image accordingly if it has an EXIF Orientation tag. + Inplace version of https://github.com/python-pillow/Pillow/blob/master/src/PIL/ImageOps.py exif_transpose() + + :param image: The image to transpose. + :return: An image. 
+ """ + exif = image.getexif() + orientation = exif.get(0x0112, 1) # default 1 + if orientation > 1: + method = {2: Image.FLIP_LEFT_RIGHT, + 3: Image.ROTATE_180, + 4: Image.FLIP_TOP_BOTTOM, + 5: Image.TRANSPOSE, + 6: Image.ROTATE_270, + 7: Image.TRANSVERSE, + 8: Image.ROTATE_90, + }.get(orientation) + if method is not None: + image = image.transpose(method) + del exif[0x0112] + image.info["exif"] = exif.tobytes() + return image + + +def create_dataloader(path, imgsz, batch_size, stride, single_cls=False, hyp=None, augment=False, cache=False, pad=0.0, + rect=False, rank=-1, workers=8, image_weights=False, quad=False, prefix='', shuffle=False): + if rect and shuffle: + LOGGER.warning('WARNING: --rect is incompatible with DataLoader shuffle, setting shuffle=False') + shuffle = False + with torch_distributed_zero_first(rank): # init dataset *.cache only once if DDP dataset = LoadImagesAndLabels(path, imgsz, batch_size, - augment=augment, # augment images - hyp=hyp, # augmentation hyperparameters - rect=rect, # rectangular training + augment=augment, # augmentation + hyp=hyp, # hyperparameters + rect=rect, # rectangular batches cache_images=cache, - single_cls=opt.single_cls, + single_cls=single_cls, stride=int(stride), pad=pad, image_weights=image_weights, prefix=prefix) batch_size = min(batch_size, len(dataset)) - nw = min([os.cpu_count() // world_size, batch_size if batch_size > 1 else 0, workers]) # number of workers - sampler = torch.utils.data.distributed.DistributedSampler(dataset) if rank != -1 else None - loader = torch.utils.data.DataLoader if image_weights else InfiniteDataLoader - # Use torch.utils.data.DataLoader() if dataset.properties will update during training else InfiniteDataLoader() - dataloader = loader(dataset, - batch_size=batch_size, - num_workers=nw, - sampler=sampler, - pin_memory=True, - collate_fn=LoadImagesAndLabels.collate_fn4 if quad else LoadImagesAndLabels.collate_fn) - return dataloader, dataset - - -class InfiniteDataLoader(torch.utils.data.dataloader.DataLoader): + nd = torch.cuda.device_count() # number of CUDA devices + nw = min([os.cpu_count() // max(nd, 1), batch_size if batch_size > 1 else 0, workers]) # number of workers + sampler = None if rank == -1 else distributed.DistributedSampler(dataset, shuffle=shuffle) + loader = DataLoader if image_weights else InfiniteDataLoader # only DataLoader allows for attribute updates + return loader(dataset, + batch_size=batch_size, + shuffle=shuffle and sampler is None, + num_workers=nw, + sampler=sampler, + pin_memory=True, + collate_fn=LoadImagesAndLabels.collate_fn4 if quad else LoadImagesAndLabels.collate_fn), dataset + + +class InfiniteDataLoader(dataloader.DataLoader): """ Dataloader that reuses workers Uses same syntax as vanilla DataLoader @@ -104,7 +146,7 @@ def __iter__(self): yield next(self.iterator) -class _RepeatSampler(object): +class _RepeatSampler: """ Sampler that repeats forever Args: @@ -119,9 +161,10 @@ def __iter__(self): yield from iter(self.sampler) -class LoadImages: # for inference - def __init__(self, path, img_size=640, stride=32): - p = str(Path(path).absolute()) # os-agnostic absolute path +class LoadImages: + # YOLOv5 image/video dataloader, i.e. 
`python detect.py --source image.jpg/vid.mp4` + def __init__(self, path, img_size=640, stride=32, auto=True): + p = str(Path(path).resolve()) # os-agnostic absolute path if '*' in p: files = sorted(glob.glob(p, recursive=True)) # glob elif os.path.isdir(p): @@ -131,8 +174,8 @@ def __init__(self, path, img_size=640, stride=32): else: raise Exception(f'ERROR: {p} does not exist') - images = [x for x in files if x.split('.')[-1].lower() in img_formats] - videos = [x for x in files if x.split('.')[-1].lower() in vid_formats] + images = [x for x in files if x.split('.')[-1].lower() in IMG_FORMATS] + videos = [x for x in files if x.split('.')[-1].lower() in VID_FORMATS] ni, nv = len(images), len(videos) self.img_size = img_size @@ -141,12 +184,13 @@ def __init__(self, path, img_size=640, stride=32): self.nf = ni + nv # number of files self.video_flag = [False] * ni + [True] * nv self.mode = 'image' + self.auto = auto if any(videos): self.new_video(videos[0]) # new video else: self.cap = None assert self.nf > 0, f'No images or videos found in {p}. ' \ - f'Supported formats are:\nimages: {img_formats}\nvideos: {vid_formats}' + f'Supported formats are:\nimages: {IMG_FORMATS}\nvideos: {VID_FORMATS}' def __iter__(self): self.count = 0 @@ -161,7 +205,7 @@ def __next__(self): # Read video self.mode = 'video' ret_val, img0 = self.cap.read() - if not ret_val: + while not ret_val: self.count += 1 self.cap.release() if self.count == self.nf: # last video @@ -172,23 +216,23 @@ def __next__(self): ret_val, img0 = self.cap.read() self.frame += 1 - print(f'video {self.count + 1}/{self.nf} ({self.frame}/{self.frames}) {path}: ', end='') + s = f'video {self.count + 1}/{self.nf} ({self.frame}/{self.frames}) {path}: ' else: # Read image self.count += 1 img0 = cv2.imread(path) # BGR - assert img0 is not None, 'Image Not Found ' + path - print(f'image {self.count}/{self.nf} {path}: ', end='') + assert img0 is not None, f'Image Not Found {path}' + s = f'image {self.count}/{self.nf} {path}: ' # Padded resize - img = letterbox(img0, self.img_size, stride=self.stride)[0] + img = letterbox(img0, self.img_size, stride=self.stride, auto=self.auto)[0] # Convert - img = img[:, :, ::-1].transpose(2, 0, 1) # BGR to RGB, to 3x416x416 + img = img.transpose((2, 0, 1))[::-1] # HWC to CHW, BGR to RGB img = np.ascontiguousarray(img) - return path, img, img0, self.cap + return path, img, img0, self.cap, s def new_video(self, path): self.frame = 0 @@ -200,18 +244,12 @@ def __len__(self): class LoadWebcam: # for inference + # YOLOv5 local webcam dataloader, i.e. 
`python detect.py --source 0` def __init__(self, pipe='0', img_size=640, stride=32): self.img_size = img_size self.stride = stride - - if pipe.isnumeric(): - pipe = eval(pipe) # local camera - # pipe = 'rtsp://192.168.1.64/1' # IP camera - # pipe = 'rtsp://username:password@192.168.1.64/1' # IP camera with login - # pipe = 'http://wmccpinetop.axiscam.net/mjpg/video.mjpg' # IP golf camera - - self.pipe = pipe - self.cap = cv2.VideoCapture(pipe) # video capture object + self.pipe = eval(pipe) if pipe.isnumeric() else pipe + self.cap = cv2.VideoCapture(self.pipe) # video capture object self.cap.set(cv2.CAP_PROP_BUFFERSIZE, 3) # set buffer size def __iter__(self): @@ -226,45 +264,36 @@ def __next__(self): raise StopIteration # Read frame - if self.pipe == 0: # local camera - ret_val, img0 = self.cap.read() - img0 = cv2.flip(img0, 1) # flip left-right - else: # IP camera - n = 0 - while True: - n += 1 - self.cap.grab() - if n % 30 == 0: # skip frames - ret_val, img0 = self.cap.retrieve() - if ret_val: - break + ret_val, img0 = self.cap.read() + img0 = cv2.flip(img0, 1) # flip left-right # Print assert ret_val, f'Camera Error {self.pipe}' img_path = 'webcam.jpg' - print(f'webcam {self.count}: ', end='') + s = f'webcam {self.count}: ' # Padded resize img = letterbox(img0, self.img_size, stride=self.stride)[0] # Convert - img = img[:, :, ::-1].transpose(2, 0, 1) # BGR to RGB, to 3x416x416 + img = img.transpose((2, 0, 1))[::-1] # HWC to CHW, BGR to RGB img = np.ascontiguousarray(img) - return img_path, img, img0, None + return img_path, img, img0, None, s def __len__(self): return 0 -class LoadStreams: # multiple IP or RTSP cameras - def __init__(self, sources='streams.txt', img_size=640, stride=32): +class LoadStreams: + # YOLOv5 streamloader, i.e. `python detect.py --source 'rtsp://example.com/media.mp4' # RTSP, RTMP, HTTP streams` + def __init__(self, sources='streams.txt', img_size=640, stride=32, auto=True): self.mode = 'stream' self.img_size = img_size self.stride = stride if os.path.isfile(sources): - with open(sources, 'r') as f: + with open(sources) as f: sources = [x.strip() for x in f.read().strip().splitlines() if len(x.strip())] else: sources = [sources] @@ -272,43 +301,50 @@ def __init__(self, sources='streams.txt', img_size=640, stride=32): n = len(sources) self.imgs, self.fps, self.frames, self.threads = [None] * n, [0] * n, [0] * n, [None] * n self.sources = [clean_str(x) for x in sources] # clean source names for later + self.auto = auto for i, s in enumerate(sources): # index, source # Start thread to read frames from video stream - print(f'{i + 1}/{n}: {s}... ', end='') - if 'youtube.com/' in s or 'youtu.be/' in s: # if source is YouTube video - check_requirements(('pafy', 'youtube_dl')) + st = f'{i + 1}/{n}: {s}... ' + if urlparse(s).hostname in ('youtube.com', 'youtu.be'): # if source is YouTube video + check_requirements(('pafy', 'youtube_dl==2020.12.2')) import pafy s = pafy.new(s).getbest(preftype="mp4").url # YouTube URL s = eval(s) if s.isnumeric() else s # i.e. 
s = '0' local webcam cap = cv2.VideoCapture(s) - assert cap.isOpened(), f'Failed to open {s}' + assert cap.isOpened(), f'{st}Failed to open {s}' w = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH)) h = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT)) - self.fps[i] = max(cap.get(cv2.CAP_PROP_FPS) % 100, 0) or 30.0 # 30 FPS fallback + fps = cap.get(cv2.CAP_PROP_FPS) # warning: may return 0 or nan self.frames[i] = max(int(cap.get(cv2.CAP_PROP_FRAME_COUNT)), 0) or float('inf') # infinite stream fallback + self.fps[i] = max((fps if math.isfinite(fps) else 0) % 100, 0) or 30 # 30 FPS fallback _, self.imgs[i] = cap.read() # guarantee first frame - self.threads[i] = Thread(target=self.update, args=([i, cap]), daemon=True) - print(f" success ({self.frames[i]} frames {w}x{h} at {self.fps[i]:.2f} FPS)") + self.threads[i] = Thread(target=self.update, args=([i, cap, s]), daemon=True) + LOGGER.info(f"{st} Success ({self.frames[i]} frames {w}x{h} at {self.fps[i]:.2f} FPS)") self.threads[i].start() - print('') # newline + LOGGER.info('') # newline # check for common shapes - s = np.stack([letterbox(x, self.img_size, stride=self.stride)[0].shape for x in self.imgs], 0) # shapes + s = np.stack([letterbox(x, self.img_size, stride=self.stride, auto=self.auto)[0].shape for x in self.imgs]) self.rect = np.unique(s, axis=0).shape[0] == 1 # rect inference if all shapes equal if not self.rect: - print('WARNING: Different stream shapes detected. For optimal performance supply similarly-shaped streams.') + LOGGER.warning('WARNING: Stream shapes differ. For optimal performance supply similarly-shaped streams.') - def update(self, i, cap): + def update(self, i, cap, stream): # Read stream `i` frames in daemon thread - n, f = 0, self.frames[i] + n, f, read = 0, self.frames[i], 1 # frame number, frame array, inference every 'read' frame while cap.isOpened() and n < f: n += 1 # _, self.imgs[index] = cap.read() cap.grab() - if n % 4: # read every 4th frame + if n % read == 0: success, im = cap.retrieve() - self.imgs[i] = im if success else self.imgs[i] * 0 + if success: + self.imgs[i] = im + else: + LOGGER.warning('WARNING: Video stream unresponsive, please check your IP camera connection.') + self.imgs[i] = np.zeros_like(self.imgs[i]) + cap.open(stream) # re-open stream if signal was lost time.sleep(1 / self.fps[i]) # wait time def __iter__(self): @@ -323,28 +359,31 @@ def __next__(self): # Letterbox img0 = self.imgs.copy() - img = [letterbox(x, self.img_size, auto=self.rect, stride=self.stride)[0] for x in img0] + img = [letterbox(x, self.img_size, stride=self.stride, auto=self.rect and self.auto)[0] for x in img0] # Stack img = np.stack(img, 0) # Convert - img = img[:, :, :, ::-1].transpose(0, 3, 1, 2) # BGR to RGB, to bsx3x416x416 + img = img[..., ::-1].transpose((0, 3, 1, 2)) # BGR to RGB, BHWC to BCHW img = np.ascontiguousarray(img) - return self.sources, img, img0, None + return self.sources, img, img0, None, '' def __len__(self): - return 0 # 1E12 frames = 32 streams at 30 FPS for 30 years + return len(self.sources) # 1E12 frames = 32 streams at 30 FPS for 30 years def img2label_paths(img_paths): # Define label paths as a function of image paths sa, sb = os.sep + 'images' + os.sep, os.sep + 'labels' + os.sep # /images/, /labels/ substrings - return ['txt'.join(x.replace(sa, sb, 1).rsplit(x.split('.')[-1], 1)) for x in img_paths] + return [sb.join(x.rsplit(sa, 1)).rsplit('.', 1)[0] + '.txt' for x in img_paths] + +class LoadImagesAndLabels(Dataset): + # YOLOv5 train_loader/val_loader, loads images and labels for training and 
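# Illustrative sketch (not part of the diff): img2label_paths() above swaps the last
# /images/ path component for /labels/ and the image suffix for .txt.
from utils.datasets import img2label_paths

print(img2label_paths(['coco128/images/train2017/000000000009.jpg']))
# -> ['coco128/labels/train2017/000000000009.txt'] (separators are os-specific)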
validation + cache_version = 0.6 # dataset labels *.cache version -class LoadImagesAndLabels(Dataset): # for training/testing def __init__(self, path, img_size=640, batch_size=16, augment=False, hyp=None, rect=False, image_weights=False, cache_images=False, single_cls=False, stride=32, pad=0.0, prefix=''): self.img_size = img_size @@ -356,6 +395,7 @@ def __init__(self, path, img_size=640, batch_size=16, augment=False, hyp=None, r self.mosaic_border = [-img_size // 2, -img_size // 2] self.stride = stride self.path = path + self.albumentations = Albumentations() if augment else None try: f = [] # image files @@ -363,50 +403,47 @@ def __init__(self, path, img_size=640, batch_size=16, augment=False, hyp=None, r p = Path(p) # os-agnostic if p.is_dir(): # dir f += glob.glob(str(p / '**' / '*.*'), recursive=True) - # f = list(p.rglob('**/*.*')) # pathlib + # f = list(p.rglob('*.*')) # pathlib elif p.is_file(): # file - with open(p, 'r') as t: + with open(p) as t: t = t.read().strip().splitlines() parent = str(p.parent) + os.sep f += [x.replace('./', parent) if x.startswith('./') else x for x in t] # local to global path # f += [p.parent / x.lstrip(os.sep) for x in t] # local to global path (pathlib) else: raise Exception(f'{prefix}{p} does not exist') - self.img_files = sorted([x.replace('/', os.sep) for x in f if x.split('.')[-1].lower() in img_formats]) - # self.img_files = sorted([x for x in f if x.suffix[1:].lower() in img_formats]) # pathlib - assert self.img_files, f'{prefix}No images found' + self.im_files = sorted(x.replace('/', os.sep) for x in f if x.split('.')[-1].lower() in IMG_FORMATS) + # self.img_files = sorted([x for x in f if x.suffix[1:].lower() in IMG_FORMATS]) # pathlib + assert self.im_files, f'{prefix}No images found' except Exception as e: - raise Exception(f'{prefix}Error loading data from {path}: {e}\nSee {help_url}') + raise Exception(f'{prefix}Error loading data from {path}: {e}\nSee {HELP_URL}') # Check cache - self.label_files = img2label_paths(self.img_files) # labels - cache_path = (p if p.is_file() else Path(self.label_files[0]).parent).with_suffix('.cache') # cached labels - if cache_path.is_file(): - cache, exists = torch.load(cache_path), True # load - if cache['hash'] != get_hash(self.label_files + self.img_files) or 'version' not in cache: # changed - cache, exists = self.cache_labels(cache_path, prefix), False # re-cache - else: + self.label_files = img2label_paths(self.im_files) # labels + cache_path = (p if p.is_file() else Path(self.label_files[0]).parent).with_suffix('.cache') + try: + cache, exists = np.load(cache_path, allow_pickle=True).item(), True # load dict + assert cache['version'] == self.cache_version # same version + assert cache['hash'] == get_hash(self.label_files + self.im_files) # same hash + except Exception: cache, exists = self.cache_labels(cache_path, prefix), False # cache # Display cache - nf, nm, ne, nc, n = cache.pop('results') # found, missing, empty, corrupted, total + nf, nm, ne, nc, n = cache.pop('results') # found, missing, empty, corrupt, total if exists: - d = f"Scanning '{cache_path}' images and labels... {nf} found, {nm} missing, {ne} empty, {nc} corrupted" - tqdm(None, desc=prefix + d, total=n, initial=n) # display cache results - assert nf > 0 or not augment, f'{prefix}No labels in {cache_path}. Can not train without labels. See {help_url}' + d = f"Scanning '{cache_path}' images and labels... 
{nf} found, {nm} missing, {ne} empty, {nc} corrupt" + tqdm(None, desc=prefix + d, total=n, initial=n, bar_format=BAR_FORMAT) # display cache results + if cache['msgs']: + LOGGER.info('\n'.join(cache['msgs'])) # display warnings + assert nf > 0 or not augment, f'{prefix}No labels in {cache_path}. Can not train without labels. See {HELP_URL}' # Read cache - cache.pop('hash') # remove hash - cache.pop('version') # remove version + [cache.pop(k) for k in ('hash', 'version', 'msgs')] # remove items labels, shapes, self.segments = zip(*cache.values()) self.labels = list(labels) self.shapes = np.array(shapes, dtype=np.float64) - self.img_files = list(cache.keys()) # update + self.im_files = list(cache.keys()) # update self.label_files = img2label_paths(cache.keys()) # update - if single_cls: - for x in self.labels: - x[:, 0] = 0 - n = len(shapes) # number of images bi = np.floor(np.arange(n) / batch_size).astype(np.int) # batch index nb = bi[-1] + 1 # number of batches @@ -414,13 +451,27 @@ def __init__(self, path, img_size=640, batch_size=16, augment=False, hyp=None, r self.n = n self.indices = range(n) + # Update labels + include_class = [] # filter labels to include only these classes (optional) + include_class_array = np.array(include_class).reshape(1, -1) + for i, (label, segment) in enumerate(zip(self.labels, self.segments)): + if include_class: + j = (label[:, 0:1] == include_class_array).any(1) + self.labels[i] = label[j] + if segment: + self.segments[i] = segment[j] + if single_cls: # single-class training, merge all classes into 0 + self.labels[i][:, 0] = 0 + if segment: + self.segments[i][:, 0] = 0 + # Rectangular Training if self.rect: # Sort by aspect ratio s = self.shapes # wh ar = s[:, 1] / s[:, 0] # aspect ratio irect = ar.argsort() - self.img_files = [self.img_files[i] for i in irect] + self.im_files = [self.im_files[i] for i in irect] self.label_files = [self.label_files[i] for i in irect] self.labels = [self.labels[i] for i in irect] self.shapes = s[irect] # wh @@ -438,79 +489,62 @@ def __init__(self, path, img_size=640, batch_size=16, augment=False, hyp=None, r self.batch_shapes = np.ceil(np.array(shapes) * img_size / stride + pad).astype(np.int) * stride - # Cache images into memory for faster training (WARNING: large datasets may exceed system RAM) - self.imgs = [None] * n + # Cache images into RAM/disk for faster training (WARNING: large datasets may exceed system resources) + self.ims = [None] * n + self.npy_files = [Path(f).with_suffix('.npy') for f in self.im_files] if cache_images: gb = 0 # Gigabytes of cached images - self.img_hw0, self.img_hw = [None] * n, [None] * n - results = ThreadPool(8).imap(lambda x: load_image(*x), zip(repeat(self), range(n))) # 8 threads - pbar = tqdm(enumerate(results), total=n) + self.im_hw0, self.im_hw = [None] * n, [None] * n + fcn = self.cache_images_to_disk if cache_images == 'disk' else self.load_image + results = ThreadPool(NUM_THREADS).imap(fcn, range(n)) + pbar = tqdm(enumerate(results), total=n, bar_format=BAR_FORMAT) for i, x in pbar: - self.imgs[i], self.img_hw0[i], self.img_hw[i] = x # img, hw_original, hw_resized = load_image(self, i) - gb += self.imgs[i].nbytes - pbar.desc = f'{prefix}Caching images ({gb / 1E9:.1f}GB)' + if cache_images == 'disk': + gb += self.npy_files[i].stat().st_size + else: # 'ram' + self.ims[i], self.im_hw0[i], self.im_hw[i] = x # im, hw_orig, hw_resized = load_image(self, i) + gb += self.ims[i].nbytes + pbar.desc = f'{prefix}Caching images ({gb / 1E9:.1f}GB {cache_images})' pbar.close() def 
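# Illustrative sketch (not part of the diff): the rectangular-training shape math above for
# one batch, assuming the usual min/max aspect-ratio rule per batch. A batch of wide images
# (h/w < 1) gets a short, stride-aligned letterbox height instead of a full square.
import numpy as np

img_size, stride, pad = 640, 32, 0.0
ar = np.array([0.70, 0.72, 0.75])    # h/w of the images in one batch, pre-sorted (made up)
mini, maxi = ar.min(), ar.max()
shape = [1, 1]                       # default: full square
if maxi < 1:                         # all images wide -> shrink height
    shape = [maxi, 1]
elif mini > 1:                       # all images tall -> shrink width
    shape = [1, 1 / mini]
batch_shape = np.ceil(np.array(shape) * img_size / stride + pad).astype(int) * stride
print(batch_shape)                   # [480 640]: 0.75 * 640 = 480 is already a multiple of 32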
cache_labels(self, path=Path('./labels.cache'), prefix=''): # Cache dataset labels, check images and read shapes x = {} # dict - nm, nf, ne, nc = 0, 0, 0, 0 # number missing, found, empty, duplicate - pbar = tqdm(zip(self.img_files, self.label_files), desc='Scanning images', total=len(self.img_files)) - for i, (im_file, lb_file) in enumerate(pbar): - try: - # verify images - im = Image.open(im_file) - im.verify() # PIL verify - shape = exif_size(im) # image size - segments = [] # instance segments - assert (shape[0] > 9) & (shape[1] > 9), f'image size {shape} <10 pixels' - assert im.format.lower() in img_formats, f'invalid image format {im.format}' - - # verify labels - if os.path.isfile(lb_file): - nf += 1 # label found - with open(lb_file, 'r') as f: - l = [x.split() for x in f.read().strip().splitlines()] - if any([len(x) > 8 for x in l]): # is segment - classes = np.array([x[0] for x in l], dtype=np.float32) - segments = [np.array(x[1:], dtype=np.float32).reshape(-1, 2) for x in l] # (cls, xy1...) - l = np.concatenate((classes.reshape(-1, 1), segments2boxes(segments)), 1) # (cls, xywh) - l = np.array(l, dtype=np.float32) - if len(l): - assert l.shape[1] == 5, 'labels require 5 columns each' - assert (l >= 0).all(), 'negative labels' - assert (l[:, 1:] <= 1).all(), 'non-normalized or out of bounds coordinate labels' - assert np.unique(l, axis=0).shape[0] == l.shape[0], 'duplicate labels' - else: - ne += 1 # label empty - l = np.zeros((0, 5), dtype=np.float32) - else: - nm += 1 # label missing - l = np.zeros((0, 5), dtype=np.float32) - x[im_file] = [l, shape, segments] - except Exception as e: - nc += 1 - logging.info(f'{prefix}WARNING: Ignoring corrupted image and/or label {im_file}: {e}') - - pbar.desc = f"{prefix}Scanning '{path.parent / path.stem}' images and labels... " \ - f"{nf} found, {nm} missing, {ne} empty, {nc} corrupted" - pbar.close() + nm, nf, ne, nc, msgs = 0, 0, 0, 0, [] # number missing, found, empty, corrupt, messages + desc = f"{prefix}Scanning '{path.parent / path.stem}' images and labels..." + with Pool(NUM_THREADS) as pool: + pbar = tqdm(pool.imap(verify_image_label, zip(self.im_files, self.label_files, repeat(prefix))), + desc=desc, total=len(self.im_files), bar_format=BAR_FORMAT) + for im_file, lb, shape, segments, nm_f, nf_f, ne_f, nc_f, msg in pbar: + nm += nm_f + nf += nf_f + ne += ne_f + nc += nc_f + if im_file: + x[im_file] = [lb, shape, segments] + if msg: + msgs.append(msg) + pbar.desc = f"{desc}{nf} found, {nm} missing, {ne} empty, {nc} corrupt" + pbar.close() + if msgs: + LOGGER.info('\n'.join(msgs)) if nf == 0: - logging.info(f'{prefix}WARNING: No labels found in {path}. See {help_url}') - - x['hash'] = get_hash(self.label_files + self.img_files) - x['results'] = nf, nm, ne, nc, i + 1 - x['version'] = 0.1 # cache version + LOGGER.warning(f'{prefix}WARNING: No labels found in {path}. 
See {HELP_URL}') + x['hash'] = get_hash(self.label_files + self.im_files) + x['results'] = nf, nm, ne, nc, len(self.im_files) + x['msgs'] = msgs # warnings + x['version'] = self.cache_version # cache version try: - torch.save(x, path) # save for next time - logging.info(f'{prefix}New cache created: {path}') + np.save(path, x) # save cache for next time + path.with_suffix('.cache.npy').rename(path) # remove .npy suffix + LOGGER.info(f'{prefix}New cache created: {path}') except Exception as e: - logging.info(f'{prefix}WARNING: Cache directory {path.parent} is not writeable: {e}') # path not writeable + LOGGER.warning(f'{prefix}WARNING: Cache directory {path.parent} is not writeable: {e}') # not writeable return x def __len__(self): - return len(self.img_files) + return len(self.im_files) # def __iter__(self): # self.count = -1 @@ -525,19 +559,16 @@ def __getitem__(self, index): mosaic = self.mosaic and random.random() < hyp['mosaic'] if mosaic: # Load mosaic - img, labels = load_mosaic(self, index) + img, labels = self.load_mosaic(index) shapes = None - # MixUp https://arxiv.org/pdf/1710.09412.pdf + # MixUp augmentation if random.random() < hyp['mixup']: - img2, labels2 = load_mosaic(self, random.randint(0, self.n - 1)) - r = np.random.beta(8.0, 8.0) # mixup ratio, alpha=beta=8.0 - img = (img * r + img2 * (1 - r)).astype(np.uint8) - labels = np.concatenate((labels, labels2), 0) + img, labels = mixup(img, labels, *self.load_mosaic(random.randint(0, self.n - 1))) else: # Load image - img, (h0, w0), (h, w) = load_image(self, index) + img, (h0, w0), (h, w) = self.load_image(index) # Letterbox shape = self.batch_shapes[self.batch[index]] if self.rect else self.img_size # final letterboxed shape @@ -548,9 +579,7 @@ def __getitem__(self, index): if labels.size: # normalized xywh to pixel xyxy format labels[:, 1:] = xywhn2xyxy(labels[:, 1:], ratio[0] * w, ratio[1] * h, padw=pad[0], padh=pad[1]) - if self.augment: - # Augment imagespace - if not mosaic: + if self.augment: img, labels = random_perspective(img, labels, degrees=hyp['degrees'], translate=hyp['translate'], @@ -558,442 +587,234 @@ def __getitem__(self, index): shear=hyp['shear'], perspective=hyp['perspective']) - # Augment colorspace - augment_hsv(img, hgain=hyp['hsv_h'], sgain=hyp['hsv_s'], vgain=hyp['hsv_v']) + nl = len(labels) # number of labels + if nl: + labels[:, 1:5] = xyxy2xywhn(labels[:, 1:5], w=img.shape[1], h=img.shape[0], clip=True, eps=1E-3) - # Apply cutouts - # if random.random() < 0.9: - # labels = cutout(img, labels) + if self.augment: + # Albumentations + img, labels = self.albumentations(img, labels) + nl = len(labels) # update after albumentations - nL = len(labels) # number of labels - if nL: - labels[:, 1:5] = xyxy2xywh(labels[:, 1:5]) # convert xyxy to xywh - labels[:, [2, 4]] /= img.shape[0] # normalized height 0-1 - labels[:, [1, 3]] /= img.shape[1] # normalized width 0-1 + # HSV color-space + augment_hsv(img, hgain=hyp['hsv_h'], sgain=hyp['hsv_s'], vgain=hyp['hsv_v']) - if self.augment: - # flip up-down + # Flip up-down if random.random() < hyp['flipud']: img = np.flipud(img) - if nL: + if nl: labels[:, 2] = 1 - labels[:, 2] - # flip left-right + # Flip left-right if random.random() < hyp['fliplr']: img = np.fliplr(img) - if nL: + if nl: labels[:, 1] = 1 - labels[:, 1] - labels_out = torch.zeros((nL, 6)) - if nL: + # Cutouts + # labels = cutout(img, labels, p=0.5) + # nl = len(labels) # update after cutout + + labels_out = torch.zeros((nl, 6)) + if nl: labels_out[:, 1:] = torch.from_numpy(labels) # Convert - img = 
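# Illustrative sketch (not part of the diff): the MixUp blend now delegated to
# utils.augmentations.mixup above. Beta(8, 8) concentrates the mixing ratio near 0.5, and
# the label sets of both images are simply concatenated; the arrays below are stand-ins.
import numpy as np

im = np.full((4, 4, 3), 200, dtype=np.uint8)   # stand-in image 1
im2 = np.full((4, 4, 3), 50, dtype=np.uint8)   # stand-in image 2
labels = np.array([[0, 0.5, 0.5, 0.2, 0.2]])   # (cls, xywh) rows
labels2 = np.array([[1, 0.3, 0.3, 0.1, 0.1]])

r = np.random.beta(8.0, 8.0)                   # mixup ratio, alpha = beta = 8.0
im_mix = (im * r + im2 * (1 - r)).astype(np.uint8)
labels_mix = np.concatenate((labels, labels2), 0)
print(im_mix[0, 0], labels_mix.shape)          # blended pixel, (2, 5) label array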
img[:, :, ::-1].transpose(2, 0, 1) # BGR to RGB, to 3x416x416 + img = img.transpose((2, 0, 1))[::-1] # HWC to CHW, BGR to RGB img = np.ascontiguousarray(img) - return torch.from_numpy(img), labels_out, self.img_files[index], shapes + return torch.from_numpy(img), labels_out, self.im_files[index], shapes + + def load_image(self, i): + # Loads 1 image from dataset index 'i', returns (im, original hw, resized hw) + im, f, fn = self.ims[i], self.im_files[i], self.npy_files[i], + if im is None: # not cached in RAM + if fn.exists(): # load npy + im = np.load(fn) + else: # read image + im = cv2.imread(f) # BGR + assert im is not None, f'Image Not Found {f}' + h0, w0 = im.shape[:2] # orig hw + r = self.img_size / max(h0, w0) # ratio + if r != 1: # if sizes are not equal + im = cv2.resize(im, + (int(w0 * r), int(h0 * r)), + interpolation=cv2.INTER_LINEAR if (self.augment or r > 1) else cv2.INTER_AREA) + return im, (h0, w0), im.shape[:2] # im, hw_original, hw_resized + else: + return self.ims[i], self.im_hw0[i], self.im_hw[i] # im, hw_original, hw_resized + + def cache_images_to_disk(self, i): + # Saves an image as an *.npy file for faster loading + f = self.npy_files[i] + if not f.exists(): + np.save(f.as_posix(), cv2.imread(self.im_files[i])) + + def load_mosaic(self, index): + # YOLOv5 4-mosaic loader. Loads 1 image + 3 random images into a 4-image mosaic + labels4, segments4 = [], [] + s = self.img_size + yc, xc = (int(random.uniform(-x, 2 * s + x)) for x in self.mosaic_border) # mosaic center x, y + indices = [index] + random.choices(self.indices, k=3) # 3 additional image indices + random.shuffle(indices) + for i, index in enumerate(indices): + # Load image + img, _, (h, w) = self.load_image(index) + + # place img in img4 + if i == 0: # top left + img4 = np.full((s * 2, s * 2, img.shape[2]), 114, dtype=np.uint8) # base image with 4 tiles + x1a, y1a, x2a, y2a = max(xc - w, 0), max(yc - h, 0), xc, yc # xmin, ymin, xmax, ymax (large image) + x1b, y1b, x2b, y2b = w - (x2a - x1a), h - (y2a - y1a), w, h # xmin, ymin, xmax, ymax (small image) + elif i == 1: # top right + x1a, y1a, x2a, y2a = xc, max(yc - h, 0), min(xc + w, s * 2), yc + x1b, y1b, x2b, y2b = 0, h - (y2a - y1a), min(w, x2a - x1a), h + elif i == 2: # bottom left + x1a, y1a, x2a, y2a = max(xc - w, 0), yc, xc, min(s * 2, yc + h) + x1b, y1b, x2b, y2b = w - (x2a - x1a), 0, w, min(y2a - y1a, h) + elif i == 3: # bottom right + x1a, y1a, x2a, y2a = xc, yc, min(xc + w, s * 2), min(s * 2, yc + h) + x1b, y1b, x2b, y2b = 0, 0, min(w, x2a - x1a), min(y2a - y1a, h) + + img4[y1a:y2a, x1a:x2a] = img[y1b:y2b, x1b:x2b] # img4[ymin:ymax, xmin:xmax] + padw = x1a - x1b + padh = y1a - y1b + + # Labels + labels, segments = self.labels[index].copy(), self.segments[index].copy() + if labels.size: + labels[:, 1:] = xywhn2xyxy(labels[:, 1:], w, h, padw, padh) # normalized xywh to pixel xyxy format + segments = [xyn2xy(x, w, h, padw, padh) for x in segments] + labels4.append(labels) + segments4.extend(segments) + + # Concat/clip labels + labels4 = np.concatenate(labels4, 0) + for x in (labels4[:, 1:], *segments4): + np.clip(x, 0, 2 * s, out=x) # clip when using random_perspective() + # img4, labels4 = replicate(img4, labels4) # replicate + + # Augment + img4, labels4, segments4 = copy_paste(img4, labels4, segments4, p=self.hyp['copy_paste']) + img4, labels4 = random_perspective(img4, labels4, segments4, + degrees=self.hyp['degrees'], + translate=self.hyp['translate'], + scale=self.hyp['scale'], + shear=self.hyp['shear'], + perspective=self.hyp['perspective'], + 
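# Illustrative sketch (not part of the diff): where load_mosaic() above pastes the first
# (top-left) tile for one concrete mosaic center. The 2s x 2s grey canvas is clipped against
# the center point (xc, yc); the numbers below are made up.
s = 640
xc, yc = 800, 700              # mosaic center, drawn from uniform(s / 2, 3 * s / 2)
h, w = 480, 640                # shape of the image placed top-left (i == 0)

x1a, y1a, x2a, y2a = max(xc - w, 0), max(yc - h, 0), xc, yc  # destination on the 1280x1280 canvas
x1b, y1b, x2b, y2b = w - (x2a - x1a), h - (y2a - y1a), w, h  # matching crop from the source image
print((x1a, y1a, x2a, y2a), (x1b, y1b, x2b, y2b))
# (160, 220, 800, 700) (0, 0, 640, 480): the whole tile fits left of and above the center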
border=self.mosaic_border) # border to remove + + return img4, labels4 + + def load_mosaic9(self, index): + # YOLOv5 9-mosaic loader. Loads 1 image + 8 random images into a 9-image mosaic + labels9, segments9 = [], [] + s = self.img_size + indices = [index] + random.choices(self.indices, k=8) # 8 additional image indices + random.shuffle(indices) + hp, wp = -1, -1 # height, width previous + for i, index in enumerate(indices): + # Load image + img, _, (h, w) = self.load_image(index) + + # place img in img9 + if i == 0: # center + img9 = np.full((s * 3, s * 3, img.shape[2]), 114, dtype=np.uint8) # base image with 4 tiles + h0, w0 = h, w + c = s, s, s + w, s + h # xmin, ymin, xmax, ymax (base) coordinates + elif i == 1: # top + c = s, s - h, s + w, s + elif i == 2: # top right + c = s + wp, s - h, s + wp + w, s + elif i == 3: # right + c = s + w0, s, s + w0 + w, s + h + elif i == 4: # bottom right + c = s + w0, s + hp, s + w0 + w, s + hp + h + elif i == 5: # bottom + c = s + w0 - w, s + h0, s + w0, s + h0 + h + elif i == 6: # bottom left + c = s + w0 - wp - w, s + h0, s + w0 - wp, s + h0 + h + elif i == 7: # left + c = s - w, s + h0 - h, s, s + h0 + elif i == 8: # top left + c = s - w, s + h0 - hp - h, s, s + h0 - hp + + padx, pady = c[:2] + x1, y1, x2, y2 = (max(x, 0) for x in c) # allocate coords + + # Labels + labels, segments = self.labels[index].copy(), self.segments[index].copy() + if labels.size: + labels[:, 1:] = xywhn2xyxy(labels[:, 1:], w, h, padx, pady) # normalized xywh to pixel xyxy format + segments = [xyn2xy(x, w, h, padx, pady) for x in segments] + labels9.append(labels) + segments9.extend(segments) + + # Image + img9[y1:y2, x1:x2] = img[y1 - pady:, x1 - padx:] # img9[ymin:ymax, xmin:xmax] + hp, wp = h, w # height, width previous + + # Offset + yc, xc = (int(random.uniform(0, s)) for _ in self.mosaic_border) # mosaic center x, y + img9 = img9[yc:yc + 2 * s, xc:xc + 2 * s] + + # Concat/clip labels + labels9 = np.concatenate(labels9, 0) + labels9[:, [1, 3]] -= xc + labels9[:, [2, 4]] -= yc + c = np.array([xc, yc]) # centers + segments9 = [x - c for x in segments9] + + for x in (labels9[:, 1:], *segments9): + np.clip(x, 0, 2 * s, out=x) # clip when using random_perspective() + # img9, labels9 = replicate(img9, labels9) # replicate + + # Augment + img9, labels9 = random_perspective(img9, labels9, segments9, + degrees=self.hyp['degrees'], + translate=self.hyp['translate'], + scale=self.hyp['scale'], + shear=self.hyp['shear'], + perspective=self.hyp['perspective'], + border=self.mosaic_border) # border to remove + + return img9, labels9 @staticmethod def collate_fn(batch): - img, label, path, shapes = zip(*batch) # transposed - for i, l in enumerate(label): - l[:, 0] = i # add target image index for build_targets() - return torch.stack(img, 0), torch.cat(label, 0), path, shapes + im, label, path, shapes = zip(*batch) # transposed + for i, lb in enumerate(label): + lb[:, 0] = i # add target image index for build_targets() + return torch.stack(im, 0), torch.cat(label, 0), path, shapes @staticmethod def collate_fn4(batch): img, label, path, shapes = zip(*batch) # transposed n = len(shapes) // 4 - img4, label4, path4, shapes4 = [], [], path[:n], shapes[:n] + im4, label4, path4, shapes4 = [], [], path[:n], shapes[:n] - ho = torch.tensor([[0., 0, 0, 1, 0, 0]]) - wo = torch.tensor([[0., 0, 1, 0, 0, 0]]) - s = torch.tensor([[1, 1, .5, .5, .5, .5]]) # scale + ho = torch.tensor([[0.0, 0, 0, 1, 0, 0]]) + wo = torch.tensor([[0.0, 0, 1, 0, 0, 0]]) + s = torch.tensor([[1, 1, 0.5, 0.5, 0.5, 0.5]]) 
# scale for i in range(n): # zidane torch.zeros(16,3,720,1280) # BCHW i *= 4 if random.random() < 0.5: - im = F.interpolate(img[i].unsqueeze(0).float(), scale_factor=2., mode='bilinear', align_corners=False)[ + im = F.interpolate(img[i].unsqueeze(0).float(), scale_factor=2.0, mode='bilinear', align_corners=False)[ 0].type(img[i].type()) - l = label[i] + lb = label[i] else: im = torch.cat((torch.cat((img[i], img[i + 1]), 1), torch.cat((img[i + 2], img[i + 3]), 1)), 2) - l = torch.cat((label[i], label[i + 1] + ho, label[i + 2] + wo, label[i + 3] + ho + wo), 0) * s - img4.append(im) - label4.append(l) + lb = torch.cat((label[i], label[i + 1] + ho, label[i + 2] + wo, label[i + 3] + ho + wo), 0) * s + im4.append(im) + label4.append(lb) - for i, l in enumerate(label4): - l[:, 0] = i # add target image index for build_targets() + for i, lb in enumerate(label4): + lb[:, 0] = i # add target image index for build_targets() - return torch.stack(img4, 0), torch.cat(label4, 0), path4, shapes4 + return torch.stack(im4, 0), torch.cat(label4, 0), path4, shapes4 # Ancillary functions -------------------------------------------------------------------------------------------------- -def load_image(self, index): - # loads 1 image from dataset, returns img, original hw, resized hw - img = self.imgs[index] - if img is None: # not cached - path = self.img_files[index] - img = cv2.imread(path) # BGR - assert img is not None, 'Image Not Found ' + path - h0, w0 = img.shape[:2] # orig hw - r = self.img_size / max(h0, w0) # ratio - if r != 1: # if sizes are not equal - img = cv2.resize(img, (int(w0 * r), int(h0 * r)), - interpolation=cv2.INTER_AREA if r < 1 and not self.augment else cv2.INTER_LINEAR) - return img, (h0, w0), img.shape[:2] # img, hw_original, hw_resized - else: - return self.imgs[index], self.img_hw0[index], self.img_hw[index] # img, hw_original, hw_resized - - -def augment_hsv(img, hgain=0.5, sgain=0.5, vgain=0.5): - r = np.random.uniform(-1, 1, 3) * [hgain, sgain, vgain] + 1 # random gains - hue, sat, val = cv2.split(cv2.cvtColor(img, cv2.COLOR_BGR2HSV)) - dtype = img.dtype # uint8 - - x = np.arange(0, 256, dtype=np.int16) - lut_hue = ((x * r[0]) % 180).astype(dtype) - lut_sat = np.clip(x * r[1], 0, 255).astype(dtype) - lut_val = np.clip(x * r[2], 0, 255).astype(dtype) - - img_hsv = cv2.merge((cv2.LUT(hue, lut_hue), cv2.LUT(sat, lut_sat), cv2.LUT(val, lut_val))).astype(dtype) - cv2.cvtColor(img_hsv, cv2.COLOR_HSV2BGR, dst=img) # no return needed - - -def hist_equalize(img, clahe=True, bgr=False): - # Equalize histogram on BGR image 'img' with img.shape(n,m,3) and range 0-255 - yuv = cv2.cvtColor(img, cv2.COLOR_BGR2YUV if bgr else cv2.COLOR_RGB2YUV) - if clahe: - c = cv2.createCLAHE(clipLimit=2.0, tileGridSize=(8, 8)) - yuv[:, :, 0] = c.apply(yuv[:, :, 0]) - else: - yuv[:, :, 0] = cv2.equalizeHist(yuv[:, :, 0]) # equalize Y channel histogram - return cv2.cvtColor(yuv, cv2.COLOR_YUV2BGR if bgr else cv2.COLOR_YUV2RGB) # convert YUV image to RGB - - -def load_mosaic(self, index): - # loads images in a 4-mosaic - - labels4, segments4 = [], [] - s = self.img_size - yc, xc = [int(random.uniform(-x, 2 * s + x)) for x in self.mosaic_border] # mosaic center x, y - indices = [index] + random.choices(self.indices, k=3) # 3 additional image indices - for i, index in enumerate(indices): - # Load image - img, _, (h, w) = load_image(self, index) - - # place img in img4 - if i == 0: # top left - img4 = np.full((s * 2, s * 2, img.shape[2]), 114, dtype=np.uint8) # base image with 4 tiles - x1a, y1a, x2a, y2a = max(xc 
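# Illustrative sketch (not part of the diff): the lookup-table trick augment_hsv() above
# relies on. Instead of multiplying every pixel, it builds one 256-entry LUT per channel and
# applies it with cv2.LUT; hue wraps modulo 180 because OpenCV stores hue in [0, 180).
# Gains 0.015 / 0.7 / 0.4 are the usual hyp defaults; the image is a stand-in.
import cv2
import numpy as np

img = np.random.randint(0, 255, (32, 32, 3), dtype=np.uint8)  # stand-in BGR image
r = np.random.uniform(-1, 1, 3) * [0.015, 0.7, 0.4] + 1       # random channel gains
hue, sat, val = cv2.split(cv2.cvtColor(img, cv2.COLOR_BGR2HSV))

x = np.arange(0, 256, dtype=np.int16)
lut_hue = ((x * r[0]) % 180).astype(np.uint8)
lut_sat = np.clip(x * r[1], 0, 255).astype(np.uint8)
lut_val = np.clip(x * r[2], 0, 255).astype(np.uint8)

img_hsv = cv2.merge((cv2.LUT(hue, lut_hue), cv2.LUT(sat, lut_sat), cv2.LUT(val, lut_val)))
cv2.cvtColor(img_hsv, cv2.COLOR_HSV2BGR, dst=img)  # write back in place, no return needed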
- w, 0), max(yc - h, 0), xc, yc # xmin, ymin, xmax, ymax (large image) - x1b, y1b, x2b, y2b = w - (x2a - x1a), h - (y2a - y1a), w, h # xmin, ymin, xmax, ymax (small image) - elif i == 1: # top right - x1a, y1a, x2a, y2a = xc, max(yc - h, 0), min(xc + w, s * 2), yc - x1b, y1b, x2b, y2b = 0, h - (y2a - y1a), min(w, x2a - x1a), h - elif i == 2: # bottom left - x1a, y1a, x2a, y2a = max(xc - w, 0), yc, xc, min(s * 2, yc + h) - x1b, y1b, x2b, y2b = w - (x2a - x1a), 0, w, min(y2a - y1a, h) - elif i == 3: # bottom right - x1a, y1a, x2a, y2a = xc, yc, min(xc + w, s * 2), min(s * 2, yc + h) - x1b, y1b, x2b, y2b = 0, 0, min(w, x2a - x1a), min(y2a - y1a, h) - - img4[y1a:y2a, x1a:x2a] = img[y1b:y2b, x1b:x2b] # img4[ymin:ymax, xmin:xmax] - padw = x1a - x1b - padh = y1a - y1b - - # Labels - labels, segments = self.labels[index].copy(), self.segments[index].copy() - if labels.size: - labels[:, 1:] = xywhn2xyxy(labels[:, 1:], w, h, padw, padh) # normalized xywh to pixel xyxy format - segments = [xyn2xy(x, w, h, padw, padh) for x in segments] - labels4.append(labels) - segments4.extend(segments) - - # Concat/clip labels - labels4 = np.concatenate(labels4, 0) - for x in (labels4[:, 1:], *segments4): - np.clip(x, 0, 2 * s, out=x) # clip when using random_perspective() - # img4, labels4 = replicate(img4, labels4) # replicate - - # Augment - img4, labels4 = random_perspective(img4, labels4, segments4, - degrees=self.hyp['degrees'], - translate=self.hyp['translate'], - scale=self.hyp['scale'], - shear=self.hyp['shear'], - perspective=self.hyp['perspective'], - border=self.mosaic_border) # border to remove - - return img4, labels4 - - -def load_mosaic9(self, index): - # loads images in a 9-mosaic - - labels9, segments9 = [], [] - s = self.img_size - indices = [index] + random.choices(self.indices, k=8) # 8 additional image indices - for i, index in enumerate(indices): - # Load image - img, _, (h, w) = load_image(self, index) - - # place img in img9 - if i == 0: # center - img9 = np.full((s * 3, s * 3, img.shape[2]), 114, dtype=np.uint8) # base image with 4 tiles - h0, w0 = h, w - c = s, s, s + w, s + h # xmin, ymin, xmax, ymax (base) coordinates - elif i == 1: # top - c = s, s - h, s + w, s - elif i == 2: # top right - c = s + wp, s - h, s + wp + w, s - elif i == 3: # right - c = s + w0, s, s + w0 + w, s + h - elif i == 4: # bottom right - c = s + w0, s + hp, s + w0 + w, s + hp + h - elif i == 5: # bottom - c = s + w0 - w, s + h0, s + w0, s + h0 + h - elif i == 6: # bottom left - c = s + w0 - wp - w, s + h0, s + w0 - wp, s + h0 + h - elif i == 7: # left - c = s - w, s + h0 - h, s, s + h0 - elif i == 8: # top left - c = s - w, s + h0 - hp - h, s, s + h0 - hp - - padx, pady = c[:2] - x1, y1, x2, y2 = [max(x, 0) for x in c] # allocate coords - - # Labels - labels, segments = self.labels[index].copy(), self.segments[index].copy() - if labels.size: - labels[:, 1:] = xywhn2xyxy(labels[:, 1:], w, h, padx, pady) # normalized xywh to pixel xyxy format - segments = [xyn2xy(x, w, h, padx, pady) for x in segments] - labels9.append(labels) - segments9.extend(segments) - - # Image - img9[y1:y2, x1:x2] = img[y1 - pady:, x1 - padx:] # img9[ymin:ymax, xmin:xmax] - hp, wp = h, w # height, width previous - - # Offset - yc, xc = [int(random.uniform(0, s)) for _ in self.mosaic_border] # mosaic center x, y - img9 = img9[yc:yc + 2 * s, xc:xc + 2 * s] - - # Concat/clip labels - labels9 = np.concatenate(labels9, 0) - labels9[:, [1, 3]] -= xc - labels9[:, [2, 4]] -= yc - c = np.array([xc, yc]) # centers - segments9 = [x - c for x in 
segments9] - - for x in (labels9[:, 1:], *segments9): - np.clip(x, 0, 2 * s, out=x) # clip when using random_perspective() - # img9, labels9 = replicate(img9, labels9) # replicate - - # Augment - img9, labels9 = random_perspective(img9, labels9, segments9, - degrees=self.hyp['degrees'], - translate=self.hyp['translate'], - scale=self.hyp['scale'], - shear=self.hyp['shear'], - perspective=self.hyp['perspective'], - border=self.mosaic_border) # border to remove - - return img9, labels9 - - -def replicate(img, labels): - # Replicate labels - h, w = img.shape[:2] - boxes = labels[:, 1:].astype(int) - x1, y1, x2, y2 = boxes.T - s = ((x2 - x1) + (y2 - y1)) / 2 # side length (pixels) - for i in s.argsort()[:round(s.size * 0.5)]: # smallest indices - x1b, y1b, x2b, y2b = boxes[i] - bh, bw = y2b - y1b, x2b - x1b - yc, xc = int(random.uniform(0, h - bh)), int(random.uniform(0, w - bw)) # offset x, y - x1a, y1a, x2a, y2a = [xc, yc, xc + bw, yc + bh] - img[y1a:y2a, x1a:x2a] = img[y1b:y2b, x1b:x2b] # img4[ymin:ymax, xmin:xmax] - labels = np.append(labels, [[labels[i, 0], x1a, y1a, x2a, y2a]], axis=0) - - return img, labels - - -def letterbox(img, new_shape=(640, 640), color=(114, 114, 114), auto=True, scaleFill=False, scaleup=True, stride=32): - # Resize and pad image while meeting stride-multiple constraints - shape = img.shape[:2] # current shape [height, width] - if isinstance(new_shape, int): - new_shape = (new_shape, new_shape) - - # Scale ratio (new / old) - r = min(new_shape[0] / shape[0], new_shape[1] / shape[1]) - if not scaleup: # only scale down, do not scale up (for better test mAP) - r = min(r, 1.0) - - # Compute padding - ratio = r, r # width, height ratios - new_unpad = int(round(shape[1] * r)), int(round(shape[0] * r)) - dw, dh = new_shape[1] - new_unpad[0], new_shape[0] - new_unpad[1] # wh padding - if auto: # minimum rectangle - dw, dh = np.mod(dw, stride), np.mod(dh, stride) # wh padding - elif scaleFill: # stretch - dw, dh = 0.0, 0.0 - new_unpad = (new_shape[1], new_shape[0]) - ratio = new_shape[1] / shape[1], new_shape[0] / shape[0] # width, height ratios - - dw /= 2 # divide padding into 2 sides - dh /= 2 - - if shape[::-1] != new_unpad: # resize - img = cv2.resize(img, new_unpad, interpolation=cv2.INTER_LINEAR) - top, bottom = int(round(dh - 0.1)), int(round(dh + 0.1)) - left, right = int(round(dw - 0.1)), int(round(dw + 0.1)) - img = cv2.copyMakeBorder(img, top, bottom, left, right, cv2.BORDER_CONSTANT, value=color) # add border - return img, ratio, (dw, dh) - - -def random_perspective(img, targets=(), segments=(), degrees=10, translate=.1, scale=.1, shear=10, perspective=0.0, - border=(0, 0)): - # torchvision.transforms.RandomAffine(degrees=(-10, 10), translate=(.1, .1), scale=(.9, 1.1), shear=(-10, 10)) - # targets = [cls, xyxy] - - height = img.shape[0] + border[0] * 2 # shape(h,w,c) - width = img.shape[1] + border[1] * 2 - - # Center - C = np.eye(3) - C[0, 2] = -img.shape[1] / 2 # x translation (pixels) - C[1, 2] = -img.shape[0] / 2 # y translation (pixels) - - # Perspective - P = np.eye(3) - P[2, 0] = random.uniform(-perspective, perspective) # x perspective (about y) - P[2, 1] = random.uniform(-perspective, perspective) # y perspective (about x) - - # Rotation and Scale - R = np.eye(3) - a = random.uniform(-degrees, degrees) - # a += random.choice([-180, -90, 0, 90]) # add 90deg rotations to small rotations - s = random.uniform(1 - scale, 1 + scale) - # s = 2 ** random.uniform(-scale, scale) - R[:2] = cv2.getRotationMatrix2D(angle=a, center=(0, 0), scale=s) - - # Shear - S 
= np.eye(3) - S[0, 1] = math.tan(random.uniform(-shear, shear) * math.pi / 180) # x shear (deg) - S[1, 0] = math.tan(random.uniform(-shear, shear) * math.pi / 180) # y shear (deg) - - # Translation - T = np.eye(3) - T[0, 2] = random.uniform(0.5 - translate, 0.5 + translate) * width # x translation (pixels) - T[1, 2] = random.uniform(0.5 - translate, 0.5 + translate) * height # y translation (pixels) - - # Combined rotation matrix - M = T @ S @ R @ P @ C # order of operations (right to left) is IMPORTANT - if (border[0] != 0) or (border[1] != 0) or (M != np.eye(3)).any(): # image changed - if perspective: - img = cv2.warpPerspective(img, M, dsize=(width, height), borderValue=(114, 114, 114)) - else: # affine - img = cv2.warpAffine(img, M[:2], dsize=(width, height), borderValue=(114, 114, 114)) - - # Visualize - # import matplotlib.pyplot as plt - # ax = plt.subplots(1, 2, figsize=(12, 6))[1].ravel() - # ax[0].imshow(img[:, :, ::-1]) # base - # ax[1].imshow(img2[:, :, ::-1]) # warped - - # Transform label coordinates - n = len(targets) - if n: - use_segments = any(x.any() for x in segments) - new = np.zeros((n, 4)) - if use_segments: # warp segments - segments = resample_segments(segments) # upsample - for i, segment in enumerate(segments): - xy = np.ones((len(segment), 3)) - xy[:, :2] = segment - xy = xy @ M.T # transform - xy = xy[:, :2] / xy[:, 2:3] if perspective else xy[:, :2] # perspective rescale or affine - - # clip - new[i] = segment2box(xy, width, height) - - else: # warp boxes - xy = np.ones((n * 4, 3)) - xy[:, :2] = targets[:, [1, 2, 3, 4, 1, 4, 3, 2]].reshape(n * 4, 2) # x1y1, x2y2, x1y2, x2y1 - xy = xy @ M.T # transform - xy = (xy[:, :2] / xy[:, 2:3] if perspective else xy[:, :2]).reshape(n, 8) # perspective rescale or affine - - # create new boxes - x = xy[:, [0, 2, 4, 6]] - y = xy[:, [1, 3, 5, 7]] - new = np.concatenate((x.min(1), y.min(1), x.max(1), y.max(1))).reshape(4, n).T - - # clip - new[:, [0, 2]] = new[:, [0, 2]].clip(0, width) - new[:, [1, 3]] = new[:, [1, 3]].clip(0, height) - - # filter candidates - i = box_candidates(box1=targets[:, 1:5].T * s, box2=new.T, area_thr=0.01 if use_segments else 0.10) - targets = targets[i] - targets[:, 1:5] = new[i] - - return img, targets - - -def box_candidates(box1, box2, wh_thr=2, ar_thr=20, area_thr=0.1, eps=1e-16): # box1(4,n), box2(4,n) - # Compute candidate boxes: box1 before augment, box2 after augment, wh_thr (pixels), aspect_ratio_thr, area_ratio - w1, h1 = box1[2] - box1[0], box1[3] - box1[1] - w2, h2 = box2[2] - box2[0], box2[3] - box2[1] - ar = np.maximum(w2 / (h2 + eps), h2 / (w2 + eps)) # aspect ratio - return (w2 > wh_thr) & (h2 > wh_thr) & (w2 * h2 / (w1 * h1 + eps) > area_thr) & (ar < ar_thr) # candidates - - -def cutout(image, labels): - # Applies image cutout augmentation https://arxiv.org/abs/1708.04552 - h, w = image.shape[:2] - - def bbox_ioa(box1, box2): - # Returns the intersection over box2 area given box1, box2. box1 is 4, box2 is nx4. 
boxes are x1y1x2y2 - box2 = box2.transpose() - - # Get the coordinates of bounding boxes - b1_x1, b1_y1, b1_x2, b1_y2 = box1[0], box1[1], box1[2], box1[3] - b2_x1, b2_y1, b2_x2, b2_y2 = box2[0], box2[1], box2[2], box2[3] - - # Intersection area - inter_area = (np.minimum(b1_x2, b2_x2) - np.maximum(b1_x1, b2_x1)).clip(0) * \ - (np.minimum(b1_y2, b2_y2) - np.maximum(b1_y1, b2_y1)).clip(0) - - # box2 area - box2_area = (b2_x2 - b2_x1) * (b2_y2 - b2_y1) + 1e-16 - - # Intersection over box2 area - return inter_area / box2_area - - # create random masks - scales = [0.5] * 1 + [0.25] * 2 + [0.125] * 4 + [0.0625] * 8 + [0.03125] * 16 # image size fraction - for s in scales: - mask_h = random.randint(1, int(h * s)) - mask_w = random.randint(1, int(w * s)) - - # box - xmin = max(0, random.randint(0, w) - mask_w // 2) - ymin = max(0, random.randint(0, h) - mask_h // 2) - xmax = min(w, xmin + mask_w) - ymax = min(h, ymin + mask_h) - - # apply random color mask - image[ymin:ymax, xmin:xmax] = [random.randint(64, 191) for _ in range(3)] - - # return unobscured labels - if len(labels) and s > 0.03: - box = np.array([xmin, ymin, xmax, ymax], dtype=np.float32) - ioa = bbox_ioa(box, labels[:, 1:5]) # intersection over area - labels = labels[ioa < 0.60] # remove >60% obscured labels - - return labels - - def create_folder(path='./new'): # Create folder if os.path.exists(path): @@ -1001,23 +822,22 @@ def create_folder(path='./new'): os.makedirs(path) # make new output folder -def flatten_recursive(path='../coco128'): +def flatten_recursive(path=DATASETS_DIR / 'coco128'): # Flatten a recursive directory by bringing all files to top level - new_path = Path(path + '_flat') + new_path = Path(str(path) + '_flat') create_folder(new_path) for file in tqdm(glob.glob(str(Path(path)) + '/**/*.*', recursive=True)): shutil.copyfile(file, new_path / Path(file).name) -def extract_boxes(path='../coco128/'): # from utils.datasets import *; extract_boxes('../coco128') +def extract_boxes(path=DATASETS_DIR / 'coco128'): # from utils.datasets import *; extract_boxes() # Convert detection dataset into classification dataset, with one directory per class - path = Path(path) # images dir shutil.rmtree(path / 'classifier') if (path / 'classifier').is_dir() else None # remove existing files = list(path.rglob('*.*')) n = len(files) # number of files for im_file in tqdm(files, total=n): - if im_file.suffix[1:] in img_formats: + if im_file.suffix[1:] in IMG_FORMATS: # image im = cv2.imread(str(im_file))[..., ::-1] # BGR to RGB h, w = im.shape[:2] @@ -1025,7 +845,7 @@ def extract_boxes(path='../coco128/'): # from utils.datasets import *; extract_ # labels lb_file = Path(img2label_paths([str(im_file)])[0]) if Path(lb_file).exists(): - with open(lb_file, 'r') as f: + with open(lb_file) as f: lb = np.array([x.split() for x in f.read().strip().splitlines()], dtype=np.float32) # labels for j, x in enumerate(lb): @@ -1044,24 +864,179 @@ def extract_boxes(path='../coco128/'): # from utils.datasets import *; extract_ assert cv2.imwrite(str(f), im[b[1]:b[3], b[0]:b[2]]), f'box failure in {f}' -def autosplit(path='../coco128', weights=(0.9, 0.1, 0.0), annotated_only=False): +def autosplit(path=DATASETS_DIR / 'coco128/images', weights=(0.9, 0.1, 0.0), annotated_only=False): """ Autosplit a dataset into train/val/test splits and save path/autosplit_*.txt files - Usage: from utils.datasets import *; autosplit('../coco128') + Usage: from utils.datasets import *; autosplit() Arguments - path: Path to images directory - weights: Train, val, test weights 
(list) - annotated_only: Only use images with an annotated txt file + path: Path to images directory + weights: Train, val, test weights (list, tuple) + annotated_only: Only use images with an annotated txt file """ path = Path(path) # images dir - files = sum([list(path.rglob(f"*.{img_ext}")) for img_ext in img_formats], []) # image files only + files = sorted(x for x in path.rglob('*.*') if x.suffix[1:].lower() in IMG_FORMATS) # image files only n = len(files) # number of files + random.seed(0) # for reproducibility indices = random.choices([0, 1, 2], weights=weights, k=n) # assign each image to a split txt = ['autosplit_train.txt', 'autosplit_val.txt', 'autosplit_test.txt'] # 3 txt files - [(path / x).unlink() for x in txt if (path / x).exists()] # remove existing + [(path.parent / x).unlink(missing_ok=True) for x in txt] # remove existing print(f'Autosplitting images from {path}' + ', using *.txt labeled images only' * annotated_only) for i, img in tqdm(zip(indices, files), total=n): if not annotated_only or Path(img2label_paths([str(img)])[0]).exists(): # check label - with open(path / txt[i], 'a') as f: - f.write(str(img) + '\n') # add image to txt file + with open(path.parent / txt[i], 'a') as f: + f.write('./' + img.relative_to(path.parent).as_posix() + '\n') # add image to txt file + + +def verify_image_label(args): + # Verify one image-label pair + im_file, lb_file, prefix = args + nm, nf, ne, nc, msg, segments = 0, 0, 0, 0, '', [] # number (missing, found, empty, corrupt), message, segments + try: + # verify images + im = Image.open(im_file) + im.verify() # PIL verify + shape = exif_size(im) # image size + assert (shape[0] > 9) & (shape[1] > 9), f'image size {shape} <10 pixels' + assert im.format.lower() in IMG_FORMATS, f'invalid image format {im.format}' + if im.format.lower() in ('jpg', 'jpeg'): + with open(im_file, 'rb') as f: + f.seek(-2, 2) + if f.read() != b'\xff\xd9': # corrupt JPEG + ImageOps.exif_transpose(Image.open(im_file)).save(im_file, 'JPEG', subsampling=0, quality=100) + msg = f'{prefix}WARNING: {im_file}: corrupt JPEG restored and saved' + + # verify labels + if os.path.isfile(lb_file): + nf = 1 # label found + with open(lb_file) as f: + lb = [x.split() for x in f.read().strip().splitlines() if len(x)] + if any(len(x) > 6 for x in lb): # is segment + classes = np.array([x[0] for x in lb], dtype=np.float32) + segments = [np.array(x[1:], dtype=np.float32).reshape(-1, 2) for x in lb] # (cls, xy1...) 
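+ # each label row holds either 5 values (cls, x, y, w, h) for a box or 1 + 2k
+ # values (cls, x1, y1, ..., xk, yk) for a k-point polygon, e.g. '45 0.5 0.5 0.2 0.3'
+ # vs. '45 0.1 0.1 0.9 0.1 0.5 0.9' (illustrative rows); the len(x) > 6 check above
+ # flags polygon rows, and segments2boxes() below collapses them back to (cls, xywh)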
+ lb = np.concatenate((classes.reshape(-1, 1), segments2boxes(segments)), 1) # (cls, xywh) + lb = np.array(lb, dtype=np.float32) + nl = len(lb) + if nl: + assert lb.shape[1] == 5, f'labels require 5 columns, {lb.shape[1]} columns detected' + assert (lb >= 0).all(), f'negative label values {lb[lb < 0]}' + assert (lb[:, 1:] <= 1).all(), f'non-normalized or out of bounds coordinates {lb[:, 1:][lb[:, 1:] > 1]}' + _, i = np.unique(lb, axis=0, return_index=True) + if len(i) < nl: # duplicate row check + lb = lb[i] # remove duplicates + if segments: + segments = segments[i] + msg = f'{prefix}WARNING: {im_file}: {nl - len(i)} duplicate labels removed' + else: + ne = 1 # label empty + lb = np.zeros((0, 5), dtype=np.float32) + else: + nm = 1 # label missing + lb = np.zeros((0, 5), dtype=np.float32) + return im_file, lb, shape, segments, nm, nf, ne, nc, msg + except Exception as e: + nc = 1 + msg = f'{prefix}WARNING: {im_file}: ignoring corrupt image/label: {e}' + return [None, None, None, None, nm, nf, ne, nc, msg] + + +def dataset_stats(path='coco128.yaml', autodownload=False, verbose=False, profile=False, hub=False): + """ Return dataset statistics dictionary with images and instances counts per split per class + To run in parent directory: export PYTHONPATH="$PWD/yolov5" + Usage1: from utils.datasets import *; dataset_stats('coco128.yaml', autodownload=True) + Usage2: from utils.datasets import *; dataset_stats('path/to/coco128_with_yaml.zip') + Arguments + path: Path to data.yaml or data.zip (with data.yaml inside data.zip) + autodownload: Attempt to download dataset if not found locally + verbose: Print stats dictionary + """ + + def round_labels(labels): + # Update labels to integer class and 6 decimal place floats + return [[int(c), *(round(x, 4) for x in points)] for c, *points in labels] + + def unzip(path): + # Unzip data.zip TODO: CONSTRAINT: path/to/abc.zip MUST unzip to 'path/to/abc/' + if str(path).endswith('.zip'): # path is data.zip + assert Path(path).is_file(), f'Error unzipping {path}, file not found' + ZipFile(path).extractall(path=path.parent) # unzip + dir = path.with_suffix('') # dataset directory == zip name + return True, str(dir), next(dir.rglob('*.yaml')) # zipped, data_dir, yaml_path + else: # path is data.yaml + return False, None, path + + def hub_ops(f, max_dim=1920): + # HUB ops for 1 image 'f': resize and save at reduced quality in /dataset-hub for web/app viewing + f_new = im_dir / Path(f).name # dataset-hub image filename + try: # use PIL + im = Image.open(f) + r = max_dim / max(im.height, im.width) # ratio + if r < 1.0: # image too large + im = im.resize((int(im.width * r), int(im.height * r))) + im.save(f_new, 'JPEG', quality=75, optimize=True) # save + except Exception as e: # use OpenCV + print(f'WARNING: HUB ops PIL failure {f}: {e}') + im = cv2.imread(f) + im_height, im_width = im.shape[:2] + r = max_dim / max(im_height, im_width) # ratio + if r < 1.0: # image too large + im = cv2.resize(im, (int(im_width * r), int(im_height * r)), interpolation=cv2.INTER_AREA) + cv2.imwrite(str(f_new), im) + + zipped, data_dir, yaml_path = unzip(Path(path)) + with open(check_yaml(yaml_path), errors='ignore') as f: + data = yaml.safe_load(f) # data dict + if zipped: + data['path'] = data_dir # TODO: should this be dir.resolve()? 
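+ # with the dataset root repointed at the extracted directory, check_dataset()
+ # below resolves the train/val/test entries against data['path'] and only
+ # attempts a download when those resolved paths are missing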
+ check_dataset(data, autodownload) # download dataset if missing + hub_dir = Path(data['path'] + ('-hub' if hub else '')) + stats = {'nc': data['nc'], 'names': data['names']} # statistics dictionary + for split in 'train', 'val', 'test': + if data.get(split) is None: + stats[split] = None # i.e. no test set + continue + x = [] + dataset = LoadImagesAndLabels(data[split]) # load dataset + for label in tqdm(dataset.labels, total=dataset.n, desc='Statistics'): + x.append(np.bincount(label[:, 0].astype(int), minlength=data['nc'])) + x = np.array(x) # shape(128x80) + stats[split] = {'instance_stats': {'total': int(x.sum()), 'per_class': x.sum(0).tolist()}, + 'image_stats': {'total': dataset.n, 'unlabelled': int(np.all(x == 0, 1).sum()), + 'per_class': (x > 0).sum(0).tolist()}, + 'labels': [{str(Path(k).name): round_labels(v.tolist())} for k, v in + zip(dataset.im_files, dataset.labels)]} + + if hub: + im_dir = hub_dir / 'images' + im_dir.mkdir(parents=True, exist_ok=True) + for _ in tqdm(ThreadPool(NUM_THREADS).imap(hub_ops, dataset.im_files), total=dataset.n, desc='HUB Ops'): + pass + + # Profile + stats_path = hub_dir / 'stats.json' + if profile: + for _ in range(1): + file = stats_path.with_suffix('.npy') + t1 = time.time() + np.save(file, stats) + t2 = time.time() + x = np.load(file, allow_pickle=True) + print(f'stats.npy times: {time.time() - t2:.3f}s read, {t2 - t1:.3f}s write') + + file = stats_path.with_suffix('.json') + t1 = time.time() + with open(file, 'w') as f: + json.dump(stats, f) # save stats *.json + t2 = time.time() + with open(file) as f: + x = json.load(f) # load hyps dict + print(f'stats.json times: {time.time() - t2:.3f}s read, {t2 - t1:.3f}s write') + + # Save, print and return + if hub: + print(f'Saving {stats_path.resolve()}...') + with open(stats_path, 'w') as f: + json.dump(stats, f) # save stats.json + if verbose: + print(json.dumps(stats, indent=2, sort_keys=False)) + return stats diff --git a/utils/google_utils.py b/utils/downloads.py similarity index 55% rename from utils/google_utils.py rename to utils/downloads.py index 5c5f52170268..714ffb2a0452 100644 --- a/utils/google_utils.py +++ b/utils/downloads.py @@ -1,10 +1,15 @@ -# Google utils: https://cloud.google.com/storage/docs/reference/libraries +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license +""" +Download utils +""" import os import platform import subprocess import time +import urllib from pathlib import Path +from zipfile import ZipFile import requests import torch @@ -16,52 +21,70 @@ def gsutil_getsize(url=''): return eval(s.split(' ')[0]) if len(s) else 0 # bytes -def attempt_download(file, repo='ultralytics/yolov5'): - if not isinstance(file, str) or file.startswith("zoo:"): +def safe_download(file, url, url2=None, min_bytes=1E0, error_msg=''): + # Attempts to download file from url or url2, checks and removes incomplete downloads < min_bytes + file = Path(file) + assert_msg = f"Downloaded file '{file}' does not exist or size is < min_bytes={min_bytes}" + try: # url1 + print(f'Downloading {url} to {file}...') + torch.hub.download_url_to_file(url, str(file)) + assert file.exists() and file.stat().st_size > min_bytes, assert_msg # check + except Exception as e: # url2 + file.unlink(missing_ok=True) # remove partial downloads + print(f'ERROR: {e}\nRe-attempting {url2 or url} to {file}...') + os.system(f"curl -L '{url2 or url}' -o '{file}' --retry 3 -C -") # curl download, retry and resume on fail + finally: + if not file.exists() or file.stat().st_size < min_bytes: # check + file.unlink(missing_ok=True) # 
remove partial downloads + print(f"ERROR: {assert_msg}\n{error_msg}") + print('') + + +def attempt_download(file, repo='ultralytics/yolov5'): # from utils.downloads import *; attempt_download() + # Attempt file download if does not exist + if not isinstance(file, (Path, str)) or str(file).startswith("zoo:"): return - # Attempt file download if does not exist file = Path(str(file).strip().replace("'", '')) if not file.exists(): + # URL specified + name = Path(urllib.parse.unquote(str(file))).name # decode '%2F' to '/' etc. + if str(file).startswith(('http:/', 'https:/')): # download + url = str(file).replace(':/', '://') # Pathlib turns :// -> :/ + file = name.split('?')[0] # parse authentication https://url.com/file.txt?auth... + if Path(file).is_file(): + print(f'Found {url} locally at {file}') # file already exists + else: + safe_download(file=file, url=url, min_bytes=1E5) + return file + + # GitHub assets file.parent.mkdir(parents=True, exist_ok=True) # make parent dir (if required) try: response = requests.get(f'https://github.com/gitapi/repos/{repo}/releases/latest').json() # github api assets = [x['name'] for x in response['assets']] # release assets, i.e. ['yolov5s.pt', 'yolov5m.pt', ...] tag = response['tag_name'] # i.e. 'v1.0' - except: # fallback plan - assets = ['yolov5s.pt', 'yolov5m.pt', 'yolov5l.pt', 'yolov5x.pt', - 'yolov5s6.pt', 'yolov5m6.pt', 'yolov5l6.pt', 'yolov5x6.pt'] + except Exception: # fallback plan + assets = ['yolov5n.pt', 'yolov5s.pt', 'yolov5m.pt', 'yolov5l.pt', 'yolov5x.pt', + 'yolov5n6.pt', 'yolov5s6.pt', 'yolov5m6.pt', 'yolov5l6.pt', 'yolov5x6.pt'] try: tag = subprocess.check_output('git tag', shell=True, stderr=subprocess.STDOUT).decode().split()[-1] - except: - tag = 'v5.0' # current release + except Exception: + tag = 'v6.0' # current release - name = file.name if name in assets: - msg = f'{file} missing, try downloading from https://github.com/{repo}/releases/' - redundant = False # second download option - try: # GitHub - url = f'https://github.com/{repo}/releases/download/{tag}/{name}' - print(f'Downloading {url} to {file}...') - torch.hub.download_url_to_file(url, file) - assert file.exists() and file.stat().st_size > 1E6 # check - except Exception as e: # GCP - print(f'Download error: {e}') - assert redundant, 'No secondary mirror' - url = f'https://storage.googleapis.com/{repo}/ckpt/{name}' - print(f'Downloading {url} to {file}...') - os.system(f"curl -L '{url}' -o '{file}' --retry 3 -C -") # curl download, retry and resume on fail - finally: - if not file.exists() or file.stat().st_size < 1E6: # check - file.unlink(missing_ok=True) # remove partial downloads - print(f'ERROR: Download failure: {msg}') - print('') - return + safe_download(file, + url=f'https://github.com/{repo}/releases/download/{tag}/{name}', + # url2=f'https://storage.googleapis.com/{repo}/ckpt/{name}', # backup url (optional) + min_bytes=1E5, + error_msg=f'{file} missing, try downloading from https://github.com/{repo}/releases/') + + return str(file) def gdrive_download(id='16TiPfZj7htmTyhntwcZyEEAejOUxuT6m', file='tmp.zip'): - # Downloads a file from Google Drive. from yolov5.utils.google_utils import *; gdrive_download() + # Downloads a file from Google Drive. from yolov5.utils.downloads import *; gdrive_download() t = time.time() file = Path(file) cookie = Path('cookie') # gdrive cookie @@ -88,8 +111,8 @@ def gdrive_download(id='16TiPfZj7htmTyhntwcZyEEAejOUxuT6m', file='tmp.zip'): # Unzip if archive if file.suffix == '.zip': print('unzipping... 
', end='') - os.system(f'unzip -q {file}') # unzip - file.unlink() # remove zip to free space + ZipFile(file).extractall(path=file.parent) # unzip + file.unlink() # remove zip print(f'Done ({time.time() - t:.1f}s)') return r @@ -102,6 +125,9 @@ def get_token(cookie="./cookie"): return line.split()[-1] return "" +# Google utils: https://cloud.google.com/storage/docs/reference/libraries ---------------------------------------------- +# +# # def upload_blob(bucket_name, source_file_name, destination_blob_name): # # Uploads a file to a bucket # # https://cloud.google.com/storage/docs/uploading-objects#storage-upload-object-python diff --git a/utils/flask_rest_api/README.md b/utils/flask_rest_api/README.md index 324c2416dcd9..a726acbd9204 100644 --- a/utils/flask_rest_api/README.md +++ b/utils/flask_rest_api/README.md @@ -1,9 +1,13 @@ # Flask REST API -[REST](https://en.wikipedia.org/wiki/Representational_state_transfer) [API](https://en.wikipedia.org/wiki/API)s are commonly used to expose Machine Learning (ML) models to other services. This folder contains an example REST API created using Flask to expose the YOLOv5s model from [PyTorch Hub](https://pytorch.org/hub/ultralytics_yolov5/). + +[REST](https://en.wikipedia.org/wiki/Representational_state_transfer) [API](https://en.wikipedia.org/wiki/API)s are +commonly used to expose Machine Learning (ML) models to other services. This folder contains an example REST API +created using Flask to expose the YOLOv5s model from [PyTorch Hub](https://pytorch.org/hub/ultralytics_yolov5/). ## Requirements [Flask](https://palletsprojects.com/p/flask/) is required. Install with: + ```shell $ pip install Flask ``` @@ -19,7 +23,7 @@ $ python3 restapi.py --port 5000 Then use [curl](https://curl.se/) to perform a request: ```shell -$ curl -X POST -F image=@zidane.jpg 'http://localhost:5000/v1/object-detection/yolov5s'` +$ curl -X POST -F image=@zidane.jpg 'http://localhost:5000/v1/object-detection/yolov5s' ``` The model inference results are returned as a JSON response: @@ -65,4 +69,5 @@ The model inference results are returned as a JSON response: ] ``` -An example python script to perform inference using [requests](https://docs.python-requests.org/en/master/) is given in `example_request.py` +An example python script to perform inference using [requests](https://docs.python-requests.org/en/master/) is given +in `example_request.py` diff --git a/utils/flask_rest_api/restapi.py b/utils/flask_rest_api/restapi.py index a54e2309715c..b93ad16a0f58 100644 --- a/utils/flask_rest_api/restapi.py +++ b/utils/flask_rest_api/restapi.py @@ -5,8 +5,8 @@ import io import torch -from PIL import Image from flask import Flask, request +from PIL import Image app = Flask(__name__) diff --git a/utils/general.py b/utils/general.py index 7e0ac772bb03..dcdbf95ddca1 100755 --- a/utils/general.py +++ b/utils/general.py @@ -1,5 +1,9 @@ -# YOLOv5 general utils +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license +""" +General utils +""" +import contextlib import glob import logging import math @@ -7,11 +11,16 @@ import platform import random import re -import subprocess +import shutil +import signal import time +import urllib +from datetime import datetime from itertools import repeat from multiprocessing.pool import ThreadPool from pathlib import Path +from subprocess import check_output +from zipfile import ZipFile import cv2 import numpy as np @@ -21,29 +30,157 @@ import torchvision import yaml -from utils.google_utils import gsutil_getsize -from utils.metrics import fitness -from 
utils.torch_utils import init_torch_seeds +from utils.downloads import gsutil_getsize +from utils.metrics import box_iou, fitness # Settings +FILE = Path(__file__).resolve() +ROOT = FILE.parents[1] # YOLOv5 root directory +DATASETS_DIR = ROOT.parent / 'datasets' # YOLOv5 datasets directory +NUM_THREADS = min(8, max(1, os.cpu_count() - 1)) # number of YOLOv5 multiprocessing threads +VERBOSE = str(os.getenv('YOLOv5_VERBOSE', True)).lower() == 'true' # global verbose mode +FONT = 'Arial.ttf' # https://ultralytics.com/assets/Arial.ttf + torch.set_printoptions(linewidth=320, precision=5, profile='long') np.set_printoptions(linewidth=320, formatter={'float_kind': '{:11.5g}'.format}) # format short g, %precision=5 pd.options.display.max_columns = 10 cv2.setNumThreads(0) # prevent OpenCV from multithreading (incompatible with PyTorch DataLoader) -os.environ['NUMEXPR_MAX_THREADS'] = str(min(os.cpu_count(), 8)) # NumExpr max threads +os.environ['NUMEXPR_MAX_THREADS'] = str(NUM_THREADS) # NumExpr max threads +os.environ['OMP_NUM_THREADS'] = str(NUM_THREADS) # OpenMP max threads (PyTorch and SciPy) + + +def is_kaggle(): + # Is environment a Kaggle Notebook? + try: + assert os.environ.get('PWD') == '/kaggle/working' + assert os.environ.get('KAGGLE_URL_BASE') == 'https://www.kaggle.com' + return True + except AssertionError: + return False + + +def is_writeable(dir, test=False): + # Return True if directory has write permissions, test opening a file with write permissions if test=True + if test: # method 1 + file = Path(dir) / 'tmp.txt' + try: + with open(file, 'w'): # open file with write permissions + pass + file.unlink() # remove file + return True + except OSError: + return False + else: # method 2 + return os.access(dir, os.R_OK) # possible issues on Windows + + +def set_logging(name=None, verbose=VERBOSE): + # Sets level and returns logger + if is_kaggle(): + for h in logging.root.handlers: + logging.root.removeHandler(h) # remove all handlers associated with the root logger object + rank = int(os.getenv('RANK', -1)) # rank in world for Multi-GPU trainings + logging.basicConfig(format="%(message)s", level=logging.INFO if (verbose and rank in (-1, 0)) else logging.WARNING) + return logging.getLogger(name) + + +LOGGER = set_logging('yolov5') # define globally (used in train.py, val.py, detect.py, etc.) + + +def user_config_dir(dir='Ultralytics', env_var='YOLOV5_CONFIG_DIR'): + # Return path of user configuration directory. Prefer environment variable if exists. Make dir if required. 
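+ # resolution order: $YOLOV5_CONFIG_DIR if set, else the per-OS config dir
+ # (AppData / .config / Library), falling back to /tmp when not writeable,
+ # e.g. user_config_dir() -> ~/.config/Ultralytics on Linux (illustrative)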
+ env = os.getenv(env_var) + if env: + path = Path(env) # use environment variable + else: + cfg = {'Windows': 'AppData/Roaming', 'Linux': '.config', 'Darwin': 'Library/Application Support'} # 3 OS dirs + path = Path.home() / cfg.get(platform.system(), '') # OS-specific config dir + path = (path if is_writeable(path) else Path('/tmp')) / dir # GCP and AWS lambda fix, only /tmp is writeable + path.mkdir(exist_ok=True) # make if required + return path + + +CONFIG_DIR = user_config_dir() # Ultralytics settings dir + + +class Profile(contextlib.ContextDecorator): + # Usage: @Profile() decorator or 'with Profile():' context manager + def __enter__(self): + self.start = time.time() + + def __exit__(self, type, value, traceback): + print(f'Profile results: {time.time() - self.start:.5f}s') + + +class Timeout(contextlib.ContextDecorator): + # Usage: @Timeout(seconds) decorator or 'with Timeout(seconds):' context manager + def __init__(self, seconds, *, timeout_msg='', suppress_timeout_errors=True): + self.seconds = int(seconds) + self.timeout_message = timeout_msg + self.suppress = bool(suppress_timeout_errors) + + def _timeout_handler(self, signum, frame): + raise TimeoutError(self.timeout_message) + + def __enter__(self): + if platform.system() != 'Windows': # not supported on Windows + signal.signal(signal.SIGALRM, self._timeout_handler) # Set handler for SIGALRM + signal.alarm(self.seconds) # start countdown for SIGALRM to be raised + + def __exit__(self, exc_type, exc_val, exc_tb): + if platform.system() != 'Windows': + signal.alarm(0) # Cancel SIGALRM if it's scheduled + if self.suppress and exc_type is TimeoutError: # Suppress TimeoutError + return True -def set_logging(rank=-1, verbose=True): - logging.basicConfig( - format="%(message)s", - level=logging.INFO if (verbose and rank in [-1, 0]) else logging.WARN) +class WorkingDirectory(contextlib.ContextDecorator): + # Usage: @WorkingDirectory(dir) decorator or 'with WorkingDirectory(dir):' context manager + def __init__(self, new_dir): + self.dir = new_dir # new dir + self.cwd = Path.cwd().resolve() # current dir + + def __enter__(self): + os.chdir(self.dir) + + def __exit__(self, exc_type, exc_val, exc_tb): + os.chdir(self.cwd) + + +def try_except(func): + # try-except function. 
Usage: @try_except decorator + def handler(*args, **kwargs): + try: + func(*args, **kwargs) + except Exception as e: + print(e) + + return handler + + +def methods(instance): + # Get class/instance methods + return [f for f in dir(instance) if callable(getattr(instance, f)) and not f.startswith("__")] + + +def print_args(name, opt): + # Print argparser arguments + LOGGER.info(colorstr(f'{name}: ') + ', '.join(f'{k}={v}' for k, v in vars(opt).items())) def init_seeds(seed=0): - # Initialize random number generator (RNG) seeds + # Initialize random number generator (RNG) seeds https://pytorch.org/docs/stable/notes/randomness.html + # cudnn seed 0 settings are slower and more reproducible, else faster and less reproducible + import torch.backends.cudnn as cudnn random.seed(seed) np.random.seed(seed) - init_torch_seeds(seed) + torch.manual_seed(seed) + cudnn.benchmark, cudnn.deterministic = (False, True) if seed == 0 else (True, False) + + +def intersect_dicts(da, db, exclude=()): + # Dictionary intersection of matching keys and shapes, omitting 'exclude' keys, using da values + return {k: v for k, v in da.items() if k in db and not any(x in k for x in exclude) and v.shape == db[k].shape} def get_latest_run(search_dir='.'): @@ -53,107 +190,169 @@ def get_latest_run(search_dir='.'): def is_docker(): - # Is environment a Docker container + # Is environment a Docker container? return Path('/workspace').exists() # or Path('/.dockerenv').exists() def is_colab(): - # Is environment a Google Colab instance + # Is environment a Google Colab instance? try: import google.colab return True - except Exception as e: + except ImportError: return False +def is_pip(): + # Is file in a pip package? + return 'site-packages' in Path(__file__).resolve().parts + + +def is_ascii(s=''): + # Is string composed of all ASCII (no UTF) characters? (note str().isascii() introduced in python 3.7) + s = str(s) # convert list, tuple, None, etc. to str + return len(s.encode().decode('ascii', 'ignore')) == len(s) + + +def is_chinese(s='δΊΊε·₯智能'): + # Is string composed of any Chinese characters? + return True if re.search('[\u4e00-\u9fff]', str(s)) else False + + def emojis(str=''): # Return platform-dependent emoji-safe version of string return str.encode().decode('ascii', 'ignore') if platform.system() == 'Windows' else str -def file_size(file): - # Return file size in MB - return Path(file).stat().st_size / 1e6 +def file_age(path=__file__): + # Return days since last file update + dt = (datetime.now() - datetime.fromtimestamp(Path(path).stat().st_mtime)) # delta + return dt.days # + dt.seconds / 86400 # fractional days + + +def file_update_date(path=__file__): + # Return human-readable file modification date, i.e. '2021-3-26' + t = datetime.fromtimestamp(Path(path).stat().st_mtime) + return f'{t.year}-{t.month}-{t.day}' + + +def file_size(path): + # Return file/dir size (MB) + mb = 1 << 20 # bytes to MiB (1024 ** 2) + path = Path(path) + if path.is_file(): + return path.stat().st_size / mb + elif path.is_dir(): + return sum(f.stat().st_size for f in path.glob('**/*') if f.is_file()) / mb + else: + return 0.0 def check_online(): # Check internet connectivity import socket try: - socket.create_connection(("1.1.1.1", 443), 5) # check host accesability + socket.create_connection(("1.1.1.1", 443), 5) # check host accessibility return True except OSError: return False +def git_describe(path=ROOT): # path must be a directory + # Return human-readable git description, i.e. 
v5.0-5-g3e25f1e https://git-scm.com/docs/git-describe + try: + return check_output(f'git -C {path} describe --tags --long --always', shell=True).decode()[:-1] + except Exception: + return '' + + +@try_except +@WorkingDirectory(ROOT) def check_git_status(): # Recommend 'git pull' if code is out of date - print(colorstr('github: '), end='') - try: - assert Path('.git').exists(), 'skipping check (not a git repository)' - assert not is_docker(), 'skipping check (Docker image)' - assert check_online(), 'skipping check (offline)' - - cmd = 'git fetch && git config --get remote.origin.url' - url = subprocess.check_output(cmd, shell=True).decode().strip().rstrip('.git') # github repo url - branch = subprocess.check_output('git rev-parse --abbrev-ref HEAD', shell=True).decode().strip() # checked out - n = int(subprocess.check_output(f'git rev-list {branch}..origin/master --count', shell=True)) # commits behind - if n > 0: - s = f"⚠️ WARNING: code is out of date by {n} commit{'s' * (n > 1)}. " \ - f"Use 'git pull' to update or 'git clone {url}' to download latest." - else: - s = f'up to date with {url} βœ…' - print(emojis(s)) # emoji-safe - except Exception as e: - print(e) + msg = ', for updates see https://github.com/ultralytics/yolov5' + s = colorstr('github: ') # string + assert Path('.git').exists(), s + 'skipping check (not a git repository)' + msg + assert not is_docker(), s + 'skipping check (Docker image)' + msg + assert check_online(), s + 'skipping check (offline)' + msg + + cmd = 'git fetch && git config --get remote.origin.url' + url = check_output(cmd, shell=True, timeout=5).decode().strip().rstrip('.git') # git fetch + branch = check_output('git rev-parse --abbrev-ref HEAD', shell=True).decode().strip() # checked out + n = int(check_output(f'git rev-list {branch}..origin/master --count', shell=True)) # commits behind + if n > 0: + s += f"⚠️ YOLOv5 is out of date by {n} commit{'s' * (n > 1)}. Use `git pull` or `git clone {url}` to update." + else: + s += f'up to date with {url} βœ…' + LOGGER.info(emojis(s)) # emoji-safe -def check_python(minimum='3.7.0', required=True): +def check_python(minimum='3.6.2'): # Check current python version vs. required python version - current = platform.python_version() - result = pkg.parse_version(current) >= pkg.parse_version(minimum) - if required: - assert result, f'Python {minimum} required by YOLOv5, but Python {current} is currently installed' + check_version(platform.python_version(), minimum, name='Python ', hard=True) + + +def check_version(current='0.0.0', minimum='0.0.0', name='version ', pinned=False, hard=False, verbose=False): + # Check version vs. 
required version + current, minimum = (pkg.parse_version(x) for x in (current, minimum)) + result = (current == minimum) if pinned else (current >= minimum) # bool + s = f'{name}{minimum} required by YOLOv5, but {name}{current} is currently installed' # string + if hard: + assert result, s # assert min requirements met + if verbose and not result: + LOGGER.warning(s) return result -def check_requirements(requirements='requirements.txt', exclude=()): +@try_except +def check_requirements(requirements=ROOT / 'requirements.txt', exclude=(), install=True): # Check installed dependencies meet requirements (pass *.txt file or list of packages) prefix = colorstr('red', 'bold', 'requirements:') check_python() # check python version if isinstance(requirements, (str, Path)): # requirements.txt file file = Path(requirements) - if not file.exists(): - print(f"{prefix} {file.resolve()} not found, check failed.") - return - requirements = [f'{x.name}{x.specifier}' for x in pkg.parse_requirements(file.open()) if x.name not in exclude] + assert file.exists(), f"{prefix} {file.resolve()} not found, check failed." + with file.open() as f: + requirements = [f'{x.name}{x.specifier}' for x in pkg.parse_requirements(f) if x.name not in exclude] else: # list or tuple of packages requirements = [x for x in requirements if x not in exclude] n = 0 # number of packages updates for r in requirements: + if r.startswith("sparseml"): + version = r.split("sparseml")[1] + if pkg.working_set.find(pkg.Requirement("sparseml-nightly" + version)): + continue try: pkg.require(r) - except Exception as e: # DistributionNotFound or VersionConflict if requirements not met - n += 1 - print(f"{prefix} {r} not found and is required by YOLOv5, attempting auto-update...") - try: - print(subprocess.check_output(f"pip install '{r}'", shell=True).decode()) - except Exception as e: - print(f'{prefix} {e}') + except Exception: # DistributionNotFound or VersionConflict if requirements not met + s = f"{prefix} {r} not found and is required by YOLOv5" + if install: + LOGGER.info(f"{s}, attempting auto-update...") + try: + assert check_online(), f"'pip install {r}' skipped (offline)" + LOGGER.info(check_output(f"pip install '{r}'", shell=True).decode()) + n += 1 + except Exception as e: + LOGGER.warning(f'{prefix} {e}') + else: + LOGGER.info(f'{s}. Please install and rerun your command.') if n: # if packages updated source = file.resolve() if 'file' in locals() else requirements s = f"{prefix} {n} package{'s' * (n > 1)} updated per {source}\n" \ f"{prefix} ⚠️ {colorstr('bold', 'Restart runtime or rerun command for updates to take effect')}\n" - print(emojis(s)) # emoji-safe + LOGGER.info(emojis(s)) -def check_img_size(img_size, s=32): - # Verify img_size is a multiple of stride s - new_size = make_divisible(img_size, int(s)) # ceil gs-multiple - if new_size != img_size: - print('WARNING: --img-size %g must be multiple of max stride %g, updating to %g' % (img_size, s, new_size)) +def check_img_size(imgsz, s=32, floor=0): + # Verify image size is a multiple of stride s in each dimension + if isinstance(imgsz, int): # integer i.e. img_size=640 + new_size = max(make_divisible(imgsz, int(s)), floor) + else: # list i.e. 
img_size=[640, 480] + new_size = [max(make_divisible(x, int(s)), floor) for x in imgsz] + if new_size != imgsz: + LOGGER.warning(f'WARNING: --img-size {imgsz} must be multiple of max stride {s}, updating to {new_size}') return new_size @@ -168,64 +367,146 @@ def check_imshow(): cv2.waitKey(1) return True except Exception as e: - print(f'WARNING: Environment does not support cv2.imshow() or PIL Image.show() image displays\n{e}') + LOGGER.warning(f'WARNING: Environment does not support cv2.imshow() or PIL Image.show() image displays\n{e}') return False -def check_file(file): - # Search for file if not found - if Path(file).is_file() or file == '': +def check_suffix(file='yolov5s.pt', suffix=('.pt',), msg=''): + # Check file(s) for acceptable suffix + if file and suffix: + if isinstance(suffix, str): + suffix = [suffix] + for f in file if isinstance(file, (list, tuple)) else [file]: + s = Path(f).suffix.lower() # file suffix + if len(s): + assert s in suffix, f"{msg}{f} acceptable suffix is {suffix}" + + +def check_yaml(file, suffix=('.yaml', '.yml')): + # Search/download YAML file (if necessary) and return path, checking suffix + return check_file(file, suffix) + + +def check_file(file, suffix=''): + # Search/download file (if necessary) and return path + check_suffix(file, suffix) # optional + file = str(file) # convert to str() + if Path(file).is_file() or file == '': # exists return file - else: - files = glob.glob('./**/' + file, recursive=True) # find file - assert len(files), f'File Not Found: {file}' # assert file was found + elif file.startswith(('http:/', 'https:/')): # download + url = str(Path(file)).replace(':/', '://') # Pathlib turns :// -> :/ + file = Path(urllib.parse.unquote(file).split('?')[0]).name # '%2F' to '/', split https://url.com/file.txt?auth + if Path(file).is_file(): + LOGGER.info(f'Found {url} locally at {file}') # file already exists + else: + LOGGER.info(f'Downloading {url} to {file}...') + torch.hub.download_url_to_file(url, file) + assert Path(file).exists() and Path(file).stat().st_size > 0, f'File download failed: {url}' # check + return file + else: # search + files = [] + for d in 'data', 'models', 'utils': # search directories + files.extend(glob.glob(str(ROOT / d / '**' / file), recursive=True)) # find file + assert len(files), f'File not found: {file}' # assert file was found assert len(files) == 1, f"Multiple files match '{file}', specify exact path: {files}" # assert unique return files[0] # return file -def check_dataset(dict): - # Download dataset if not found locally - val, s = dict.get('val'), dict.get('download') - if val and len(val): +def check_font(font=FONT): + # Download font to CONFIG_DIR if necessary + font = Path(font) + if not font.exists() and not (CONFIG_DIR / font.name).exists(): + url = "https://ultralytics.com/assets/" + font.name + LOGGER.info(f'Downloading {url} to {CONFIG_DIR / font.name}...') + torch.hub.download_url_to_file(url, str(font), progress=False) + + +def check_dataset(data, autodownload=True): + # Download and/or unzip dataset if not found locally + # Usage: https://github.com/ultralytics/yolov5/releases/download/v1.0/coco128_with_yaml.zip + + # Download (optional) + extract_dir = '' + if isinstance(data, (str, Path)) and str(data).endswith('.zip'): # i.e. 
gs://bucket/dir/coco128.zip + download(data, dir=DATASETS_DIR, unzip=True, delete=False, curl=False, threads=1) + data = next((DATASETS_DIR / Path(data).stem).rglob('*.yaml')) + extract_dir, autodownload = data.parent, False + + # Read yaml (optional) + if isinstance(data, (str, Path)): + with open(data, errors='ignore') as f: + data = yaml.safe_load(f) # dictionary + + # Resolve paths + path = Path(extract_dir or data.get('path') or '') # optional 'path' default to '.' + if not path.is_absolute(): + path = (ROOT / path).resolve() + for k in 'train', 'val', 'test': + if data.get(k): # prepend path + data[k] = str(path / data[k]) if isinstance(data[k], str) else [str(path / x) for x in data[k]] + + # Parse yaml + assert 'nc' in data, "Dataset 'nc' key missing." + if 'names' not in data: + data['names'] = [f'class{i}' for i in range(data['nc'])] # assign class names if missing + train, val, test, s = (data.get(x) for x in ('train', 'val', 'test', 'download')) + if val: val = [Path(x).resolve() for x in (val if isinstance(val, list) else [val])] # val path if not all(x.exists() for x in val): - print('\nWARNING: Dataset not found, nonexistent paths: %s' % [str(x) for x in val if not x.exists()]) - if s and len(s): # download script + LOGGER.info(emojis('\nDataset not found ⚠️, missing paths %s' % [str(x) for x in val if not x.exists()])) + if s and autodownload: # download script + t = time.time() + root = path.parent if 'path' in data else '..' # unzip directory i.e. '../' if s.startswith('http') and s.endswith('.zip'): # URL f = Path(s).name # filename - print(f'Downloading {s} ...') + LOGGER.info(f'Downloading {s} to {f}...') torch.hub.download_url_to_file(s, f) - r = os.system(f'unzip -q {f} -d ../ && rm {f}') # unzip + Path(root).mkdir(parents=True, exist_ok=True) # create root + ZipFile(f).extractall(path=root) # unzip + Path(f).unlink() # remove zip + r = None # success elif s.startswith('bash '): # bash script - print(f'Running {s} ...') + LOGGER.info(f'Running {s} ...') r = os.system(s) else: # python script - r = exec(s) # return None - print('Dataset autodownload %s\n' % ('success' if r in (0, None) else 'failure')) # print result + r = exec(s, {'yaml': data}) # return None + dt = f'({round(time.time() - t, 1)}s)' + s = f"success βœ… {dt}, saved to {colorstr('bold', root)}" if r in (0, None) else f"failure {dt} ❌" + LOGGER.info(emojis(f"Dataset download {s}")) else: - raise Exception('Dataset not found.') + raise Exception(emojis('Dataset not found ❌')) + + return data # dictionary + + +def url2file(url): + # Convert URL to filename, i.e. 
https://url.com/file.txt?auth -> file.txt + url = str(Path(url)).replace(':/', '://') # Pathlib turns :// -> :/ + file = Path(urllib.parse.unquote(url)).name.split('?')[0] # '%2F' to '/', split https://url.com/file.txt?auth + return file def download(url, dir='.', unzip=True, delete=True, curl=False, threads=1): - # Multi-threaded file download and unzip function + # Multi-threaded file download and unzip function, used in data.yaml for autodownload def download_one(url, dir): # Download 1 file f = dir / Path(url).name # filename - if not f.exists(): - print(f'Downloading {url} to {f}...') + if Path(url).is_file(): # exists in current path + Path(url).rename(f) # move to dir + elif not f.exists(): + LOGGER.info(f'Downloading {url} to {f}...') if curl: os.system(f"curl -L '{url}' -o '{f}' --retry 9 -C -") # curl download, retry and resume on fail else: - torch.hub.download_url_to_file(url, f, progress=True) # torch download + torch.hub.download_url_to_file(url, f, progress=threads == 1) # torch download if unzip and f.suffix in ('.zip', '.gz'): - print(f'Unzipping {f}...') + LOGGER.info(f'Unzipping {f}...') if f.suffix == '.zip': - s = f'unzip -qo {f} -d {dir} && rm {f}' # unzip -quiet -overwrite + ZipFile(f).extractall(path=dir) # unzip elif f.suffix == '.gz': - s = f'tar xfz {f} --directory {f.parent}' # unzip - if delete: # delete zip file after unzip - s += f' && rm {f}' - os.system(s) + os.system(f'tar xfz {f} --directory {f.parent}') # unzip + if delete: + f.unlink() # remove zip dir = Path(dir) dir.mkdir(parents=True, exist_ok=True) # make directory @@ -235,12 +516,14 @@ def download_one(url, dir): pool.close() pool.join() else: - for u in tuple(url) if isinstance(url, str) else url: + for u in [url] if isinstance(url, (str, Path)) else url: download_one(u, dir) def make_divisible(x, divisor): - # Returns x evenly divisible by divisor + # Returns nearest x divisible by divisor + if isinstance(divisor, torch.Tensor): + divisor = int(divisor.max()) # to int return math.ceil(x / divisor) * divisor @@ -250,7 +533,7 @@ def clean_str(s): def one_cycle(y1=0.0, y2=1.0, steps=100): - # lambda function for sinusoidal ramp from y1 to y2 + # lambda function for sinusoidal ramp from y1 to y2 https://arxiv.org/pdf/1812.01187.pdf return lambda x: ((1 - math.cos(x * math.pi / steps)) / 2) * (y2 - y1) + y1 @@ -348,6 +631,18 @@ def xywhn2xyxy(x, w=640, h=640, padw=0, padh=0): return y +def xyxy2xywhn(x, w=640, h=640, clip=False, eps=0.0): + # Convert nx4 boxes from [x1, y1, x2, y2] to [x, y, w, h] normalized where xy1=top-left, xy2=bottom-right + if clip: + clip_coords(x, (h - eps, w - eps)) # warning: inplace clip + y = x.clone() if isinstance(x, torch.Tensor) else np.copy(x) + y[:, 0] = ((x[:, 0] + x[:, 2]) / 2) / w # x center + y[:, 1] = ((x[:, 1] + x[:, 3]) / 2) / h # y center + y[:, 2] = (x[:, 2] - x[:, 0]) / w # width + y[:, 3] = (x[:, 3] - x[:, 1]) / h # height + return y + + def xyn2xy(x, w=640, h=640, padw=0, padh=0): # Convert normalized segments into pixel segments, shape (n,2) y = x.clone() if isinstance(x, torch.Tensor) else np.copy(x) @@ -398,90 +693,16 @@ def scale_coords(img1_shape, coords, img0_shape, ratio_pad=None): return coords -def clip_coords(boxes, img_shape): +def clip_coords(boxes, shape): # Clip bounding xyxy bounding boxes to image shape (height, width) - boxes[:, 0].clamp_(0, img_shape[1]) # x1 - boxes[:, 1].clamp_(0, img_shape[0]) # y1 - boxes[:, 2].clamp_(0, img_shape[1]) # x2 - boxes[:, 3].clamp_(0, img_shape[0]) # y2 - - -def bbox_iou(box1, box2, x1y1x2y2=True, 
GIoU=False, DIoU=False, CIoU=False, eps=1e-7): - # Returns the IoU of box1 to box2. box1 is 4, box2 is nx4 - box2 = box2.T - - # Get the coordinates of bounding boxes - if x1y1x2y2: # x1, y1, x2, y2 = box1 - b1_x1, b1_y1, b1_x2, b1_y2 = box1[0], box1[1], box1[2], box1[3] - b2_x1, b2_y1, b2_x2, b2_y2 = box2[0], box2[1], box2[2], box2[3] - else: # transform from xywh to xyxy - b1_x1, b1_x2 = box1[0] - box1[2] / 2, box1[0] + box1[2] / 2 - b1_y1, b1_y2 = box1[1] - box1[3] / 2, box1[1] + box1[3] / 2 - b2_x1, b2_x2 = box2[0] - box2[2] / 2, box2[0] + box2[2] / 2 - b2_y1, b2_y2 = box2[1] - box2[3] / 2, box2[1] + box2[3] / 2 - - # Intersection area - inter = (torch.min(b1_x2, b2_x2) - torch.max(b1_x1, b2_x1)).clamp(0) * \ - (torch.min(b1_y2, b2_y2) - torch.max(b1_y1, b2_y1)).clamp(0) - - # Union Area - w1, h1 = b1_x2 - b1_x1, b1_y2 - b1_y1 + eps - w2, h2 = b2_x2 - b2_x1, b2_y2 - b2_y1 + eps - union = w1 * h1 + w2 * h2 - inter + eps - - iou = inter / union - if GIoU or DIoU or CIoU: - cw = torch.max(b1_x2, b2_x2) - torch.min(b1_x1, b2_x1) # convex (smallest enclosing box) width - ch = torch.max(b1_y2, b2_y2) - torch.min(b1_y1, b2_y1) # convex height - if CIoU or DIoU: # Distance or Complete IoU https://arxiv.org/abs/1911.08287v1 - c2 = cw ** 2 + ch ** 2 + eps # convex diagonal squared - rho2 = ((b2_x1 + b2_x2 - b1_x1 - b1_x2) ** 2 + - (b2_y1 + b2_y2 - b1_y1 - b1_y2) ** 2) / 4 # center distance squared - if DIoU: - return iou - rho2 / c2 # DIoU - elif CIoU: # https://github.com/Zzh-tju/DIoU-SSD-pytorch/blob/master/utils/box/box_utils.py#L47 - v = (4 / math.pi ** 2) * torch.pow(torch.atan(w2 / h2) - torch.atan(w1 / h1), 2) - with torch.no_grad(): - alpha = v / (v - iou + (1 + eps)) - return iou - (rho2 / c2 + v * alpha) # CIoU - else: # GIoU https://arxiv.org/pdf/1902.09630.pdf - c_area = cw * ch + eps # convex area - return iou - (c_area - union) / c_area # GIoU - else: - return iou # IoU - - -def box_iou(box1, box2): - # https://github.com/pytorch/vision/blob/master/torchvision/ops/boxes.py - """ - Return intersection-over-union (Jaccard index) of boxes. - Both sets of boxes are expected to be in (x1, y1, x2, y2) format. - Arguments: - box1 (Tensor[N, 4]) - box2 (Tensor[M, 4]) - Returns: - iou (Tensor[N, M]): the NxM matrix containing the pairwise - IoU values for every element in boxes1 and boxes2 - """ - - def box_area(box): - # box = 4xn - return (box[2] - box[0]) * (box[3] - box[1]) - - area1 = box_area(box1.T) - area2 = box_area(box2.T) - - # inter(N,M) = (rb(N,M,2) - lt(N,M,2)).clamp(0).prod(2) - inter = (torch.min(box1[:, None, 2:], box2[:, 2:]) - torch.max(box1[:, None, :2], box2[:, :2])).clamp(0).prod(2) - return inter / (area1[:, None] + area2 - inter) # iou = inter / (area1 + area2 - inter) - - -def wh_iou(wh1, wh2): - # Returns the nxm IoU matrix. 
wh1 is nx2, wh2 is mx2 - wh1 = wh1[:, None] # [N,1,2] - wh2 = wh2[None] # [1,M,2] - inter = torch.min(wh1, wh2).prod(2) # [N,M] - return inter / (wh1.prod(2) + wh2.prod(2) - inter) # iou = inter / (area1 + area2 - inter) + if isinstance(boxes, torch.Tensor): # faster individually + boxes[:, 0].clamp_(0, shape[1]) # x1 + boxes[:, 1].clamp_(0, shape[0]) # y1 + boxes[:, 2].clamp_(0, shape[1]) # x2 + boxes[:, 3].clamp_(0, shape[0]) # y2 + else: # np.array (faster grouped) + boxes[:, [0, 2]] = boxes[:, [0, 2]].clip(0, shape[1]) # x1, x2 + boxes[:, [1, 3]] = boxes[:, [1, 3]].clip(0, shape[0]) # y1, y2 def non_max_suppression(prediction, conf_thres=0.25, iou_thres=0.45, classes=None, agnostic=False, multi_label=False, @@ -500,7 +721,7 @@ def non_max_suppression(prediction, conf_thres=0.25, iou_thres=0.45, classes=Non assert 0 <= iou_thres <= 1, f'Invalid IoU {iou_thres}, valid values are between 0.0 and 1.0' # Settings - min_wh, max_wh = 2, 4096 # (pixels) minimum and maximum box width and height + min_wh, max_wh = 2, 7680 # (pixels) minimum and maximum box width and height max_nms = 30000 # maximum number of boxes into torchvision.ops.nms() time_limit = 10.0 # seconds to quit after redundant = True # require redundant detections @@ -511,16 +732,16 @@ def non_max_suppression(prediction, conf_thres=0.25, iou_thres=0.45, classes=Non output = [torch.zeros((0, 6), device=prediction.device)] * prediction.shape[0] for xi, x in enumerate(prediction): # image index, image inference # Apply constraints - # x[((x[..., 2:4] < min_wh) | (x[..., 2:4] > max_wh)).any(1), 4] = 0 # width-height + x[((x[..., 2:4] < min_wh) | (x[..., 2:4] > max_wh)).any(1), 4] = 0 # width-height x = x[xc[xi]] # confidence # Cat apriori labels if autolabelling if labels and len(labels[xi]): - l = labels[xi] - v = torch.zeros((len(l), nc + 5), device=x.device) - v[:, :4] = l[:, 1:5] # box + lb = labels[xi] + v = torch.zeros((len(lb), nc + 5), device=x.device) + v[:, :4] = lb[:, 1:5] # box v[:, 4] = 1.0 # conf - v[range(len(l)), l[:, 0].long() + 5] = 1.0 # cls + v[range(len(lb)), lb[:, 0].long() + 5] = 1.0 # cls x = torch.cat((x, v), 0) # If none remain process next image @@ -572,7 +793,7 @@ def non_max_suppression(prediction, conf_thres=0.25, iou_thres=0.45, classes=Non output[xi] = x[i] if (time.time() - t) > time_limit: - print(f'WARNING: NMS time limit {time_limit}s exceeded') + LOGGER.warning(f'WARNING: NMS time limit {time_limit}s exceeded') break # time limit exceeded return output @@ -583,7 +804,7 @@ def strip_optimizer(f='best.pt', s=''): # from utils.general import *; strip_op x = torch.load(f, map_location=torch.device('cpu')) if x.get('ema'): x['model'] = x['ema'] # replace model with ema - for k in 'optimizer', 'training_results', 'wandb_id', 'ema', 'updates': # keys + for k in 'optimizer', 'best_fitness', 'wandb_id', 'ema', 'updates': # keys x[k] = None x['epoch'] = -1 pickled = isinstance(x['model'], torch.nn.Module) @@ -593,42 +814,54 @@ def strip_optimizer(f='best.pt', s=''): # from utils.general import *; strip_op p.requires_grad = False torch.save(x, s or f) mb = os.path.getsize(s or f) / 1E6 # filesize - print(f"Optimizer stripped from {f},{(' saved as %s,' % s) if s else ''} {mb:.1f}MB") + LOGGER.info(f"Optimizer stripped from {f},{(' saved as %s,' % s) if s else ''} {mb:.1f}MB") -def print_mutation(hyp, results, yaml_file='hyp_evolved.yaml', bucket=''): - # Print mutation results to evolve.txt (for use with train.py --evolve) - a = '%10s' * len(hyp) % tuple(hyp.keys()) # hyperparam keys - b = '%10.3g' * len(hyp) 
% tuple(hyp.values()) # hyperparam values - c = '%10.4g' * len(results) % results # results (P, R, mAP@0.5, mAP@0.5:0.95, val_losses x 3) - print('\n%s\n%s\nEvolved fitness: %s\n' % (a, b, c)) +def print_mutation(results, hyp, save_dir, bucket, prefix=colorstr('evolve: ')): + evolve_csv = save_dir / 'evolve.csv' + evolve_yaml = save_dir / 'hyp_evolve.yaml' + keys = ('metrics/precision', 'metrics/recall', 'metrics/mAP_0.5', 'metrics/mAP_0.5:0.95', + 'val/box_loss', 'val/obj_loss', 'val/cls_loss') + tuple(hyp.keys()) # [results + hyps] + keys = tuple(x.strip() for x in keys) + vals = results + tuple(hyp.values()) + n = len(keys) + # Download (optional) if bucket: - url = 'gs://%s/evolve.txt' % bucket - if gsutil_getsize(url) > (os.path.getsize('evolve.txt') if os.path.exists('evolve.txt') else 0): - os.system('gsutil cp %s .' % url) # download evolve.txt if larger than local + url = f'gs://{bucket}/evolve.csv' + if gsutil_getsize(url) > (evolve_csv.stat().st_size if evolve_csv.exists() else 0): + os.system(f'gsutil cp {url} {save_dir}') # download evolve.csv if larger than local - with open('evolve.txt', 'a') as f: # append result - f.write(c + b + '\n') - x = np.unique(np.loadtxt('evolve.txt', ndmin=2), axis=0) # load unique rows - x = x[np.argsort(-fitness(x))] # sort - np.savetxt('evolve.txt', x, '%10.3g') # save sort by fitness + # Log to evolve.csv + s = '' if evolve_csv.exists() else (('%20s,' * n % keys).rstrip(',') + '\n') # add header + with open(evolve_csv, 'a') as f: + f.write(s + ('%20.5g,' * n % vals).rstrip(',') + '\n') # Save yaml - for i, k in enumerate(hyp.keys()): - hyp[k] = float(x[0, i + 7]) - with open(yaml_file, 'w') as f: - results = tuple(x[0, :7]) - c = '%10.4g' * len(results) % results # results (P, R, mAP@0.5, mAP@0.5:0.95, val_losses x 3) - f.write('# Hyperparameter Evolution Results\n# Generations: %g\n# Metrics: ' % len(x) + c + '\n\n') - yaml.safe_dump(hyp, f, sort_keys=False) + with open(evolve_yaml, 'w') as f: + data = pd.read_csv(evolve_csv) + data = data.rename(columns=lambda x: x.strip()) # strip keys + i = np.argmax(fitness(data.values[:, :4])) # + generations = len(data) + f.write('# YOLOv5 Hyperparameter Evolution Results\n' + + f'# Best generation: {i}\n' + + f'# Last generation: {generations - 1}\n' + + '# ' + ', '.join(f'{x.strip():>20s}' for x in keys[:7]) + '\n' + + '# ' + ', '.join(f'{x:>20.5g}' for x in data.values[i, :7]) + '\n\n') + yaml.safe_dump(data.loc[i][7:].to_dict(), f, sort_keys=False) + + # Print to screen + LOGGER.info(prefix + f'{generations} generations finished, current result:\n' + + prefix + ', '.join(f'{x.strip():>20s}' for x in keys) + '\n' + + prefix + ', '.join(f'{x:20.5g}' for x in vals) + '\n\n') if bucket: - os.system('gsutil cp evolve.txt %s gs://%s' % (yaml_file, bucket)) # upload + os.system(f'gsutil cp {evolve_csv} {evolve_yaml} gs://{bucket}') # upload def apply_classifier(x, model, img, im0): - # Apply a second stage classifier to yolo outputs + # Apply a second stage classifier to YOLO outputs + # Example model = torchvision.models.__dict__['efficientnet_b0'](pretrained=True).to(device).eval() im0 = [im0] if isinstance(im0, np.ndarray) else im0 for i, d in enumerate(x): # per image if d is not None and len(d): @@ -649,11 +882,11 @@ def apply_classifier(x, model, img, im0): for j, a in enumerate(d): # per item cutout = im0[i][int(a[1]):int(a[3]), int(a[0]):int(a[2])] im = cv2.resize(cutout, (224, 224)) # BGR - # cv2.imwrite('test%i.jpg' % j, cutout) + # cv2.imwrite('example%i.jpg' % j, cutout) im = im[:, :, 
::-1].transpose(2, 0, 1) # BGR to RGB, to 3x416x416 im = np.ascontiguousarray(im, dtype=np.float32) # uint8 to float32 - im /= 255.0 # 0 - 255 to 0.0 - 1.0 + im /= 255 # 0 - 255 to 0.0 - 1.0 ims.append(im) pred_cls2 = model(torch.Tensor(ims).to(d.device)).argmax(1) # classifier prediction @@ -662,33 +895,20 @@ def apply_classifier(x, model, img, im0): return x -def save_one_box(xyxy, im, file='image.jpg', gain=1.02, pad=10, square=False, BGR=False, save=True): - # Save image crop as {file} with crop size multiple {gain} and {pad} pixels. Save and/or return crop - xyxy = torch.tensor(xyxy).view(-1, 4) - b = xyxy2xywh(xyxy) # boxes - if square: - b[:, 2:] = b[:, 2:].max(1)[0].unsqueeze(1) # attempt rectangle to square - b[:, 2:] = b[:, 2:] * gain + pad # box wh * gain + pad - xyxy = xywh2xyxy(b).long() - clip_coords(xyxy, im.shape) - crop = im[int(xyxy[0, 1]):int(xyxy[0, 3]), int(xyxy[0, 0]):int(xyxy[0, 2]), ::(1 if BGR else -1)] - if save: - cv2.imwrite(str(increment_path(file, mkdir=True).with_suffix('.jpg')), crop) - return crop - - def increment_path(path, exist_ok=False, sep='', mkdir=False): # Increment file or directory path, i.e. runs/exp --> runs/exp{sep}2, runs/exp{sep}3, ... etc. path = Path(path) # os-agnostic if path.exists() and not exist_ok: - suffix = path.suffix - path = path.with_suffix('') + path, suffix = (path.with_suffix(''), path.suffix) if path.is_file() else (path, '') dirs = glob.glob(f"{path}{sep}*") # similar paths matches = [re.search(rf"%s{sep}(\d+)" % path.stem, d) for d in dirs] i = [int(m.groups()[0]) for m in matches if m] # indices n = max(i) + 1 if i else 2 # increment number - path = Path(f"{path}{sep}{n}{suffix}") # update path - dir = path if path.suffix == '' else path.parent # directory - if not dir.exists() and mkdir: - dir.mkdir(parents=True, exist_ok=True) # make directory + path = Path(f"{path}{sep}{n}{suffix}") # increment path + if mkdir: + path.mkdir(parents=True, exist_ok=True) # make directory return path + + +# Variables +NCOLS = 0 if is_docker() else shutil.get_terminal_size().columns # terminal window size for tqdm diff --git a/utils/google_app_engine/additional_requirements.txt b/utils/google_app_engine/additional_requirements.txt index 5fcc30524a59..42d7ffc0eed8 100644 --- a/utils/google_app_engine/additional_requirements.txt +++ b/utils/google_app_engine/additional_requirements.txt @@ -1,4 +1,4 @@ # add these requirements in your app on top of the existing ones -pip==18.1 +pip==21.1 Flask==1.0.2 gunicorn==19.9.0 diff --git a/utils/google_app_engine/app.yaml b/utils/google_app_engine/app.yaml index ac29d104b144..5056b7c1186d 100644 --- a/utils/google_app_engine/app.yaml +++ b/utils/google_app_engine/app.yaml @@ -11,4 +11,4 @@ manual_scaling: resources: cpu: 1 memory_gb: 4 - disk_size_gb: 20 \ No newline at end of file + disk_size_gb: 20 diff --git a/utils/loggers/__init__.py b/utils/loggers/__init__.py new file mode 100644 index 000000000000..3b2230c02a14 --- /dev/null +++ b/utils/loggers/__init__.py @@ -0,0 +1,174 @@ +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license +""" +Logging utils +""" + +import os +import warnings +from threading import Thread + +import pkg_resources as pkg +import torch +from torch.utils.tensorboard import SummaryWriter + +from utils.general import colorstr, emojis +from utils.loggers.wandb.wandb_utils import WandbLogger +from utils.plots import plot_images, plot_results +from utils.torch_utils import de_parallel + +LOGGERS = ('csv', 'tb', 'wandb') # text-file, TensorBoard, Weights & Biases +RANK = 
int(os.getenv('RANK', -1)) + +try: + import wandb + + assert hasattr(wandb, '__version__') # verify package import not local dir + if pkg.parse_version(wandb.__version__) >= pkg.parse_version('0.12.2') and RANK in [0, -1]: + try: + wandb_login_success = wandb.login(timeout=30) + except wandb.errors.UsageError: # known non-TTY terminal issue + wandb_login_success = False + if not wandb_login_success: + wandb = None +except (ImportError, AssertionError): + wandb = None + + +class Loggers(): + # YOLOv5 Loggers class + def __init__(self, save_dir=None, weights=None, opt=None, hyp=None, logger=None, include=LOGGERS): + self.save_dir = save_dir + self.weights = weights + self.opt = opt + self.hyp = hyp + self.logger = logger # for printing results to console + self.include = include + self.keys = ['train/box_loss', 'train/obj_loss', 'train/cls_loss', # train loss + 'metrics/precision', 'metrics/recall', 'metrics/mAP_0.5', 'metrics/mAP_0.5:0.95', # metrics + 'val/box_loss', 'val/obj_loss', 'val/cls_loss', # val loss + 'x/lr0', 'x/lr1', 'x/lr2'] # params + self.best_keys = ['best/epoch', 'best/precision', 'best/recall', 'best/mAP_0.5', 'best/mAP_0.5:0.95'] + for k in LOGGERS: + setattr(self, k, None) # init empty logger dictionary + self.csv = True # always log to csv + + # Message + if not wandb: + prefix = colorstr('Weights & Biases: ') + s = f"{prefix}run 'pip install wandb' to automatically track and visualize YOLOv5 πŸš€ runs (RECOMMENDED)" + self.logger.info(emojis(s)) + + # TensorBoard + s = self.save_dir + if 'tb' in self.include and not self.opt.evolve: + prefix = colorstr('TensorBoard: ') + self.logger.info(f"{prefix}Start with 'tensorboard --logdir {s.parent}', view at http://localhost:6006/") + self.tb = SummaryWriter(str(s)) + + # W&B + if wandb and 'wandb' in self.include: + wandb_artifact_resume = isinstance(self.opt.resume, str) and self.opt.resume.startswith('wandb-artifact://') + run_id = torch.load(self.weights).get('wandb_id') if self.opt.resume and not wandb_artifact_resume else None + self.opt.hyp = self.hyp # add hyperparameters + self.wandb = WandbLogger(self.opt, run_id) + else: + self.wandb = None + + def on_pretrain_routine_end(self): + # Callback runs on pre-train routine end + paths = self.save_dir.glob('*labels*.jpg') # training labels + if self.wandb: + self.wandb.log({"Labels": [wandb.Image(str(x), caption=x.name) for x in paths]}) + + def on_train_batch_end(self, ni, model, imgs, targets, paths, plots, sync_bn): + # Callback runs on train batch end + if plots: + if ni == 0: + if not sync_bn: # tb.add_graph() --sync known issue https://github.com/ultralytics/yolov5/issues/3754 + with warnings.catch_warnings(): + warnings.simplefilter('ignore') # suppress jit trace warning + try: + self.tb.add_graph(torch.jit.trace(de_parallel(model), imgs[0:1], strict=False), []) + except Exception: + warnings.warn("Couldn't create quantized graph for Tensorboard") + if ni < 3: + f = self.save_dir / f'train_batch{ni}.jpg' # filename + Thread(target=plot_images, args=(imgs, targets, paths, f), daemon=True).start() + if self.wandb and ni == 10: + files = sorted(self.save_dir.glob('train*.jpg')) + self.wandb.log({'Mosaics': [wandb.Image(str(f), caption=f.name) for f in files if f.exists()]}) + + def on_train_epoch_end(self, epoch): + # Callback runs on train epoch end + if self.wandb: + self.wandb.current_epoch = epoch + 1 + + def on_val_image_end(self, pred, predn, path, names, im): + # Callback runs on val image end + if self.wandb: + self.wandb.val_one_image(pred, predn, path, 
names, im) + + def on_val_end(self): + # Callback runs on val end + if self.wandb: + files = sorted(self.save_dir.glob('val*.jpg')) + self.wandb.log({"Validation": [wandb.Image(str(f), caption=f.name) for f in files]}) + + def on_fit_epoch_end(self, vals, epoch, best_fitness, fi): + # Callback runs at the end of each fit (train+val) epoch + x = {k: v for k, v in zip(self.keys, vals)} # dict + if self.csv: + file = self.save_dir / 'results.csv' + n = len(x) + 1 # number of cols + s = '' if file.exists() else (('%20s,' * n % tuple(['epoch'] + self.keys)).rstrip(',') + '\n') # add header + with open(file, 'a') as f: + f.write(s + ('%20.5g,' * n % tuple([epoch] + vals)).rstrip(',') + '\n') + + if self.tb: + for k, v in x.items(): + self.tb.add_scalar(k, v, epoch) + + if self.wandb: + if best_fitness == fi: + best_results = [epoch] + vals[3:7] + for i, name in enumerate(self.best_keys): + self.wandb.wandb_run.summary[name] = best_results[i] # log best results in the summary + self.wandb.log(x) + self.wandb.end_epoch(best_result=best_fitness == fi) + + def on_model_save(self, last, epoch, final_epoch, best_fitness, fi): + # Callback runs on model save event + if self.wandb: + if ((epoch + 1) % self.opt.save_period == 0 and not final_epoch) and self.opt.save_period != -1: + self.wandb.log_model(last.parent, self.opt, epoch, fi, best_model=best_fitness == fi) + + def on_train_end(self, last, best, plots, epoch, results): + # Callback runs on training end + if plots: + plot_results(file=self.save_dir / 'results.csv') # save results.png + files = ['results.png', 'confusion_matrix.png', *(f'{x}_curve.png' for x in ('F1', 'PR', 'P', 'R'))] + files = [(self.save_dir / f) for f in files if (self.save_dir / f).exists()] # filter + + if self.tb: + import cv2 + import numpy as np + + cv2.imread = lambda x: cv2.imdecode(np.fromfile(x, np.uint8), cv2.IMREAD_COLOR) # remap for Chinese files + for f in files: + self.tb.add_image(f.stem, cv2.imread(str(f))[..., ::-1], epoch, dataformats='HWC') + + if self.wandb: + self.wandb.log({k: v for k, v in zip(self.keys[3:10], results)}) # log best.pt val results + self.wandb.log({"Results": [wandb.Image(str(f), caption=f.name) for f in files]}) + # Calling wandb.log. TODO: Refactor this into WandbLogger.log_model + if not self.opt.evolve: + wandb.log_artifact(str(best if best.exists() else last), type='model', + name='run_' + self.wandb.wandb_run.id + '_model', + aliases=['latest', 'best', 'stripped']) + self.wandb.finish_run() + + def on_params_update(self, params): + # Update hyperparams or configs of the experiment + # params: A dict containing {param: value} pairs + if self.wandb: + self.wandb.wandb_run.config.update(params, allow_val_change=True) diff --git a/utils/loggers/wandb/README.md b/utils/loggers/wandb/README.md new file mode 100644 index 000000000000..63d999859e6d --- /dev/null +++ b/utils/loggers/wandb/README.md @@ -0,0 +1,152 @@ +πŸ“š This guide explains how to use **Weights & Biases** (W&B) with YOLOv5 πŸš€. UPDATED 29 September 2021. +* [About Weights & Biases](#about-weights-&-biases) +* [First-Time Setup](#first-time-setup) +* [Viewing runs](#viewing-runs) +* [Disabling wandb](#disabling-wandb) +* [Advanced Usage: Dataset Versioning and Evaluation](#advanced-usage) +* [Reports: Share your work with the world!](#reports) + +## About Weights & Biases +Think of [W&B](https://wandb.ai/site?utm_campaign=repo_yolo_wandbtutorial) like GitHub for machine learning models. 
With a few lines of code, save everything you need to debug, compare and reproduce your models: architecture, hyperparameters, git commits, model weights, GPU usage, and even datasets and predictions.
+
+Used by top researchers including teams at OpenAI, Lyft, GitHub, and MILA, W&B is part of the new standard of best practices for machine learning. Here is how W&B can help you optimize your machine learning workflows:
+
+ * [Debug](https://wandb.ai/wandb/getting-started/reports/Visualize-Debug-Machine-Learning-Models--VmlldzoyNzY5MDk#Free-2) model performance in real time
+ * [GPU usage](https://wandb.ai/wandb/getting-started/reports/Visualize-Debug-Machine-Learning-Models--VmlldzoyNzY5MDk#System-4) visualized automatically
+ * [Custom charts](https://wandb.ai/wandb/customizable-charts/reports/Powerful-Custom-Charts-To-Debug-Model-Peformance--VmlldzoyNzY4ODI) for powerful, extensible visualization
+ * [Share insights](https://wandb.ai/wandb/getting-started/reports/Visualize-Debug-Machine-Learning-Models--VmlldzoyNzY5MDk#Share-8) interactively with collaborators
+ * [Optimize hyperparameters](https://docs.wandb.com/sweeps) efficiently
+ * [Track](https://docs.wandb.com/artifacts) datasets, pipelines, and production models
+
+## First-Time Setup
+When you first train, W&B will prompt you to create a new account and will generate an **API key** for you. If you are an existing user you can retrieve your key from https://wandb.ai/authorize. This key is used to tell W&B where to log your data. You only need to supply your key once, and then it is remembered on the same device.
+
+W&B will create a cloud **project** (default is 'YOLOv5') for your training runs, and each new training run will be provided a unique run **name** within that project as project/name. You can also manually set your project and run name as:
+
+ ```shell
+ $ python train.py --project ... --name ...
+ ```
+
+YOLOv5 notebook example: Open In Colab Open In Kaggle
+(Screenshot: W&B first-time setup, 29 September 2021)
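+If you prefer to authenticate non-interactively (e.g. on a headless server), the snippet below is a minimal sketch: the `WANDB_API_KEY` environment variable and `wandb.login()` are standard wandb mechanisms, and the key value shown is a placeholder.
+
+```python
+# Minimal sketch: non-interactive W&B login (key value is a placeholder).
+import os
+import wandb
+
+os.environ.setdefault("WANDB_API_KEY", "<your-api-key>")  # or run `wandb login` once in a terminal
+if wandb.login(timeout=30):  # returns True once W&B is ready to receive YOLOv5 runs
+    print("W&B login OK")
+```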
+
+## Viewing Runs
+Run information streams from your environment to the W&B cloud console as you train. This allows you to monitor and even cancel runs in real time. All important information is logged:
+
+ * Training & Validation losses
+ * Metrics: Precision, Recall, mAP@0.5, mAP@0.5:0.95
+ * Learning Rate over time
+ * A bounding box debugging panel, showing the training progress over time
+ * GPU: Type, **GPU Utilization**, power, temperature, **CUDA memory usage**
+ * System: Disk I/O, CPU utilization, RAM memory usage
+ * Your trained model as a W&B Artifact
+ * Environment: OS and Python version, Git repository and state, **training command**
+
+(Screenshot: Weights & Biases dashboard)
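+The same scalars that stream to the dashboard are also appended locally to `results.csv` in your run directory. As a quick sanity check, here is a sketch assuming a default `runs/train/exp` save directory and pandas installed:
+
+```python
+# Sketch: inspect per-epoch metrics from the local results.csv that mirrors the W&B charts.
+import pandas as pd
+
+df = pd.read_csv("runs/train/exp/results.csv")
+df.columns = [c.strip() for c in df.columns]  # headers are written space-padded
+print(df[["epoch", "metrics/mAP_0.5", "metrics/mAP_0.5:0.95"]].tail())
+```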
+
+## Disabling wandb
+* To disable logging, run `wandb disabled` inside the project directory; subsequent training creates no wandb run.
+![Screenshot (84)](https://user-images.githubusercontent.com/15766192/143441777-c780bdd7-7cb4-4404-9559-b4316030a985.png)
+
+* To enable wandb again, run `wandb online`.
+![Screenshot (85)](https://user-images.githubusercontent.com/15766192/143441866-7191b2cb-22f0-4e0f-ae64-2dc47dc13078.png)
+
+## Advanced Usage
+You can leverage the W&B artifacts and Tables integration to easily visualize and manage your datasets, models and training evaluations. Here are some quick examples to get you started.
+
+### 1: Train and Log Evaluation simultaneously
+This is an extension of the previous section: it starts training after uploading the dataset and also logs an evaluation Table. The evaluation Table compares your predictions and ground truths across the validation set for each epoch. It uses references to the already-uploaded dataset, so no images will be uploaded from your system more than once.
+**Usage**
+```shell
+$ python train.py --upload_data val
+```
+
+![Screenshot from 2021-11-21 17-40-06](https://user-images.githubusercontent.com/15766192/142761183-c1696d8c-3f38-45ab-991a-bb0dfd98ae7d.png)
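+For reference, each row of the logged evaluation Table carries the epoch, the image id, the ground-truth and predicted images, and one mean-confidence column per class. The sketch below mirrors the `result_table` columns built in `wandb_utils.py`; the class names are illustrative.
+
+```python
+# Sketch: column layout of the per-epoch evaluation Table (class names are illustrative).
+import wandb
+
+names = ["person", "bicycle", "car"]  # your dataset's class names
+columns = ["epoch", "id", "ground truth", "prediction"] + names  # per-class mean confidence
+result_table = wandb.Table(columns=columns)
+```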
+
+### 2: Visualize and Version Datasets
+Log, visualize, dynamically query, and understand your data with W&B Tables. You can use the following command to log your dataset as a W&B Table. This will generate a `{dataset}_wandb.yaml` file which can be used to train from the dataset artifact.
+**Usage**
+```shell
+$ python utils/loggers/wandb/log_dataset.py --project ... --name ... --data ..
+```
+
+![Screenshot (64)](https://user-images.githubusercontent.com/15766192/128486078-d8433890-98a3-4d12-8986-b6c0e3fc64b9.png)
+
+### 3: Train using dataset artifact
+When you upload a dataset as described in the first section, you get a new config file with `_wandb` appended to its name. This file contains the information that can be used to train a model directly from the dataset artifact. It also logs the evaluation Table.
+**Usage**
+```shell
+$ python train.py --data {data}_wandb.yaml
+```
+
+![Screenshot (72)](https://user-images.githubusercontent.com/15766192/128979739-4cf63aeb-a76f-483f-8861-1c0100b938a5.png)
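+For illustration, the generated file simply replaces the local `train`/`val` paths with artifact references. Below is a sketch of roughly what a generated `coco128_wandb.yaml` contains; the values are assumptions for a default 'YOLOv5' project.
+
+```python
+# Sketch: the generated *_wandb.yaml swaps local paths for artifact references (values assumed).
+import yaml
+
+data = {
+    "train": "wandb-artifact://YOLOv5/train",  # downloaded on demand at train time
+    "val": "wandb-artifact://YOLOv5/val",
+    "nc": 80,
+    "names": ["person", "bicycle", "car"],  # truncated for brevity
+}
+print(yaml.safe_dump(data, sort_keys=False))
+```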
+
+### 4: Save model checkpoints as artifacts
+To enable saving and versioning checkpoints of your experiment, pass `--save_period n` with the base command, where `n` represents the checkpoint interval. You can also log both the dataset and model checkpoints simultaneously. If `--save_period` is not passed, only the final model will be logged.
+**Usage**
+```shell
+$ python train.py --save_period 1
+```
+
+![Screenshot (68)](https://user-images.githubusercontent.com/15766192/128726138-ec6c1f60-639d-437d-b4ee-3acd9de47ef3.png)
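+Checkpoints logged this way are versioned under the artifact name `run_<run_id>_model` with `latest`, `last` and `best` aliases (see `log_model()` in `wandb_utils.py`). A sketch of pulling one back with the public W&B API follows; the entity, project and run id components are placeholders.
+
+```python
+# Sketch: download a checkpoint artifact saved with --save_period (path components are placeholders).
+import wandb
+
+api = wandb.Api()
+artifact = api.artifact("entity/project/run_<run_id>_model:best")  # aliases: latest, last, best
+ckpt_dir = artifact.download()
+print("last.pt saved to:", ckpt_dir)
+```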
+
+### 5: Resume runs from checkpoint artifacts
+Any run can be resumed using artifacts if the `--resume` argument starts with the `wandb-artifact://` prefix followed by the run path, i.e. `wandb-artifact://username/project/runid`. This doesn't require the model checkpoint to be present on the local system.
+**Usage**
+```shell
+$ python train.py --resume wandb-artifact://{run_path}
+```
+
+![Screenshot (70)](https://user-images.githubusercontent.com/15766192/128728988-4e84b355-6c87-41ae-a591-14aecf45343e.png)
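+Under the hood the run path is split into entity, project and run id, and the checkpoint is pulled from the `run_<id>_model` artifact. The sketch below mirrors `get_run_info()` in `wandb_utils.py`:
+
+```python
+# Sketch: how a wandb-artifact:// resume path is parsed (mirrors get_run_info in wandb_utils.py).
+from pathlib import Path
+
+resume = "wandb-artifact://username/project/runid"
+run_path = Path(resume[len("wandb-artifact://"):])
+entity, project, run_id = run_path.parent.parent.stem, run_path.parent.stem, run_path.stem
+print(entity, project, run_id, f"run_{run_id}_model")  # -> username project runid run_runid_model
+```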
+
+### 6: Resume runs from dataset artifact & checkpoint artifacts
+Local dataset or model checkpoints are not required. This can be used to resume runs directly on a different device. The syntax is the same as in the previous section, but you'll need to log both the dataset and model checkpoints as artifacts, i.e. set both `--upload_dataset` (or train from a `_wandb.yaml` file) and `--save_period`.
+**Usage**
+```shell
+$ python train.py --resume wandb-artifact://{run_path}
+```
+
+![Screenshot (70)](https://user-images.githubusercontent.com/15766192/128728988-4e84b355-6c87-41ae-a591-14aecf45343e.png)
+
+## Reports
+W&B Reports can be created from your saved runs for sharing online. Once a report is created you will receive a link you can use to publicly share your results. Here is an example report created from the COCO128 tutorial trainings of all four YOLOv5 models ([link](https://wandb.ai/glenn-jocher/yolov5_tutorial/reports/YOLOv5-COCO128-Tutorial-Results--VmlldzozMDI5OTY)).
+
+(Screenshot: Weights & Biases Reports)
+
+## Environments
+
+YOLOv5 may be run in any of the following up-to-date verified environments (with all dependencies including [CUDA](https://developer.nvidia.com/cuda)/[CUDNN](https://developer.nvidia.com/cudnn), [Python](https://www.python.org/) and [PyTorch](https://pytorch.org/) preinstalled):
+
+- **Google Colab and Kaggle** notebooks with free GPU: Open In Colab Open In Kaggle
+- **Google Cloud** Deep Learning VM. See [GCP Quickstart Guide](https://github.com/ultralytics/yolov5/wiki/GCP-Quickstart)
+- **Amazon** Deep Learning AMI. See [AWS Quickstart Guide](https://github.com/ultralytics/yolov5/wiki/AWS-Quickstart)
+- **Docker Image**. See [Docker Quickstart Guide](https://github.com/ultralytics/yolov5/wiki/Docker-Quickstart) Docker Pulls
+
+## Status
+
+![CI CPU testing](https://github.com/ultralytics/yolov5/workflows/CI%20CPU%20testing/badge.svg)
+
+If this badge is green, all [YOLOv5 GitHub Actions](https://github.com/ultralytics/yolov5/actions) Continuous Integration (CI) tests are currently passing. CI tests verify correct operation of YOLOv5 training ([train.py](https://github.com/ultralytics/yolov5/blob/master/train.py)), validation ([val.py](https://github.com/ultralytics/yolov5/blob/master/val.py)), inference ([detect.py](https://github.com/ultralytics/yolov5/blob/master/detect.py)) and export ([export.py](https://github.com/ultralytics/yolov5/blob/master/export.py)) on macOS, Windows, and Ubuntu every 24 hours and on every commit.
diff --git a/__init__.py b/utils/loggers/wandb/__init__.py similarity index 100% rename from __init__.py rename to utils/loggers/wandb/__init__.py diff --git a/utils/wandb_logging/log_dataset.py b/utils/loggers/wandb/log_dataset.py similarity index 61% rename from utils/wandb_logging/log_dataset.py rename to utils/loggers/wandb/log_dataset.py index f45a23011f15..06e81fb69307 100644 --- a/utils/wandb_logging/log_dataset.py +++ b/utils/loggers/wandb/log_dataset.py @@ -1,16 +1,16 @@ import argparse -import yaml - from wandb_utils import WandbLogger +from utils.general import LOGGER + WANDB_ARTIFACT_PREFIX = 'wandb-artifact://' def create_dataset_artifact(opt): - with open(opt.data) as f: - data = yaml.safe_load(f) # data dict - logger = WandbLogger(opt, '', None, data, job_type='Dataset Creation') + logger = WandbLogger(opt, None, job_type='Dataset Creation') # TODO: return value unused + if not logger.wandb: + LOGGER.info("install wandb using `pip install wandb` to log the dataset") if __name__ == '__main__': @@ -18,6 +18,9 @@ def create_dataset_artifact(opt): parser.add_argument('--data', type=str, default='data/coco128.yaml', help='data.yaml path') parser.add_argument('--single-cls', action='store_true', help='train as single-class dataset') parser.add_argument('--project', type=str, default='YOLOv5', help='name of W&B Project') + parser.add_argument('--entity', default=None, help='W&B entity') + parser.add_argument('--name', type=str, default='log dataset', help='name of W&B run') + opt = parser.parse_args() opt.resume = False # Explicitly disallow resume check for dataset upload job diff --git a/utils/loggers/wandb/sweep.py b/utils/loggers/wandb/sweep.py new file mode 100644 index 000000000000..206059bc30bf --- /dev/null +++ b/utils/loggers/wandb/sweep.py @@ -0,0 +1,41 @@ +import sys +from pathlib import Path + +import wandb + +FILE = Path(__file__).resolve() +ROOT = FILE.parents[3] # YOLOv5 root directory +if str(ROOT) not in sys.path: + sys.path.append(str(ROOT)) # add ROOT to PATH + +from train import parse_opt, train +from utils.callbacks import Callbacks +from utils.general import increment_path +from utils.torch_utils import select_device + + +def sweep(): + wandb.init() + # Get hyp dict from sweep agent + hyp_dict = vars(wandb.config).get("_items") + + # Workaround: get necessary opt args + opt = parse_opt(known=True) + opt.batch_size = hyp_dict.get("batch_size") + opt.save_dir = str(increment_path(Path(opt.project) / opt.name, exist_ok=opt.exist_ok or opt.evolve)) + opt.epochs = hyp_dict.get("epochs") + opt.nosave = True + opt.data = hyp_dict.get("data") + opt.weights = str(opt.weights) + opt.cfg = str(opt.cfg) + opt.data = str(opt.data) + opt.hyp = str(opt.hyp) + opt.project = str(opt.project) + device = select_device(opt.device, batch_size=opt.batch_size) + + # train + train(hyp_dict, opt, device, callbacks=Callbacks()) + + +if __name__ == "__main__": + sweep() diff --git a/utils/loggers/wandb/sweep.yaml b/utils/loggers/wandb/sweep.yaml new file mode 100644 index 000000000000..688b1ea0285f --- /dev/null +++ b/utils/loggers/wandb/sweep.yaml @@ -0,0 +1,143 @@ +# Hyperparameters for training +# To set range- +# Provide min and max values as: +# parameter: +# +# min: scalar +# max: scalar +# OR +# +# Set a specific list of search space- +# parameter: +# values: [scalar1, scalar2, scalar3...] 
+# +# You can use grid, bayesian and hyperopt search strategy +# For more info on configuring sweeps visit - https://docs.wandb.ai/guides/sweeps/configuration + +program: utils/loggers/wandb/sweep.py +method: random +metric: + name: metrics/mAP_0.5 + goal: maximize + +parameters: + # hyperparameters: set either min, max range or values list + data: + value: "data/coco128.yaml" + batch_size: + values: [64] + epochs: + values: [10] + + lr0: + distribution: uniform + min: 1e-5 + max: 1e-1 + lrf: + distribution: uniform + min: 0.01 + max: 1.0 + momentum: + distribution: uniform + min: 0.6 + max: 0.98 + weight_decay: + distribution: uniform + min: 0.0 + max: 0.001 + warmup_epochs: + distribution: uniform + min: 0.0 + max: 5.0 + warmup_momentum: + distribution: uniform + min: 0.0 + max: 0.95 + warmup_bias_lr: + distribution: uniform + min: 0.0 + max: 0.2 + box: + distribution: uniform + min: 0.02 + max: 0.2 + cls: + distribution: uniform + min: 0.2 + max: 4.0 + cls_pw: + distribution: uniform + min: 0.5 + max: 2.0 + obj: + distribution: uniform + min: 0.2 + max: 4.0 + obj_pw: + distribution: uniform + min: 0.5 + max: 2.0 + iou_t: + distribution: uniform + min: 0.1 + max: 0.7 + anchor_t: + distribution: uniform + min: 2.0 + max: 8.0 + fl_gamma: + distribution: uniform + min: 0.0 + max: 4.0 + hsv_h: + distribution: uniform + min: 0.0 + max: 0.1 + hsv_s: + distribution: uniform + min: 0.0 + max: 0.9 + hsv_v: + distribution: uniform + min: 0.0 + max: 0.9 + degrees: + distribution: uniform + min: 0.0 + max: 45.0 + translate: + distribution: uniform + min: 0.0 + max: 0.9 + scale: + distribution: uniform + min: 0.0 + max: 0.9 + shear: + distribution: uniform + min: 0.0 + max: 10.0 + perspective: + distribution: uniform + min: 0.0 + max: 0.001 + flipud: + distribution: uniform + min: 0.0 + max: 1.0 + fliplr: + distribution: uniform + min: 0.0 + max: 1.0 + mosaic: + distribution: uniform + min: 0.0 + max: 1.0 + mixup: + distribution: uniform + min: 0.0 + max: 1.0 + copy_paste: + distribution: uniform + min: 0.0 + max: 1.0 diff --git a/utils/loggers/wandb/wandb_utils.py b/utils/loggers/wandb/wandb_utils.py new file mode 100644 index 000000000000..a2c7102bce14 --- /dev/null +++ b/utils/loggers/wandb/wandb_utils.py @@ -0,0 +1,566 @@ +"""Utilities and tools for tracking runs with Weights & Biases.""" + +import logging +import os +import sys +from contextlib import contextmanager +from pathlib import Path +from typing import Dict + +import yaml +from tqdm import tqdm + +FILE = Path(__file__).resolve() +ROOT = FILE.parents[3] # YOLOv5 root directory +if str(ROOT) not in sys.path: + sys.path.append(str(ROOT)) # add ROOT to PATH + +from utils.datasets import LoadImagesAndLabels, img2label_paths +from utils.general import LOGGER, check_dataset, check_file + +try: + import wandb + + assert hasattr(wandb, '__version__') # verify package import not local dir +except (ImportError, AssertionError): + wandb = None + +RANK = int(os.getenv('RANK', -1)) +WANDB_ARTIFACT_PREFIX = 'wandb-artifact://' + + +def remove_prefix(from_string, prefix=WANDB_ARTIFACT_PREFIX): + return from_string[len(prefix):] + + +def check_wandb_config_file(data_config_file): + wandb_config = '_wandb.'.join(data_config_file.rsplit('.', 1)) # updated data.yaml path + if Path(wandb_config).is_file(): + return wandb_config + return data_config_file + + +def check_wandb_dataset(data_file): + is_trainset_wandb_artifact = False + is_valset_wandb_artifact = False + if check_file(data_file) and data_file.endswith('.yaml'): + with open(data_file, 
errors='ignore') as f: + data_dict = yaml.safe_load(f) + is_trainset_wandb_artifact = (isinstance(data_dict['train'], str) and + data_dict['train'].startswith(WANDB_ARTIFACT_PREFIX)) + is_valset_wandb_artifact = (isinstance(data_dict['val'], str) and + data_dict['val'].startswith(WANDB_ARTIFACT_PREFIX)) + if is_trainset_wandb_artifact or is_valset_wandb_artifact: + return data_dict + else: + return check_dataset(data_file) + + +def get_run_info(run_path): + run_path = Path(remove_prefix(run_path, WANDB_ARTIFACT_PREFIX)) + run_id = run_path.stem + project = run_path.parent.stem + entity = run_path.parent.parent.stem + model_artifact_name = 'run_' + run_id + '_model' + return entity, project, run_id, model_artifact_name + + +def check_wandb_resume(opt): + process_wandb_config_ddp_mode(opt) if RANK not in [-1, 0] else None + if isinstance(opt.resume, str): + if opt.resume.startswith(WANDB_ARTIFACT_PREFIX): + if RANK not in [-1, 0]: # For resuming DDP runs + entity, project, run_id, model_artifact_name = get_run_info(opt.resume) + api = wandb.Api() + artifact = api.artifact(entity + '/' + project + '/' + model_artifact_name + ':latest') + modeldir = artifact.download() + opt.weights = str(Path(modeldir) / "last.pt") + return True + return None + + +def process_wandb_config_ddp_mode(opt): + with open(check_file(opt.data), errors='ignore') as f: + data_dict = yaml.safe_load(f) # data dict + train_dir, val_dir = None, None + if isinstance(data_dict['train'], str) and data_dict['train'].startswith(WANDB_ARTIFACT_PREFIX): + api = wandb.Api() + train_artifact = api.artifact(remove_prefix(data_dict['train']) + ':' + opt.artifact_alias) + train_dir = train_artifact.download() + train_path = Path(train_dir) / 'data/images/' + data_dict['train'] = str(train_path) + + if isinstance(data_dict['val'], str) and data_dict['val'].startswith(WANDB_ARTIFACT_PREFIX): + api = wandb.Api() + val_artifact = api.artifact(remove_prefix(data_dict['val']) + ':' + opt.artifact_alias) + val_dir = val_artifact.download() + val_path = Path(val_dir) / 'data/images/' + data_dict['val'] = str(val_path) + if train_dir or val_dir: + ddp_data_path = str(Path(val_dir) / 'wandb_local_data.yaml') + with open(ddp_data_path, 'w') as f: + yaml.safe_dump(data_dict, f) + opt.data = ddp_data_path + + +class WandbLogger(): + """Log training runs, datasets, models, and predictions to Weights & Biases. + + This logger sends information to W&B at wandb.ai. By default, this information + includes hyperparameters, system configuration and metrics, model metrics, + and basic data metrics and analyses. + + By providing additional command line arguments to train.py, datasets, + models and predictions can also be logged. 
+ + For more on how this logger is used, see the Weights & Biases documentation: + https://docs.wandb.com/guides/integrations/yolov5 + """ + + def __init__(self, opt, run_id=None, job_type='Training'): + """ + - Initialize WandbLogger instance + - Upload dataset if opt.upload_dataset is True + - Setup trainig processes if job_type is 'Training' + + arguments: + opt (namespace) -- Commandline arguments for this run + run_id (str) -- Run ID of W&B run to be resumed + job_type (str) -- To set the job_type for this run + + """ + # Pre-training routine -- + self.job_type = job_type + self.wandb, self.wandb_run = wandb, None if not wandb else wandb.run + self.val_artifact, self.train_artifact = None, None + self.train_artifact_path, self.val_artifact_path = None, None + self.result_artifact = None + self.val_table, self.result_table = None, None + self.bbox_media_panel_images = [] + self.val_table_path_map = None + self.max_imgs_to_log = 16 + self.wandb_artifact_data_dict = None + self.data_dict = None + # It's more elegant to stick to 1 wandb.init call, + # but useful config data is overwritten in the WandbLogger's wandb.init call + if isinstance(opt.resume, str): # checks resume from artifact + if opt.resume.startswith(WANDB_ARTIFACT_PREFIX): + entity, project, run_id, model_artifact_name = get_run_info(opt.resume) + model_artifact_name = WANDB_ARTIFACT_PREFIX + model_artifact_name + assert wandb, 'install wandb to resume wandb runs' + # Resume wandb-artifact:// runs here| workaround for not overwriting wandb.config + self.wandb_run = wandb.init(id=run_id, + project=project, + entity=entity, + resume='allow', + allow_val_change=True) + opt.resume = model_artifact_name + elif self.wandb: + self.wandb_run = wandb.init(config=opt, + resume="allow", + project='YOLOv5' if opt.project == 'runs/train' else Path(opt.project).stem, + entity=opt.entity, + name=opt.name if opt.name != 'exp' else None, + job_type=job_type, + id=run_id, + allow_val_change=True) if not wandb.run else wandb.run + if self.wandb_run: + if self.job_type == 'Training': + if opt.upload_dataset: + if not opt.resume: + self.wandb_artifact_data_dict = self.check_and_upload_dataset(opt) + self.wandb_run.config.update({ + 'opt': vars(opt), + 'data_dict': self.wandb_artifact_data_dict + }, allow_val_change=True) + + if opt.resume: + # resume from artifact + if isinstance(opt.resume, str) and opt.resume.startswith(WANDB_ARTIFACT_PREFIX): + self.data_dict = dict(self.wandb_run.config.data_dict) + else: # local resume + self.data_dict = check_wandb_dataset(opt.data) + else: + self.data_dict = check_wandb_dataset(opt.data) + self.wandb_artifact_data_dict = self.wandb_artifact_data_dict or self.data_dict + + # write data_dict to config. useful for resuming from artifacts. Do this only when not resuming. + self.wandb_run.config.update({'data_dict': self.wandb_artifact_data_dict}, + allow_val_change=True) + self.setup_training(opt) + + if self.job_type == 'Dataset Creation': + self.wandb_run.config.update({"upload_dataset": True}) + self.data_dict = self.check_and_upload_dataset(opt) + + def check_and_upload_dataset(self, opt): + """ + Check if the dataset format is compatible and upload it as W&B artifact + + arguments: + opt (namespace)-- Commandline arguments for current run + + returns: + Updated dataset info dictionary where local dataset paths are replaced by WAND_ARFACT_PREFIX links. 
+ """ + assert wandb, 'Install wandb to upload dataset' + config_path = self.log_dataset_artifact(opt.data, + opt.single_cls, + 'YOLOv5' if opt.project == 'runs/train' else Path(opt.project).stem) + with open(config_path, errors='ignore') as f: + wandb_data_dict = yaml.safe_load(f) + return wandb_data_dict + + def setup_training(self, opt): + """ + Setup the necessary processes for training YOLO models: + - Attempt to download model checkpoint and dataset artifacts if opt.resume stats with WANDB_ARTIFACT_PREFIX + - Update data_dict, to contain info of previous run if resumed and the paths of dataset artifact if downloaded + - Setup log_dict, initialize bbox_interval + + arguments: + opt (namespace) -- commandline arguments for this run + + """ + self.log_dict, self.current_epoch = {}, 0 + self.bbox_interval = opt.bbox_interval + if isinstance(opt.resume, str): + modeldir, _ = self.download_model_artifact(opt) + if modeldir: + self.weights = Path(modeldir) / "last.pt" + config = self.wandb_run.config + opt.weights, opt.save_period, opt.batch_size, opt.bbox_interval, opt.epochs, opt.hyp, opt.imgsz = str( + self.weights), config.save_period, config.batch_size, config.bbox_interval, config.epochs,\ + config.hyp, config.imgsz + data_dict = self.data_dict + if self.val_artifact is None: # If --upload_dataset is set, use the existing artifact, don't download + self.train_artifact_path, self.train_artifact = self.download_dataset_artifact(data_dict.get('train'), + opt.artifact_alias) + self.val_artifact_path, self.val_artifact = self.download_dataset_artifact(data_dict.get('val'), + opt.artifact_alias) + + if self.train_artifact_path is not None: + train_path = Path(self.train_artifact_path) / 'data/images/' + data_dict['train'] = str(train_path) + if self.val_artifact_path is not None: + val_path = Path(self.val_artifact_path) / 'data/images/' + data_dict['val'] = str(val_path) + + if self.val_artifact is not None: + self.result_artifact = wandb.Artifact("run_" + wandb.run.id + "_progress", "evaluation") + columns = ["epoch", "id", "ground truth", "prediction"] + columns.extend(self.data_dict['names']) + self.result_table = wandb.Table(columns) + self.val_table = self.val_artifact.get("val") + if self.val_table_path_map is None: + self.map_val_table_path() + if opt.bbox_interval == -1: + self.bbox_interval = opt.bbox_interval = (opt.epochs // 10) if opt.epochs > 10 else 1 + if opt.evolve: + self.bbox_interval = opt.bbox_interval = opt.epochs + 1 + train_from_artifact = self.train_artifact_path is not None and self.val_artifact_path is not None + # Update the the data_dict to point to local artifacts dir + if train_from_artifact: + self.data_dict = data_dict + + def download_dataset_artifact(self, path, alias): + """ + download the model checkpoint artifact if the path starts with WANDB_ARTIFACT_PREFIX + + arguments: + path -- path of the dataset to be used for training + alias (str)-- alias of the artifact to be download/used for training + + returns: + (str, wandb.Artifact) -- path of the downladed dataset and it's corresponding artifact object if dataset + is found otherwise returns (None, None) + """ + if isinstance(path, str) and path.startswith(WANDB_ARTIFACT_PREFIX): + artifact_path = Path(remove_prefix(path, WANDB_ARTIFACT_PREFIX) + ":" + alias) + dataset_artifact = wandb.use_artifact(artifact_path.as_posix().replace("\\", "/")) + assert dataset_artifact is not None, "'Error: W&B dataset artifact doesn\'t exist'" + datadir = dataset_artifact.download() + return datadir, dataset_artifact + 
return None, None + + def download_model_artifact(self, opt): + """ + download the model checkpoint artifact if the resume path starts with WANDB_ARTIFACT_PREFIX + + arguments: + opt (namespace) -- Commandline arguments for this run + """ + if opt.resume.startswith(WANDB_ARTIFACT_PREFIX): + model_artifact = wandb.use_artifact(remove_prefix(opt.resume, WANDB_ARTIFACT_PREFIX) + ":latest") + assert model_artifact is not None, 'Error: W&B model artifact doesn\'t exist' + modeldir = model_artifact.download() + # epochs_trained = model_artifact.metadata.get('epochs_trained') + total_epochs = model_artifact.metadata.get('total_epochs') + is_finished = total_epochs is None + assert not is_finished, 'training is finished, can only resume incomplete runs.' + return modeldir, model_artifact + return None, None + + def log_model(self, path, opt, epoch, fitness_score, best_model=False): + """ + Log the model checkpoint as W&B artifact + + arguments: + path (Path) -- Path of directory containing the checkpoints + opt (namespace) -- Command line arguments for this run + epoch (int) -- Current epoch number + fitness_score (float) -- fitness score for current epoch + best_model (boolean) -- Boolean representing if the current checkpoint is the best yet. + """ + model_artifact = wandb.Artifact('run_' + wandb.run.id + '_model', type='model', metadata={ + 'original_url': str(path), + 'epochs_trained': epoch + 1, + 'save period': opt.save_period, + 'project': opt.project, + 'total_epochs': opt.epochs, + 'fitness_score': fitness_score + }) + model_artifact.add_file(str(path / 'last.pt'), name='last.pt') + wandb.log_artifact(model_artifact, + aliases=['latest', 'last', 'epoch ' + str(self.current_epoch), 'best' if best_model else '']) + LOGGER.info(f"Saving model artifact on epoch {epoch + 1}") + + def log_dataset_artifact(self, data_file, single_cls, project, overwrite_config=False): + """ + Log the dataset as W&B artifact and return the new data file with W&B links + + arguments: + data_file (str) -- the .yaml file with information about the dataset like - path, classes etc. + single_class (boolean) -- train multi-class data as single-class + project (str) -- project name. Used to construct the artifact path + overwrite_config (boolean) -- overwrites the data.yaml file if set to true otherwise creates a new + file with _wandb postfix. Eg -> data_wandb.yaml + + returns: + the new .yaml file with artifact links. 
it can be used to start training directly from artifacts + """ + upload_dataset = self.wandb_run.config.upload_dataset + log_val_only = isinstance(upload_dataset, str) and upload_dataset == 'val' + self.data_dict = check_dataset(data_file) # parse and check + data = dict(self.data_dict) + nc, names = (1, ['item']) if single_cls else (int(data['nc']), data['names']) + names = {k: v for k, v in enumerate(names)} # to index dictionary + + # log train set + if not log_val_only: + self.train_artifact = self.create_dataset_table(LoadImagesAndLabels( + data['train'], rect=True, batch_size=1), names, name='train') if data.get('train') else None + if data.get('train'): + data['train'] = WANDB_ARTIFACT_PREFIX + str(Path(project) / 'train') + + self.val_artifact = self.create_dataset_table(LoadImagesAndLabels( + data['val'], rect=True, batch_size=1), names, name='val') if data.get('val') else None + if data.get('val'): + data['val'] = WANDB_ARTIFACT_PREFIX + str(Path(project) / 'val') + + path = Path(data_file) + # create a _wandb.yaml file with artifacts links if both train and test set are logged + if not log_val_only: + path = (path.stem if overwrite_config else path.stem + '_wandb') + '.yaml' # updated data.yaml path + path = ROOT / 'data' / path + data.pop('download', None) + data.pop('path', None) + with open(path, 'w') as f: + yaml.safe_dump(data, f) + LOGGER.info(f"Created dataset config file {path}") + + if self.job_type == 'Training': # builds correct artifact pipeline graph + if not log_val_only: + self.wandb_run.log_artifact( + self.train_artifact) # calling use_artifact downloads the dataset. NOT NEEDED! + self.wandb_run.use_artifact(self.val_artifact) + self.val_artifact.wait() + self.val_table = self.val_artifact.get('val') + self.map_val_table_path() + else: + self.wandb_run.log_artifact(self.train_artifact) + self.wandb_run.log_artifact(self.val_artifact) + return path + + def map_val_table_path(self): + """ + Map the validation dataset Table like name of file -> it's id in the W&B Table. + Useful for - referencing artifacts for evaluation. + """ + self.val_table_path_map = {} + LOGGER.info("Mapping dataset") + for i, data in enumerate(tqdm(self.val_table.data)): + self.val_table_path_map[data[3]] = data[0] + + def create_dataset_table(self, dataset: LoadImagesAndLabels, class_to_id: Dict[int, str], name: str = 'dataset'): + """ + Create and return W&B artifact containing W&B Table of the dataset. 
+ + arguments: + dataset -- instance of LoadImagesAndLabels class used to iterate over the data to build Table + class_to_id -- hash map that maps class ids to labels + name -- name of the artifact + + returns: + dataset artifact to be logged or used + """ + # TODO: Explore multiprocessing to slpit this loop parallely| This is essential for speeding up the the logging + artifact = wandb.Artifact(name=name, type="dataset") + img_files = tqdm([dataset.path]) if isinstance(dataset.path, str) and Path(dataset.path).is_dir() else None + img_files = tqdm(dataset.im_files) if not img_files else img_files + for img_file in img_files: + if Path(img_file).is_dir(): + artifact.add_dir(img_file, name='data/images') + labels_path = 'labels'.join(dataset.path.rsplit('images', 1)) + artifact.add_dir(labels_path, name='data/labels') + else: + artifact.add_file(img_file, name='data/images/' + Path(img_file).name) + label_file = Path(img2label_paths([img_file])[0]) + artifact.add_file(str(label_file), + name='data/labels/' + label_file.name) if label_file.exists() else None + table = wandb.Table(columns=["id", "train_image", "Classes", "name"]) + class_set = wandb.Classes([{'id': id, 'name': name} for id, name in class_to_id.items()]) + for si, (img, labels, paths, shapes) in enumerate(tqdm(dataset)): + box_data, img_classes = [], {} + for cls, *xywh in labels[:, 1:].tolist(): + cls = int(cls) + box_data.append({"position": {"middle": [xywh[0], xywh[1]], "width": xywh[2], "height": xywh[3]}, + "class_id": cls, + "box_caption": "%s" % (class_to_id[cls])}) + img_classes[cls] = class_to_id[cls] + boxes = {"ground_truth": {"box_data": box_data, "class_labels": class_to_id}} # inference-space + table.add_data(si, wandb.Image(paths, classes=class_set, boxes=boxes), list(img_classes.values()), + Path(paths).name) + artifact.add(table, name) + return artifact + + def log_training_progress(self, predn, path, names): + """ + Build evaluation Table. Uses reference from validation dataset table. + + arguments: + predn (list): list of predictions in the native space in the format - [xmin, ymin, xmax, ymax, confidence, class] + path (str): local path of the current evaluation image + names (dict(int, str)): hash map that maps class ids to labels + """ + class_set = wandb.Classes([{'id': id, 'name': name} for id, name in names.items()]) + box_data = [] + avg_conf_per_class = [0] * len(self.data_dict['names']) + pred_class_count = {} + for *xyxy, conf, cls in predn.tolist(): + if conf >= 0.25: + cls = int(cls) + box_data.append( + {"position": {"minX": xyxy[0], "minY": xyxy[1], "maxX": xyxy[2], "maxY": xyxy[3]}, + "class_id": cls, + "box_caption": f"{names[cls]} {conf:.3f}", + "scores": {"class_score": conf}, + "domain": "pixel"}) + avg_conf_per_class[cls] += conf + + if cls in pred_class_count: + pred_class_count[cls] += 1 + else: + pred_class_count[cls] = 1 + + for pred_class in pred_class_count.keys(): + avg_conf_per_class[pred_class] = avg_conf_per_class[pred_class] / pred_class_count[pred_class] + + boxes = {"predictions": {"box_data": box_data, "class_labels": names}} # inference-space + id = self.val_table_path_map[Path(path).name] + self.result_table.add_data(self.current_epoch, + id, + self.val_table.data[id][1], + wandb.Image(self.val_table.data[id][1], boxes=boxes, classes=class_set), + *avg_conf_per_class + ) + + def val_one_image(self, pred, predn, path, names, im): + """ + Log validation data for one image. 
updates the result Table if validation dataset is uploaded and log bbox media panel + + arguments: + pred (list): list of scaled predictions in the format - [xmin, ymin, xmax, ymax, confidence, class] + predn (list): list of predictions in the native space - [xmin, ymin, xmax, ymax, confidence, class] + path (str): local path of the current evaluation image + """ + if self.val_table and self.result_table: # Log Table if Val dataset is uploaded as artifact + self.log_training_progress(predn, path, names) + + if len(self.bbox_media_panel_images) < self.max_imgs_to_log and self.current_epoch > 0: + if self.current_epoch % self.bbox_interval == 0: + box_data = [{"position": {"minX": xyxy[0], "minY": xyxy[1], "maxX": xyxy[2], "maxY": xyxy[3]}, + "class_id": int(cls), + "box_caption": f"{names[int(cls)]} {conf:.3f}", + "scores": {"class_score": conf}, + "domain": "pixel"} for *xyxy, conf, cls in pred.tolist()] + boxes = {"predictions": {"box_data": box_data, "class_labels": names}} # inference-space + self.bbox_media_panel_images.append(wandb.Image(im, boxes=boxes, caption=path.name)) + + def log(self, log_dict): + """ + save the metrics to the logging dictionary + + arguments: + log_dict (Dict) -- metrics/media to be logged in current step + """ + if self.wandb_run: + for key, value in log_dict.items(): + self.log_dict[key] = value + + def end_epoch(self, best_result=False): + """ + commit the log_dict, model artifacts and Tables to W&B and flush the log_dict. + + arguments: + best_result (boolean): Boolean representing if the result of this evaluation is best or not + """ + if self.wandb_run: + with all_logging_disabled(): + if self.bbox_media_panel_images: + self.log_dict["BoundingBoxDebugger"] = self.bbox_media_panel_images + try: + wandb.log(self.log_dict) + except BaseException as e: + LOGGER.info( + f"An error occurred in wandb logger. The training will proceed without interruption. More info\n{e}") + self.wandb_run.finish() + self.wandb_run = None + + self.log_dict = {} + self.bbox_media_panel_images = [] + if self.result_artifact: + self.result_artifact.add(self.result_table, 'result') + wandb.log_artifact(self.result_artifact, aliases=['latest', 'last', 'epoch ' + str(self.current_epoch), + ('best' if best_result else '')]) + + wandb.log({"evaluation": self.result_table}) + columns = ["epoch", "id", "ground truth", "prediction"] + columns.extend(self.data_dict['names']) + self.result_table = wandb.Table(columns) + self.result_artifact = wandb.Artifact("run_" + wandb.run.id + "_progress", "evaluation") + + def finish_run(self): + """ + Log metrics if any and finish the current W&B run + """ + if self.wandb_run: + if self.log_dict: + with all_logging_disabled(): + wandb.log(self.log_dict) + wandb.run.finish() + + +@contextmanager +def all_logging_disabled(highest_level=logging.CRITICAL): + """ source - https://gist.github.com/simon-weber/7853144 + A context manager that will prevent any logging messages triggered during the body from being processed. + :param highest_level: the maximum logging level in use. + This would only need to be changed if a custom level greater than CRITICAL is defined. 
+ """ + previous_level = logging.root.manager.disable + logging.disable(highest_level) + try: + yield + finally: + logging.disable(previous_level) diff --git a/utils/loss.py b/utils/loss.py index 9e78df17fdf3..bf9b592d4ad2 100644 --- a/utils/loss.py +++ b/utils/loss.py @@ -1,10 +1,13 @@ -# Loss functions +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license +""" +Loss functions +""" import torch import torch.nn as nn -from utils.general import bbox_iou -from utils.torch_utils import is_parallel +from utils.metrics import bbox_iou +from utils.torch_utils import de_parallel def smooth_BCE(eps=0.1): # https://github.com/ultralytics/yolov3/issues/238#issuecomment-598028441 @@ -15,7 +18,7 @@ def smooth_BCE(eps=0.1): # https://github.com/ultralytics/yolov3/issues/238#iss class BCEBlurWithLogitsLoss(nn.Module): # BCEwithLogitLoss() with reduced missing label effects. def __init__(self, alpha=0.05): - super(BCEBlurWithLogitsLoss, self).__init__() + super().__init__() self.loss_fcn = nn.BCEWithLogitsLoss(reduction='none') # must be nn.BCEWithLogitsLoss() self.alpha = alpha @@ -32,7 +35,7 @@ def forward(self, pred, true): class FocalLoss(nn.Module): # Wraps focal loss around existing loss_fcn(), i.e. criteria = FocalLoss(nn.BCEWithLogitsLoss(), gamma=1.5) def __init__(self, loss_fcn, gamma=1.5, alpha=0.25): - super(FocalLoss, self).__init__() + super().__init__() self.loss_fcn = loss_fcn # must be nn.BCEWithLogitsLoss() self.gamma = gamma self.alpha = alpha @@ -62,7 +65,7 @@ def forward(self, pred, true): class QFocalLoss(nn.Module): # Wraps Quality focal loss around existing loss_fcn(), i.e. criteria = FocalLoss(nn.BCEWithLogitsLoss(), gamma=1.5) def __init__(self, loss_fcn, gamma=1.5, alpha=0.25): - super(QFocalLoss, self).__init__() + super().__init__() self.loss_fcn = loss_fcn # must be nn.BCEWithLogitsLoss() self.gamma = gamma self.alpha = alpha @@ -86,9 +89,10 @@ def forward(self, pred, true): class ComputeLoss: + sort_obj_iou = False + # Compute losses def __init__(self, model, autobalance=False): - super(ComputeLoss, self).__init__() device = next(model.parameters()).device # get model device h = model.hyp # hyperparameters @@ -104,42 +108,53 @@ def __init__(self, model, autobalance=False): if g > 0: BCEcls, BCEobj = FocalLoss(BCEcls, g), FocalLoss(BCEobj, g) - det = model.module.model[-1] if is_parallel(model) else model.model[-1] # Detect() module - self.balance = {3: [4.0, 1.0, 0.4]}.get(det.nl, [4.0, 1.0, 0.25, 0.06, .02]) # P3-P7 - self.ssi = list(det.stride).index(16) if autobalance else 0 # stride 16 index - self.BCEcls, self.BCEobj, self.gr, self.hyp, self.autobalance = BCEcls, BCEobj, model.gr, h, autobalance - for k in 'na', 'nc', 'nl', 'anchors': - setattr(self, k, getattr(det, k)) - - def __call__(self, p, targets): # predictions, targets, model - device = targets.device - lcls, lbox, lobj = torch.zeros(1, device=device), torch.zeros(1, device=device), torch.zeros(1, device=device) + m = de_parallel(model).model[-1] # Detect() module + self.balance = {3: [4.0, 1.0, 0.4]}.get(m.nl, [4.0, 1.0, 0.25, 0.06, 0.02]) # P3-P7 + self.ssi = list(m.stride).index(16) if autobalance else 0 # stride 16 index + self.BCEcls, self.BCEobj, self.gr, self.hyp, self.autobalance = BCEcls, BCEobj, 1.0, h, autobalance + self.na = m.na # number of anchors + self.nc = m.nc # number of classes + self.nl = m.nl # number of layers + self.anchors = m.anchors + self.device = device + + def __call__(self, p, targets): # predictions, targets + lcls = torch.zeros(1, device=self.device) # class loss + lbox = 
torch.zeros(1, device=self.device) # box loss + lobj = torch.zeros(1, device=self.device) # object loss tcls, tbox, indices, anchors = self.build_targets(p, targets) # targets # Losses for i, pi in enumerate(p): # layer index, layer predictions b, a, gj, gi = indices[i] # image, anchor, gridy, gridx - tobj = torch.zeros_like(pi[..., 0], device=device) # target obj + tobj = torch.zeros(pi.shape[:4], dtype=pi.dtype, device=self.device) # target obj n = b.shape[0] # number of targets if n: - ps = pi[b, a, gj, gi] # prediction subset corresponding to targets + # pxy, pwh, _, pcls = pi[b, a, gj, gi].tensor_split((2, 4, 5), dim=1) # faster, requires torch 1.8.0 + pxy, pwh, _, pcls = pi[b, a, gj, gi].split((2, 2, 1, self.nc), 1) # target-subset of predictions # Regression - pxy = ps[:, :2].sigmoid() * 2. - 0.5 - pwh = (ps[:, 2:4].sigmoid() * 2) ** 2 * anchors[i] + pxy = pxy.sigmoid() * 2 - 0.5 + pwh = (pwh.sigmoid() * 2) ** 2 * anchors[i] pbox = torch.cat((pxy, pwh), 1) # predicted box iou = bbox_iou(pbox.T, tbox[i], x1y1x2y2=False, CIoU=True) # iou(prediction, target) lbox += (1.0 - iou).mean() # iou loss # Objectness - tobj[b, a, gj, gi] = (1.0 - self.gr) + self.gr * iou.detach().clamp(0).type(tobj.dtype) # iou ratio + iou = iou.detach().clamp(0).type(tobj.dtype) + if self.sort_obj_iou: + j = iou.argsort() + b, a, gj, gi, iou = b[j], a[j], gj[j], gi[j], iou[j] + if self.gr < 1: + iou = (1.0 - self.gr) + self.gr * iou + tobj[b, a, gj, gi] = iou # iou ratio # Classification if self.nc > 1: # cls loss (only if multiple classes) - t = torch.full_like(ps[:, 5:], self.cn, device=device) # targets + t = torch.full_like(pcls, self.cn, device=self.device) # targets t[range(n), tcls[i]] = self.cp - lcls += self.BCEcls(ps[:, 5:], t) # BCE + lcls += self.BCEcls(pcls, t) # BCE # Append targets to text file # with open('targets.txt', 'a') as file: @@ -157,22 +172,21 @@ def __call__(self, p, targets): # predictions, targets, model lcls *= self.hyp['cls'] bs = tobj.shape[0] # batch size - loss = lbox + lobj + lcls - return loss * bs, torch.cat((lbox, lobj, lcls, loss)).detach() + return (lbox + lobj + lcls) * bs, torch.cat((lbox, lobj, lcls)).detach() def build_targets(self, p, targets): # Build targets for compute_loss(), input targets(image,class,x,y,w,h) na, nt = self.na, targets.shape[0] # number of anchors, targets tcls, tbox, indices, anch = [], [], [], [] - gain = torch.ones(7, device=targets.device) # normalized to gridspace gain - ai = torch.arange(na, device=targets.device).float().view(na, 1).repeat(1, nt) # same as .repeat_interleave(nt) + gain = torch.ones(7, device=self.device) # normalized to gridspace gain + ai = torch.arange(na, device=self.device).float().view(na, 1).repeat(1, nt) # same as .repeat_interleave(nt) targets = torch.cat((targets.repeat(na, 1, 1), ai[:, :, None]), 2) # append anchor indices g = 0.5 # bias off = torch.tensor([[0, 0], [1, 0], [0, 1], [-1, 0], [0, -1], # j,k,l,m # [1, 1], [1, -1], [-1, 1], [-1, -1], # jk,jm,lk,lm - ], device=targets.device).float() * g # offsets + ], device=self.device).float() * g # offsets for i in range(self.nl): anchors = self.anchors[i] @@ -183,15 +197,15 @@ def build_targets(self, p, targets): if nt: # Matches r = t[:, :, 4:6] / anchors[:, None] # wh ratio - j = torch.max(r, 1. 
/ r).max(2)[0] < self.hyp['anchor_t'] # compare + j = torch.max(r, 1 / r).max(2)[0] < self.hyp['anchor_t'] # compare # j = wh_iou(anchors, t[:, 4:6]) > model.hyp['iou_t'] # iou(3,n)=wh_iou(anchors(3,2), gwh(n,2)) t = t[j] # filter # Offsets gxy = t[:, 2:4] # grid xy gxi = gain[[2, 3]] - gxy # inverse - j, k = ((gxy % 1. < g) & (gxy > 1.)).T - l, m = ((gxi % 1. < g) & (gxi > 1.)).T + j, k = ((gxy % 1 < g) & (gxy > 1)).T + l, m = ((gxi % 1 < g) & (gxi > 1)).T j = torch.stack((torch.ones_like(j), j, k, l, m)) t = t.repeat((5, 1, 1))[j] offsets = (torch.zeros_like(gxy)[None] + off[:, None])[j] @@ -200,14 +214,12 @@ def build_targets(self, p, targets): offsets = 0 # Define - b, c = t[:, :2].long().T # image, class - gxy = t[:, 2:4] # grid xy - gwh = t[:, 4:6] # grid wh + bc, gxy, gwh, a = t.unsafe_chunk(4, dim=1) # (image, class), grid xy, grid wh, anchors + a, (b, c) = a.long().view(-1), bc.long().T # anchors, image, class gij = (gxy - offsets).long() - gi, gj = gij.T # grid xy indices + gi, gj = gij.T # grid indices # Append - a = t[:, 6].long() # anchor indices indices.append((b, a, gj.clamp_(0, gain[3] - 1), gi.clamp_(0, gain[2] - 1))) # image, anchor, grid indices tbox.append(torch.cat((gxy - gij, gwh), 1)) # box anch.append(anchors[a]) # anchors diff --git a/utils/metrics.py b/utils/metrics.py index 323c84b6c873..857fa5d81f91 100644 --- a/utils/metrics.py +++ b/utils/metrics.py @@ -1,13 +1,16 @@ -# Model validation metrics +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license +""" +Model validation metrics +""" +import math +import warnings from pathlib import Path import matplotlib.pyplot as plt import numpy as np import torch -from . import general - def fitness(x): # Model fitness as a weighted combination of metrics @@ -15,7 +18,7 @@ def fitness(x): return (x[:, :4] * w).sum(1) -def ap_per_class(tp, conf, pred_cls, target_cls, plot=False, save_dir='.', names=()): +def ap_per_class(tp, conf, pred_cls, target_cls, plot=False, save_dir='.', names=(), eps=1e-16): """ Compute the average precision, given the recall and precision curves. Source: https://github.com/rafaelpadilla/Object-Detection-Metrics. 
# Arguments @@ -34,7 +37,7 @@ def ap_per_class(tp, conf, pred_cls, target_cls, plot=False, save_dir='.', names tp, conf, pred_cls = tp[i], conf[i], pred_cls[i] # Find unique classes - unique_classes = np.unique(target_cls) + unique_classes, nt = np.unique(target_cls, return_counts=True) nc = unique_classes.shape[0] # number of classes, number of detections # Create Precision-Recall curve and compute AP for each class @@ -42,7 +45,7 @@ def ap_per_class(tp, conf, pred_cls, target_cls, plot=False, save_dir='.', names ap, p, r = np.zeros((nc, tp.shape[1])), np.zeros((nc, 1000)), np.zeros((nc, 1000)) for ci, c in enumerate(unique_classes): i = pred_cls == c - n_l = (target_cls == c).sum() # number of labels + n_l = nt[ci] # number of labels n_p = i.sum() # number of predictions if n_p == 0 or n_l == 0: @@ -53,7 +56,7 @@ def ap_per_class(tp, conf, pred_cls, target_cls, plot=False, save_dir='.', names tpc = tp[i].cumsum(0) # Recall - recall = tpc / (n_l + 1e-16) # recall curve + recall = tpc / (n_l + eps) # recall curve r[ci] = np.interp(-px, -conf[i], recall[:, 0], left=0) # negative x, xp because xp decreases # Precision @@ -67,7 +70,9 @@ def ap_per_class(tp, conf, pred_cls, target_cls, plot=False, save_dir='.', names py.append(np.interp(px, mrec, mpre)) # precision at mAP@0.5 # Compute F1 (harmonic mean of precision and recall) - f1 = 2 * p * r / (p + r + 1e-16) + f1 = 2 * p * r / (p + r + eps) + names = [v for k, v in names.items() if k in unique_classes] # list: only classes that have data + names = {i: v for i, v in enumerate(names)} # to dict if plot: plot_pr_curve(px, py, ap, Path(save_dir) / 'PR_curve.png', names) plot_mc_curve(px, f1, Path(save_dir) / 'F1_curve.png', names, ylabel='F1') @@ -75,7 +80,10 @@ def ap_per_class(tp, conf, pred_cls, target_cls, plot=False, save_dir='.', names plot_mc_curve(px, r, Path(save_dir) / 'R_curve.png', names, ylabel='Recall') i = f1.mean(0).argmax() # max F1 index - return p[:, i], r[:, i], ap, f1[:, i], unique_classes.astype('int32') + p, r, f1 = p[:, i], r[:, i], f1[:, i] + tp = (r * nt).round() # true positives + fp = (tp / (p + eps) - tp).round() # false positives + return tp, fp, p, r, f1, ap, unique_classes.astype('int32') def compute_ap(recall, precision): @@ -88,8 +96,8 @@ def compute_ap(recall, precision): """ # Append sentinel values to beginning and end - mrec = np.concatenate(([0.], recall, [recall[-1] + 0.01])) - mpre = np.concatenate(([1.], precision, [0.])) + mrec = np.concatenate(([0.0], recall, [1.0])) + mpre = np.concatenate(([1.0], precision, [0.0])) # Compute the precision envelope mpre = np.flip(np.maximum.accumulate(np.flip(mpre))) @@ -127,7 +135,7 @@ def process_batch(self, detections, labels): detections = detections[detections[:, 4] > self.conf] gt_classes = labels[:, 0].int() detection_classes = detections[:, 5].int() - iou = general.box_iou(labels[:, 1:], detections[:, :4]) + iou = box_iou(labels[:, 1:], detections[:, :4]) x = torch.where(iou > self.iou_thres) if x[0].shape[0]: @@ -157,30 +165,139 @@ def process_batch(self, detections, labels): def matrix(self): return self.matrix - def plot(self, save_dir='', names=()): + def tp_fp(self): + tp = self.matrix.diagonal() # true positives + fp = self.matrix.sum(1) - tp # false positives + # fn = self.matrix.sum(0) - tp # false negatives (missed detections) + return tp[:-1], fp[:-1] # remove background class + + def plot(self, normalize=True, save_dir='', names=()): try: import seaborn as sn - array = self.matrix / (self.matrix.sum(0).reshape(1, self.nc + 1) + 1E-6) # normalize + 
array = self.matrix / ((self.matrix.sum(0).reshape(1, -1) + 1E-9) if normalize else 1) # normalize columns array[array < 0.005] = np.nan # don't annotate (would appear as 0.00) fig = plt.figure(figsize=(12, 9), tight_layout=True) - sn.set(font_scale=1.0 if self.nc < 50 else 0.8) # for label size - labels = (0 < len(names) < 99) and len(names) == self.nc # apply names to ticklabels - sn.heatmap(array, annot=self.nc < 30, annot_kws={"size": 8}, cmap='Blues', fmt='.2f', square=True, - xticklabels=names + ['background FP'] if labels else "auto", - yticklabels=names + ['background FN'] if labels else "auto").set_facecolor((1, 1, 1)) + nc, nn = self.nc, len(names) # number of classes, names + sn.set(font_scale=1.0 if nc < 50 else 0.8) # for label size + labels = (0 < nn < 99) and (nn == nc) # apply names to ticklabels + with warnings.catch_warnings(): + warnings.simplefilter('ignore') # suppress empty matrix RuntimeWarning: All-NaN slice encountered + sn.heatmap(array, annot=nc < 30, annot_kws={"size": 8}, cmap='Blues', fmt='.2f', square=True, vmin=0.0, + xticklabels=names + ['background FP'] if labels else "auto", + yticklabels=names + ['background FN'] if labels else "auto").set_facecolor((1, 1, 1)) fig.axes[0].set_xlabel('True') fig.axes[0].set_ylabel('Predicted') fig.savefig(Path(save_dir) / 'confusion_matrix.png', dpi=250) + plt.close() except Exception as e: - pass + print(f'WARNING: ConfusionMatrix plot failure: {e}') def print(self): for i in range(self.nc + 1): print(' '.join(map(str, self.matrix[i]))) +def bbox_iou(box1, box2, x1y1x2y2=True, GIoU=False, DIoU=False, CIoU=False, eps=1e-7): + # Returns the IoU of box1 to box2. box1 is 4, box2 is nx4 + box2 = box2.T + + # Get the coordinates of bounding boxes + if x1y1x2y2: # x1, y1, x2, y2 = box1 + b1_x1, b1_y1, b1_x2, b1_y2 = box1[0], box1[1], box1[2], box1[3] + b2_x1, b2_y1, b2_x2, b2_y2 = box2[0], box2[1], box2[2], box2[3] + else: # transform from xywh to xyxy + b1_x1, b1_x2 = box1[0] - box1[2] / 2, box1[0] + box1[2] / 2 + b1_y1, b1_y2 = box1[1] - box1[3] / 2, box1[1] + box1[3] / 2 + b2_x1, b2_x2 = box2[0] - box2[2] / 2, box2[0] + box2[2] / 2 + b2_y1, b2_y2 = box2[1] - box2[3] / 2, box2[1] + box2[3] / 2 + + # Intersection area + inter = (torch.min(b1_x2, b2_x2) - torch.max(b1_x1, b2_x1)).clamp(0) * \ + (torch.min(b1_y2, b2_y2) - torch.max(b1_y1, b2_y1)).clamp(0) + + # Union Area + w1, h1 = b1_x2 - b1_x1, b1_y2 - b1_y1 + eps + w2, h2 = b2_x2 - b2_x1, b2_y2 - b2_y1 + eps + union = w1 * h1 + w2 * h2 - inter + eps + + iou = inter / union + if CIoU or DIoU or GIoU: + cw = torch.max(b1_x2, b2_x2) - torch.min(b1_x1, b2_x1) # convex (smallest enclosing box) width + ch = torch.max(b1_y2, b2_y2) - torch.min(b1_y1, b2_y1) # convex height + if CIoU or DIoU: # Distance or Complete IoU https://arxiv.org/abs/1911.08287v1 + c2 = cw ** 2 + ch ** 2 + eps # convex diagonal squared + rho2 = ((b2_x1 + b2_x2 - b1_x1 - b1_x2) ** 2 + + (b2_y1 + b2_y2 - b1_y1 - b1_y2) ** 2) / 4 # center distance squared + if CIoU: # https://github.com/Zzh-tju/DIoU-SSD-pytorch/blob/master/utils/box/box_utils.py#L47 + v = (4 / math.pi ** 2) * torch.pow(torch.atan(w2 / h2) - torch.atan(w1 / h1), 2) + with torch.no_grad(): + alpha = v / (v - iou + (1 + eps)) + return iou - (rho2 / c2 + v * alpha) # CIoU + return iou - rho2 / c2 # DIoU + c_area = cw * ch + eps # convex area + return iou - (c_area - union) / c_area # GIoU https://arxiv.org/pdf/1902.09630.pdf + return iou # IoU + + +def box_iou(box1, box2): + # 
https://github.com/pytorch/vision/blob/master/torchvision/ops/boxes.py + """ + Return intersection-over-union (Jaccard index) of boxes. + Both sets of boxes are expected to be in (x1, y1, x2, y2) format. + Arguments: + box1 (Tensor[N, 4]) + box2 (Tensor[M, 4]) + Returns: + iou (Tensor[N, M]): the NxM matrix containing the pairwise + IoU values for every element in boxes1 and boxes2 + """ + + def box_area(box): + # box = 4xn + return (box[2] - box[0]) * (box[3] - box[1]) + + area1 = box_area(box1.T) + area2 = box_area(box2.T) + + # inter(N,M) = (rb(N,M,2) - lt(N,M,2)).clamp(0).prod(2) + inter = (torch.min(box1[:, None, 2:], box2[:, 2:]) - torch.max(box1[:, None, :2], box2[:, :2])).clamp(0).prod(2) + return inter / (area1[:, None] + area2 - inter) # iou = inter / (area1 + area2 - inter) + + +def bbox_ioa(box1, box2, eps=1E-7): + """ Returns the intersection over box2 area given box1, box2. Boxes are x1y1x2y2 + box1: np.array of shape(4) + box2: np.array of shape(nx4) + returns: np.array of shape(n) + """ + + box2 = box2.transpose() + + # Get the coordinates of bounding boxes + b1_x1, b1_y1, b1_x2, b1_y2 = box1[0], box1[1], box1[2], box1[3] + b2_x1, b2_y1, b2_x2, b2_y2 = box2[0], box2[1], box2[2], box2[3] + + # Intersection area + inter_area = (np.minimum(b1_x2, b2_x2) - np.maximum(b1_x1, b2_x1)).clip(0) * \ + (np.minimum(b1_y2, b2_y2) - np.maximum(b1_y1, b2_y1)).clip(0) + + # box2 area + box2_area = (b2_x2 - b2_x1) * (b2_y2 - b2_y1) + eps + + # Intersection over box2 area + return inter_area / box2_area + + +def wh_iou(wh1, wh2): + # Returns the nxm IoU matrix. wh1 is nx2, wh2 is mx2 + wh1 = wh1[:, None] # [N,1,2] + wh2 = wh2[None] # [1,M,2] + inter = torch.min(wh1, wh2).prod(2) # [N,M] + return inter / (wh1.prod(2) + wh2.prod(2) - inter) # iou = inter / (area1 + area2 - inter) + + # Plots ---------------------------------------------------------------------------------------------------------------- def plot_pr_curve(px, py, ap, save_dir='pr_curve.png', names=()): @@ -201,6 +318,7 @@ def plot_pr_curve(px, py, ap, save_dir='pr_curve.png', names=()): ax.set_ylim(0, 1) plt.legend(bbox_to_anchor=(1.04, 1), loc="upper left") fig.savefig(Path(save_dir), dpi=250) + plt.close() def plot_mc_curve(px, py, save_dir='mc_curve.png', names=(), xlabel='Confidence', ylabel='Metric'): @@ -221,3 +339,4 @@ def plot_mc_curve(px, py, save_dir='mc_curve.png', names=(), xlabel='Confidence' ax.set_ylim(0, 1) plt.legend(bbox_to_anchor=(1.04, 1), loc="upper left") fig.savefig(Path(save_dir), dpi=250) + plt.close() diff --git a/utils/plots.py b/utils/plots.py index 8313ef210f90..a30c0faf962a 100644 --- a/utils/plots.py +++ b/utils/plots.py @@ -1,26 +1,29 @@ -# Plotting utils +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license +""" +Plotting utils +""" -import glob import math import os -import random from copy import copy from pathlib import Path +from urllib.error import URLError import cv2 import matplotlib import matplotlib.pyplot as plt import numpy as np import pandas as pd -import seaborn as sns +import seaborn as sn import torch -import yaml from PIL import Image, ImageDraw, ImageFont -from utils.general import xywh2xyxy, xyxy2xywh +from utils.general import (CONFIG_DIR, FONT, LOGGER, Timeout, check_font, check_requirements, clip_coords, + increment_path, is_ascii, is_chinese, try_except, xywh2xyxy, xyxy2xywh) from utils.metrics import fitness # Settings +RANK = int(os.getenv('RANK', -1)) matplotlib.rc('font', **{'size': 11}) matplotlib.use('Agg') # for writing to files only @@ -46,6 +49,106 @@ def hex2rgb(h): 
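
Editor's note: a quick sanity check (illustrative boxes, not part of the diff) of the pairwise NxM behaviour of the `box_iou()` added to `utils/metrics.py` above; boxes are x1y1x2y2:

```
import torch
from utils.metrics import box_iou  # added to metrics.py in this diff

b1 = torch.tensor([[0., 0., 10., 10.]])  # one 10x10 box
b2 = torch.tensor([[0., 0., 10., 10.],   # identical box -> IoU 1.0
                   [5., 5., 15., 15.]])  # corner overlap: 25 / (100 + 100 - 25)
print(box_iou(b1, b2))                   # tensor([[1.0000, 0.1429]]), an N x M (1 x 2) matrix
```
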
# rgb order (PIL) colors = Colors() # create instance for 'from utils.plots import colors' +def check_pil_font(font=FONT, size=10): + # Return a PIL TrueType Font, downloading to CONFIG_DIR if necessary + font = Path(font) + font = font if font.exists() else (CONFIG_DIR / font.name) + try: + return ImageFont.truetype(str(font) if font.exists() else font.name, size) + except Exception: # download if missing + try: + check_font(font) + return ImageFont.truetype(str(font), size) + except TypeError: + check_requirements('Pillow>=8.4.0') # known issue https://github.com/ultralytics/yolov5/issues/5374 + except URLError: # not online + return ImageFont.load_default() + + +class Annotator: + if RANK in (-1, 0): + check_pil_font() # download TTF if necessary + + # YOLOv5 Annotator for train/val mosaics and jpgs and detect/hub inference annotations + def __init__(self, im, line_width=None, font_size=None, font='Arial.ttf', pil=False, example='abc'): + assert im.data.contiguous, 'Image not contiguous. Apply np.ascontiguousarray(im) to Annotator() input images.' + self.pil = pil or not is_ascii(example) or is_chinese(example) + if self.pil: # use PIL + self.im = im if isinstance(im, Image.Image) else Image.fromarray(im) + self.draw = ImageDraw.Draw(self.im) + self.font = check_pil_font(font='Arial.Unicode.ttf' if is_chinese(example) else font, + size=font_size or max(round(sum(self.im.size) / 2 * 0.035), 12)) + else: # use cv2 + self.im = im + self.lw = line_width or max(round(sum(im.shape) / 2 * 0.003), 2) # line width + + def box_label(self, box, label='', color=(128, 128, 128), txt_color=(255, 255, 255)): + # Add one xyxy box to image with label + if self.pil or not is_ascii(label): + self.draw.rectangle(box, width=self.lw, outline=color) # box + if label: + w, h = self.font.getsize(label) # text width, height + outside = box[1] - h >= 0 # label fits outside box + self.draw.rectangle((box[0], + box[1] - h if outside else box[1], + box[0] + w + 1, + box[1] + 1 if outside else box[1] + h + 1), fill=color) + # self.draw.text((box[0], box[1]), label, fill=txt_color, font=self.font, anchor='ls') # for PIL>8.0 + self.draw.text((box[0], box[1] - h if outside else box[1]), label, fill=txt_color, font=self.font) + else: # cv2 + p1, p2 = (int(box[0]), int(box[1])), (int(box[2]), int(box[3])) + cv2.rectangle(self.im, p1, p2, color, thickness=self.lw, lineType=cv2.LINE_AA) + if label: + tf = max(self.lw - 1, 1) # font thickness + w, h = cv2.getTextSize(label, 0, fontScale=self.lw / 3, thickness=tf)[0] # text width, height + outside = p1[1] - h - 3 >= 0 # label fits outside box + p2 = p1[0] + w, p1[1] - h - 3 if outside else p1[1] + h + 3 + cv2.rectangle(self.im, p1, p2, color, -1, cv2.LINE_AA) # filled + cv2.putText(self.im, label, (p1[0], p1[1] - 2 if outside else p1[1] + h + 2), 0, self.lw / 3, txt_color, + thickness=tf, lineType=cv2.LINE_AA) + + def rectangle(self, xy, fill=None, outline=None, width=1): + # Add rectangle to image (PIL-only) + self.draw.rectangle(xy, fill, outline, width) + + def text(self, xy, text, txt_color=(255, 255, 255)): + # Add text to image (PIL-only) + w, h = self.font.getsize(text) # text width, height + self.draw.text((xy[0], xy[1] - h + 1), text, fill=txt_color, font=self.font) + + def result(self): + # Return annotated image as array + return np.asarray(self.im) + + +def feature_visualization(x, module_type, stage, n=32, save_dir=Path('runs/detect/exp')): + """ + x: Features to be visualized + module_type: Module type + stage: Module stage within model + n: Maximum number 
of feature maps to plot + save_dir: Directory to save results + """ + if 'Detect' not in module_type: + batch, channels, height, width = x.shape # batch, channels, height, width + if height > 1 and width > 1: + f = save_dir / f"stage{stage}_{module_type.split('.')[-1]}_features.png" # filename + + blocks = torch.chunk(x[0].cpu(), channels, dim=0) # select batch index 0, block by channels + n = min(n, channels) # number of plots + fig, ax = plt.subplots(math.ceil(n / 8), 8, tight_layout=True) # 8 rows x n/8 cols + ax = ax.ravel() + plt.subplots_adjust(wspace=0.05, hspace=0.05) + for i in range(n): + ax[i].imshow(blocks[i].squeeze()) # cmap='gray' + ax[i].axis('off') + + LOGGER.info(f'Saving {f}... ({n}/{channels})') + plt.savefig(f, dpi=300, bbox_inches='tight') + plt.close() + np.save(str(f.with_suffix('.npy')), x[0].cpu().numpy()) # npy save + + def hist2d(x, y, n=100): # 2d histogram used in labels.png and evolve.png xedges, yedges = np.linspace(x.min(), x.max(), n), np.linspace(y.min(), y.max(), n) @@ -68,54 +171,6 @@ def butter_lowpass(cutoff, fs, order): return filtfilt(b, a, data) # forward-backward filter -def plot_one_box(x, im, color=(128, 128, 128), label=None, line_thickness=3): - # Plots one bounding box on image 'im' using OpenCV - assert im.data.contiguous, 'Image not contiguous. Apply np.ascontiguousarray(im) to plot_on_box() input image.' - tl = line_thickness or round(0.002 * (im.shape[0] + im.shape[1]) / 2) + 1 # line/font thickness - c1, c2 = (int(x[0]), int(x[1])), (int(x[2]), int(x[3])) - cv2.rectangle(im, c1, c2, color, thickness=tl, lineType=cv2.LINE_AA) - if label: - tf = max(tl - 1, 1) # font thickness - t_size = cv2.getTextSize(label, 0, fontScale=tl / 3, thickness=tf)[0] - c2 = c1[0] + t_size[0], c1[1] - t_size[1] - 3 - cv2.rectangle(im, c1, c2, color, -1, cv2.LINE_AA) # filled - cv2.putText(im, label, (c1[0], c1[1] - 2), 0, tl / 3, [225, 255, 255], thickness=tf, lineType=cv2.LINE_AA) - - -def plot_one_box_PIL(box, im, color=(128, 128, 128), label=None, line_thickness=None): - # Plots one bounding box on image 'im' using PIL - im = Image.fromarray(im) - draw = ImageDraw.Draw(im) - line_thickness = line_thickness or max(int(min(im.size) / 200), 2) - draw.rectangle(box, width=line_thickness, outline=color) # plot - if label: - font = ImageFont.truetype("Arial.ttf", size=max(round(max(im.size) / 40), 12)) - txt_width, txt_height = font.getsize(label) - draw.rectangle([box[0], box[1] - txt_height + 4, box[0] + txt_width, box[1]], fill=color) - draw.text((box[0], box[1] - txt_height + 1), label, fill=(255, 255, 255), font=font) - return np.asarray(im) - - -def plot_wh_methods(): # from utils.plots import *; plot_wh_methods() - # Compares the two methods for width-height anchor multiplication - # https://github.com/ultralytics/yolov3/issues/168 - x = np.arange(-4.0, 4.0, .1) - ya = np.exp(x) - yb = torch.sigmoid(torch.from_numpy(x)).numpy() * 2 - - fig = plt.figure(figsize=(6, 3), tight_layout=True) - plt.plot(x, ya, '.-', label='YOLOv3') - plt.plot(x, yb ** 2, '.-', label='YOLOv5 ^2') - plt.plot(x, yb ** 1.6, '.-', label='YOLOv5 ^1.6') - plt.xlim(left=-4, right=4) - plt.ylim(bottom=0, top=6) - plt.xlabel('input') - plt.ylabel('output') - plt.grid() - plt.legend() - fig.savefig('comparison.png', dpi=200) - - def output_to_target(output): # Convert model output to target format [batch_id, class_id, x, y, w, h, conf] targets = [] @@ -125,82 +180,65 @@ def output_to_target(output): return np.array(targets) -def plot_images(images, targets, paths=None, 
fname='images.jpg', names=None, max_size=640, max_subplots=16): +def plot_images(images, targets, paths=None, fname='images.jpg', names=None, max_size=1920, max_subplots=16): # Plot image grid with labels - if isinstance(images, torch.Tensor): images = images.cpu().float().numpy() if isinstance(targets, torch.Tensor): targets = targets.cpu().numpy() - - # un-normalise if np.max(images[0]) <= 1: - images *= 255 - - tl = 3 # line thickness - tf = max(tl - 1, 1) # font thickness + images *= 255 # de-normalise (optional) bs, _, h, w = images.shape # batch size, _, height, width bs = min(bs, max_subplots) # limit plot images ns = np.ceil(bs ** 0.5) # number of subplots (square) - # Check if we should resize - scale_factor = max_size / max(h, w) - if scale_factor < 1: - h = math.ceil(scale_factor * h) - w = math.ceil(scale_factor * w) - + # Build Image mosaic = np.full((int(ns * h), int(ns * w), 3), 255, dtype=np.uint8) # init - for i, img in enumerate(images): + for i, im in enumerate(images): if i == max_subplots: # if last batch has fewer images than we expect break - - block_x = int(w * (i // ns)) - block_y = int(h * (i % ns)) - - img = img.transpose(1, 2, 0) - if scale_factor < 1: - img = cv2.resize(img, (w, h)) - - mosaic[block_y:block_y + h, block_x:block_x + w, :] = img + x, y = int(w * (i // ns)), int(h * (i % ns)) # block origin + im = im.transpose(1, 2, 0) + mosaic[y:y + h, x:x + w, :] = im + + # Resize (optional) + scale = max_size / ns / max(h, w) + if scale < 1: + h = math.ceil(scale * h) + w = math.ceil(scale * w) + mosaic = cv2.resize(mosaic, tuple(int(x * ns) for x in (w, h))) + + # Annotate + fs = int((h + w) * ns * 0.01) # font size + annotator = Annotator(mosaic, line_width=round(fs / 10), font_size=fs, pil=True, example=names) + for i in range(i + 1): + x, y = int(w * (i // ns)), int(h * (i % ns)) # block origin + annotator.rectangle([x, y, x + w, y + h], None, (255, 255, 255), width=2) # borders + if paths: + annotator.text((x + 5, y + 5 + h), text=Path(paths[i]).name[:40], txt_color=(220, 220, 220)) # filenames if len(targets) > 0: - image_targets = targets[targets[:, 0] == i] - boxes = xywh2xyxy(image_targets[:, 2:6]).T - classes = image_targets[:, 1].astype('int') - labels = image_targets.shape[1] == 6 # labels if no conf column - conf = None if labels else image_targets[:, 6] # check for confidence presence (label vs pred) + ti = targets[targets[:, 0] == i] # image targets + boxes = xywh2xyxy(ti[:, 2:6]).T + classes = ti[:, 1].astype('int') + labels = ti.shape[1] == 6 # labels if no conf column + conf = None if labels else ti[:, 6] # check for confidence presence (label vs pred) if boxes.shape[1]: if boxes.max() <= 1.01: # if normalized with tolerance 0.01 boxes[[0, 2]] *= w # scale to pixels boxes[[1, 3]] *= h - elif scale_factor < 1: # absolute coords need scale if image scales - boxes *= scale_factor - boxes[[0, 2]] += block_x - boxes[[1, 3]] += block_y - for j, box in enumerate(boxes.T): - cls = int(classes[j]) + elif scale < 1: # absolute coords need scale if image scales + boxes *= scale + boxes[[0, 2]] += x + boxes[[1, 3]] += y + for j, box in enumerate(boxes.T.tolist()): + cls = classes[j] color = colors(cls) cls = names[cls] if names else cls if labels or conf[j] > 0.25: # 0.25 conf thresh - label = '%s' % cls if labels else '%s %.1f' % (cls, conf[j]) - plot_one_box(box, mosaic, label=label, color=color, line_thickness=tl) - - # Draw image filename labels - if paths: - label = Path(paths[i]).name[:40] # trim to 40 char - t_size = cv2.getTextSize(label, 0, 
fontScale=tl / 3, thickness=tf)[0] - cv2.putText(mosaic, label, (block_x + 5, block_y + t_size[1] + 5), 0, tl / 3, [220, 220, 220], thickness=tf, - lineType=cv2.LINE_AA) - - # Image border - cv2.rectangle(mosaic, (block_x, block_y), (block_x + w, block_y + h), (255, 255, 255), thickness=3) - - if fname: - r = min(1280. / max(h, w) / ns, 1.0) # ratio to limit image size - mosaic = cv2.resize(mosaic, (int(ns * w * r), int(ns * h * r)), interpolation=cv2.INTER_AREA) - # cv2.imwrite(fname, cv2.cvtColor(mosaic, cv2.COLOR_BGR2RGB)) # cv2 save - Image.fromarray(mosaic).save(fname) # PIL save - return mosaic + label = f'{cls}' if labels else f'{cls} {conf[j]:.1f}' + annotator.box_label(box, label, color=color) + annotator.im.save(fname) # save def plot_lr_scheduler(optimizer, scheduler, epochs=300, save_dir=''): @@ -220,9 +258,9 @@ def plot_lr_scheduler(optimizer, scheduler, epochs=300, save_dir=''): plt.close() -def plot_test_txt(): # from utils.plots import *; plot_test() - # Plot test.txt histograms - x = np.loadtxt('test.txt', dtype=np.float32) +def plot_val_txt(): # from utils.plots import *; plot_val() + # Plot val.txt histograms + x = np.loadtxt('val.txt', dtype=np.float32) box = xyxy2xywh(x[:, :4]) cx, cy = box[:, 0], box[:, 1] @@ -244,29 +282,32 @@ def plot_targets_txt(): # from utils.plots import *; plot_targets_txt() fig, ax = plt.subplots(2, 2, figsize=(8, 8), tight_layout=True) ax = ax.ravel() for i in range(4): - ax[i].hist(x[i], bins=100, label='%.3g +/- %.3g' % (x[i].mean(), x[i].std())) + ax[i].hist(x[i], bins=100, label=f'{x[i].mean():.3g} +/- {x[i].std():.3g}') ax[i].legend() ax[i].set_title(s[i]) plt.savefig('targets.jpg', dpi=200) -def plot_study_txt(path='', x=None): # from utils.plots import *; plot_study_txt() - # Plot study.txt generated by test.py - fig, ax = plt.subplots(2, 4, figsize=(10, 6), tight_layout=True) - # ax = ax.ravel() +def plot_val_study(file='', dir='', x=None): # from utils.plots import *; plot_val_study() + # Plot file=study.txt generated by val.py (or plot all study*.txt in dir) + save_dir = Path(file).parent if file else Path(dir) + plot2 = False # plot additional results + if plot2: + ax = plt.subplots(2, 4, figsize=(10, 6), tight_layout=True)[1].ravel() fig2, ax2 = plt.subplots(1, 1, figsize=(8, 4), tight_layout=True) - # for f in [Path(path) / f'study_coco_{x}.txt' for x in ['yolov5s6', 'yolov5m6', 'yolov5l6', 'yolov5x6']]: - for f in sorted(Path(path).glob('study*.txt')): + # for f in [save_dir / f'study_coco_{x}.txt' for x in ['yolov5n6', 'yolov5s6', 'yolov5m6', 'yolov5l6', 'yolov5x6']]: + for f in sorted(save_dir.glob('study*.txt')): y = np.loadtxt(f, dtype=np.float32, usecols=[0, 1, 2, 3, 7, 8, 9], ndmin=2).T x = np.arange(y.shape[1]) if x is None else np.array(x) - s = ['P', 'R', 'mAP@.5', 'mAP@.5:.95', 't_inference (ms/img)', 't_NMS (ms/img)', 't_total (ms/img)'] - # for i in range(7): - # ax[i].plot(x, y[i], '.-', linewidth=2, markersize=8) - # ax[i].set_title(s[i]) + if plot2: + s = ['P', 'R', 'mAP@.5', 'mAP@.5:.95', 't_preprocess (ms/img)', 't_inference (ms/img)', 't_NMS (ms/img)'] + for i in range(7): + ax[i].plot(x, y[i], '.-', linewidth=2, markersize=8) + ax[i].set_title(s[i]) j = y[3].argmax() + 1 - ax2.plot(y[6, 1:j], y[3, 1:j] * 1E2, '.-', linewidth=2, markersize=8, + ax2.plot(y[5, 1:j], y[3, 1:j] * 1E2, '.-', linewidth=2, markersize=8, label=f.stem.replace('study_coco_', '').replace('yolo', 'YOLO')) ax2.plot(1E3 / np.array([209, 140, 97, 58, 35, 18]), [34.6, 40.5, 43.0, 47.5, 49.7, 51.5], @@ -275,22 +316,26 @@ def 
plot_study_txt(path='', x=None): # from utils.plots import *; plot_study_tx ax2.grid(alpha=0.2) ax2.set_yticks(np.arange(20, 60, 5)) ax2.set_xlim(0, 57) - ax2.set_ylim(30, 55) + ax2.set_ylim(25, 55) ax2.set_xlabel('GPU Speed (ms/img)') ax2.set_ylabel('COCO AP val') ax2.legend(loc='lower right') - plt.savefig(str(Path(path).name) + '.png', dpi=300) + f = save_dir / 'study.png' + print(f'Saving {f}...') + plt.savefig(f, dpi=300) -def plot_labels(labels, names=(), save_dir=Path(''), loggers=None): +@try_except # known issue https://github.com/ultralytics/yolov5/issues/5395 +@Timeout(30) # known issue https://github.com/ultralytics/yolov5/issues/5611 +def plot_labels(labels, names=(), save_dir=Path('')): # plot dataset labels - print('Plotting labels... ') + LOGGER.info(f"Plotting labels to {save_dir / 'labels.jpg'}... ") c, b = labels[:, 0], labels[:, 1:].transpose() # classes, boxes nc = int(c.max() + 1) # number of classes x = pd.DataFrame(b.transpose(), columns=['x', 'y', 'width', 'height']) # seaborn correlogram - sns.pairplot(x, corner=True, diag_kind='auto', kind='hist', diag_kws=dict(bins=50), plot_kws=dict(pmax=0.9)) + sn.pairplot(x, corner=True, diag_kind='auto', kind='hist', diag_kws=dict(bins=50), plot_kws=dict(pmax=0.9)) plt.savefig(save_dir / 'labels_correlogram.jpg', dpi=200) plt.close() @@ -298,15 +343,18 @@ def plot_labels(labels, names=(), save_dir=Path(''), loggers=None): matplotlib.use('svg') # faster ax = plt.subplots(2, 2, figsize=(8, 8), tight_layout=True)[1].ravel() y = ax[0].hist(c, bins=np.linspace(0, nc, nc + 1) - 0.5, rwidth=0.8) - # [y[2].patches[i].set_color([x / 255 for x in colors(i)]) for i in range(nc)] # update colors bug #3195 + try: # color histogram bars by class + [y[2].patches[i].set_color([x / 255 for x in colors(i)]) for i in range(nc)] # known issue #3195 + except Exception: + pass ax[0].set_ylabel('instances') if 0 < len(names) < 30: ax[0].set_xticks(range(len(names))) ax[0].set_xticklabels(names, rotation=90, fontsize=10) else: ax[0].set_xlabel('classes') - sns.histplot(x, x='x', y='y', ax=ax[2], bins=50, pmax=0.9) - sns.histplot(x, x='width', y='height', ax=ax[3], bins=50, pmax=0.9) + sn.histplot(x, x='x', y='y', ax=ax[2], bins=50, pmax=0.9) + sn.histplot(x, x='width', y='height', ax=ax[3], bins=50, pmax=0.9) # rectangles labels[:, 1:3] = 0.5 # center @@ -325,34 +373,58 @@ def plot_labels(labels, names=(), save_dir=Path(''), loggers=None): matplotlib.use('Agg') plt.close() - # loggers - for k, v in loggers.items() or {}: - if k == 'wandb' and v: - v.log({"Labels": [v.Image(str(x), caption=x.name) for x in save_dir.glob('*labels*.jpg')]}, commit=False) - -def plot_evolution(yaml_file='data/hyp.finetune.yaml'): # from utils.plots import *; plot_evolution() - # Plot hyperparameter evolution results in evolve.txt - with open(yaml_file) as f: - hyp = yaml.safe_load(f) - x = np.loadtxt('evolve.txt', ndmin=2) +def plot_evolve(evolve_csv='path/to/evolve.csv'): # from utils.plots import *; plot_evolve() + # Plot evolve.csv hyp evolution results + evolve_csv = Path(evolve_csv) + data = pd.read_csv(evolve_csv) + keys = [x.strip() for x in data.columns] + x = data.values f = fitness(x) - # weights = (f - f.min()) ** 2 # for weighted results + j = np.argmax(f) # max fitness index plt.figure(figsize=(10, 12), tight_layout=True) matplotlib.rc('font', **{'size': 8}) - for i, (k, v) in enumerate(hyp.items()): - y = x[:, i + 7] - # mu = (y * weights).sum() / weights.sum() # best weighted result - mu = y[f.argmax()] # best single result + print(f'Best results 
from row {j} of {evolve_csv}:') + for i, k in enumerate(keys[7:]): + v = x[:, 7 + i] + mu = v[j] # best single result plt.subplot(6, 5, i + 1) - plt.scatter(y, f, c=hist2d(y, f, 20), cmap='viridis', alpha=.8, edgecolors='none') + plt.scatter(v, f, c=hist2d(v, f, 20), cmap='viridis', alpha=.8, edgecolors='none') plt.plot(mu, f.max(), 'k+', markersize=15) - plt.title('%s = %.3g' % (k, mu), fontdict={'size': 9}) # limit to 40 characters + plt.title(f'{k} = {mu:.3g}', fontdict={'size': 9}) # limit to 40 characters if i % 5 != 0: plt.yticks([]) - print('%15s: %.3g' % (k, mu)) - plt.savefig('evolve.png', dpi=200) - print('\nPlot saved as evolve.png') + print(f'{k:>15}: {mu:.3g}') + f = evolve_csv.with_suffix('.png') # filename + plt.savefig(f, dpi=200) + plt.close() + print(f'Saved {f}') + + +def plot_results(file='path/to/results.csv', dir=''): + # Plot training results.csv. Usage: from utils.plots import *; plot_results('path/to/results.csv') + save_dir = Path(file).parent if file else Path(dir) + fig, ax = plt.subplots(2, 5, figsize=(12, 6), tight_layout=True) + ax = ax.ravel() + files = list(save_dir.glob('results*.csv')) + assert len(files), f'No results.csv files found in {save_dir.resolve()}, nothing to plot.' + for fi, f in enumerate(files): + try: + data = pd.read_csv(f) + s = [x.strip() for x in data.columns] + x = data.values[:, 0] + for i, j in enumerate([1, 2, 3, 4, 5, 8, 9, 10, 6, 7]): + y = data.values[:, j] + # y[y == 0] = np.nan # don't show zero values + ax[i].plot(x, y, marker='.', label=f.stem, linewidth=2, markersize=8) + ax[i].set_title(s[j], fontsize=12) + # if j in [8, 9, 10]: # share train and val loss y axes + # ax[i].get_shared_y_axes().join(ax[i], ax[i - 5]) + except Exception as e: + LOGGER.info(f'Warning: Plotting error for {f}: {e}') + ax[1].legend() + fig.savefig(save_dir / 'results.png', dpi=200) + plt.close() def profile_idetection(start=0, stop=0, labels=(), save_dir=''): @@ -381,66 +453,24 @@ def profile_idetection(start=0, stop=0, labels=(), save_dir=''): else: a.remove() except Exception as e: - print('Warning: Plotting error for %s; %s' % (f, e)) - + print(f'Warning: Plotting error for {f}; {e}') ax[1].legend() plt.savefig(Path(save_dir) / 'idetection_profile.png', dpi=200) -def plot_results_overlay(start=0, stop=0): # from utils.plots import *; plot_results_overlay() - # Plot training 'results*.txt', overlaying train and val losses - s = ['train', 'train', 'train', 'Precision', 'mAP@0.5', 'val', 'val', 'val', 'Recall', 'mAP@0.5:0.95'] # legends - t = ['Box', 'Objectness', 'Classification', 'P-R', 'mAP-F1'] # titles - for f in sorted(glob.glob('results*.txt') + glob.glob('../../Downloads/results*.txt')): - results = np.loadtxt(f, usecols=[2, 3, 4, 8, 9, 12, 13, 14, 10, 11], ndmin=2).T - n = results.shape[1] # number of rows - x = range(start, min(stop, n) if stop else n) - fig, ax = plt.subplots(1, 5, figsize=(14, 3.5), tight_layout=True) - ax = ax.ravel() - for i in range(5): - for j in [i, i + 5]: - y = results[j, x] - ax[i].plot(x, y, marker='.', label=s[j]) - # y_smooth = butter_lowpass_filtfilt(y) - # ax[i].plot(x, np.gradient(y_smooth), marker='.', label=s[j]) - - ax[i].set_title(t[i]) - ax[i].legend() - ax[i].set_ylabel(f) if i == 0 else None # add filename - fig.savefig(f.replace('.txt', '.png'), dpi=200) - - -def plot_results(start=0, stop=0, bucket='', id=(), labels=(), save_dir=''): - # Plot training 'results*.txt'. 
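
Editor's note: illustrative calls (hypothetical run paths) of the new CSV-based plotting helpers added above, which replace the old `results*.txt` / `evolve.txt` readers:

```
from utils.plots import plot_evolve, plot_results  # module paths per this PR

plot_results(file='runs/train/exp/results.csv')  # saves results.png beside the CSV
plot_evolve('runs/evolve/exp/evolve.csv')        # saves evolve.png beside the CSV
```
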
from utils.plots import *; plot_results(save_dir='runs/train/exp') - fig, ax = plt.subplots(2, 5, figsize=(12, 6), tight_layout=True) - ax = ax.ravel() - s = ['Box', 'Objectness', 'Classification', 'Precision', 'Recall', - 'val Box', 'val Objectness', 'val Classification', 'mAP@0.5', 'mAP@0.5:0.95'] - if bucket: - # files = ['https://storage.googleapis.com/%s/results%g.txt' % (bucket, x) for x in id] - files = ['results%g.txt' % x for x in id] - c = ('gsutil cp ' + '%s ' * len(files) + '.') % tuple('gs://%s/results%g.txt' % (bucket, x) for x in id) - os.system(c) - else: - files = list(Path(save_dir).glob('results*.txt')) - assert len(files), 'No results.txt files found in %s, nothing to plot.' % os.path.abspath(save_dir) - for fi, f in enumerate(files): - try: - results = np.loadtxt(f, usecols=[2, 3, 4, 8, 9, 12, 13, 14, 10, 11], ndmin=2).T - n = results.shape[1] # number of rows - x = range(start, min(stop, n) if stop else n) - for i in range(10): - y = results[i, x] - if i in [0, 1, 2, 5, 6, 7]: - y[y == 0] = np.nan # don't show zero loss values - # y /= y[0] # normalize - label = labels[fi] if len(labels) else f.stem - ax[i].plot(x, y, marker='.', label=label, linewidth=2, markersize=8) - ax[i].set_title(s[i]) - # if i in [5, 6, 7]: # share train and val loss y axes - # ax[i].get_shared_y_axes().join(ax[i], ax[i - 5]) - except Exception as e: - print('Warning: Plotting error for %s; %s' % (f, e)) - - ax[1].legend() - fig.savefig(Path(save_dir) / 'results.png', dpi=200) +def save_one_box(xyxy, im, file=Path('im.jpg'), gain=1.02, pad=10, square=False, BGR=False, save=True): + # Save image crop as {file} with crop size multiple {gain} and {pad} pixels. Save and/or return crop + xyxy = torch.tensor(xyxy).view(-1, 4) + b = xyxy2xywh(xyxy) # boxes + if square: + b[:, 2:] = b[:, 2:].max(1)[0].unsqueeze(1) # attempt rectangle to square + b[:, 2:] = b[:, 2:] * gain + pad # box wh * gain + pad + xyxy = xywh2xyxy(b).long() + clip_coords(xyxy, im.shape) + crop = im[int(xyxy[0, 1]):int(xyxy[0, 3]), int(xyxy[0, 0]):int(xyxy[0, 2]), ::(1 if BGR else -1)] + if save: + file.parent.mkdir(parents=True, exist_ok=True) # make directory + f = str(increment_path(file).with_suffix('.jpg')) + # cv2.imwrite(f, crop) # https://github.com/ultralytics/yolov5/issues/7007 chroma subsampling issue + Image.fromarray(cv2.cvtColor(crop, cv2.COLOR_BGR2RGB)).save(f, quality=95, subsampling=0) + return crop diff --git a/utils/sparse.py b/utils/sparse.py index e0f53c186443..6004223e2cc4 100644 --- a/utils/sparse.py +++ b/utils/sparse.py @@ -40,45 +40,50 @@ def check_download_sparsezoo_weights(path): class SparseMLWrapper(object): - def __init__(self, model, recipe): - self.enabled = bool(recipe) + def __init__(self, model, checkpoint_recipe, train_recipe): + self.enabled = bool(train_recipe) self.model = model.module if is_parallel(model) else model - self.recipe = recipe - self.manager = ScheduledModifierManager.from_yaml(recipe) if self.enabled else None + self.checkpoint_manager = ScheduledModifierManager.from_yaml(checkpoint_recipe) if checkpoint_recipe else None + self.manager = ScheduledModifierManager.from_yaml(train_recipe) if train_recipe else None self.logger = None + self.start_epoch = None def state_dict(self): + manager = (ScheduledModifierManager.compose_staged(self.checkpoint_manager, self.manager) + if self.checkpoint_manager and self.enabled else self.manager) + return { - 'recipe': str(self.manager) if self.enabled else None, + 'recipe': str(manager) if self.enabled else None, } - def apply(self): + 
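
Editor's note: an illustrative call (hypothetical box coordinates) of the `save_one_box()` helper added to `utils/plots.py` above, which pads, clips, crops and saves a detection from an image:

```
import cv2
from pathlib import Path
from utils.plots import save_one_box  # module path per this PR

im = cv2.imread('data/images/bus.jpg')  # BGR image
xyxy = [50, 100, 250, 400]              # hypothetical x1, y1, x2, y2 detection box
crop = save_one_box(xyxy, im, file=Path('runs/crops/bus.jpg'), square=True)  # saves and returns the crop
```
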
def apply_checkpoint_structure(self): if not self.enabled: return - self.manager.apply(self.model) + if self.checkpoint_manager: + self.checkpoint_manager.apply_structure(self.model, math.inf) def initialize(self, start_epoch): if not self.enabled: return - self.manager.initialize(self.model, start_epoch) + self.start_epoch = start_epoch - def initialize_loggers(self, logger, tb_writer, wandb_logger, rank): + def initialize_loggers(self, logger, tb_writer, wandb_logger): self.logger = logger - if not self.enabled or rank not in [-1, 0]: + if not self.enabled: return - def _logging_lambda(log_tag, log_val, log_vals, step, walltime): + def _logging_lambda(tag, value, values, step, wall_time, level): if not wandb_logger or not wandb_logger.wandb: return - if log_val is not None: - wandb_logger.log({log_tag: log_val}) + if value is not None: + wandb_logger.log({tag: value}) - if log_vals: - wandb_logger.log(log_vals) + if values: + wandb_logger.log(values) self.manager.initialize_loggers([ SparsificationGroupLogger( @@ -87,7 +92,7 @@ def _logging_lambda(log_tag, log_val, log_vals, step, walltime): ) ]) - if wandb_logger.wandb: + if wandb_logger and wandb_logger.wandb: artifact = wandb_logger.wandb.Artifact('recipe', type='recipe') with artifact.new_file('recipe.yaml') as file: file.write(str(self.manager)) @@ -99,19 +104,21 @@ def modify(self, scaler, optimizer, model, dataloader): return self.manager.modify(model, optimizer, steps_per_epoch=len(dataloader), wrap_optim=scaler) - def check_lr_override(self, scheduler): + def check_lr_override(self, scheduler, rank): # Override lr scheduler if recipe makes any LR updates if self.enabled and self.manager.learning_rate_modifiers: - self.logger.info('Disabling LR scheduler, managing LR using SparseML recipe') + if rank in [0,-1]: + self.logger.info('Disabling LR scheduler, managing LR using SparseML recipe') scheduler = None return scheduler - def check_epoch_override(self, epochs): + def check_epoch_override(self, epochs, rank): # Override num epochs if recipe explicitly modifies epoch range if self.enabled and self.manager.epoch_modifiers and self.manager.max_epochs: - epochs = self.manager.max_epochs or epochs # override num_epochs - self.logger.info(f'Overriding number of epochs from SparseML manager to {epochs}') + if rank in [0,-1]: + self.logger.info(f'Overriding number of epochs from SparseML manager to {epochs}') + epochs = self.manager.max_epochs + self.start_epoch or epochs # override num_epochs return epochs @@ -136,4 +143,4 @@ def reset_best(self, epoch): qat_start = math.floor(max([mod.start_epoch for mod in self.manager.quantization_modifiers])) \ if self.manager.quantization_modifiers else -1 - return (pruning_start <= epoch <= pruning_end) or epoch == qat_start + return (pruning_start <= epoch <= pruning_end) or epoch == qat_start \ No newline at end of file diff --git a/utils/torch_utils.py b/utils/torch_utils.py index 36360136e891..02698e656481 100644 --- a/utils/torch_utils.py +++ b/utils/torch_utils.py @@ -1,145 +1,152 @@ -# YOLOv5 PyTorch utils +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license +""" +PyTorch utils +""" -import datetime -import logging import math import os import platform import subprocess import time +import warnings from contextlib import contextmanager from copy import deepcopy from pathlib import Path import torch -import torch.backends.cudnn as cudnn +import torch.distributed as dist import torch.nn as nn import torch.nn.functional as F -import torchvision + +from utils.general import LOGGER, 
file_update_date, git_describe try: - import thop # for FLOPS computation + import thop # for FLOPs computation except ImportError: thop = None -logger = logging.getLogger(__name__) + +# Suppress PyTorch warnings +warnings.filterwarnings('ignore', message='User provided device_type of \'cuda\', but CUDA is not available. Disabling') @contextmanager def torch_distributed_zero_first(local_rank: int): - """ - Decorator to make all processes in distributed training wait for each local_master to do something. - """ + # Decorator to make all processes in distributed training wait for each local_master to do something if local_rank not in [-1, 0]: - torch.distributed.barrier() + dist.barrier(device_ids=[local_rank]) yield if local_rank == 0: - torch.distributed.barrier() - + dist.barrier(device_ids=[0]) -def init_torch_seeds(seed=0): - # Speed-reproducibility tradeoff https://pytorch.org/docs/stable/notes/randomness.html - torch.manual_seed(seed) - if seed == 0: # slower, more reproducible - cudnn.benchmark, cudnn.deterministic = False, True - else: # faster, less reproducible - cudnn.benchmark, cudnn.deterministic = True, False - -def date_modified(path=__file__): - # return human-readable file modification date, i.e. '2021-3-26' - t = datetime.datetime.fromtimestamp(Path(path).stat().st_mtime) - return f'{t.year}-{t.month}-{t.day}' - - -def git_describe(path=Path(__file__).parent): # path must be a directory - # return human-readable git description, i.e. v5.0-5-g3e25f1e https://git-scm.com/docs/git-describe - s = f'git -C {path} describe --tags --long --always' +def device_count(): + # Returns number of CUDA devices available. Safe version of torch.cuda.device_count(). Only works on Linux. + assert platform.system() == 'Linux', 'device_count() function only works on Linux' try: - return subprocess.check_output(s, shell=True, stderr=subprocess.STDOUT).decode()[:-1] - except subprocess.CalledProcessError as e: - return '' # not a git repository + cmd = 'nvidia-smi -L | wc -l' + return int(subprocess.run(cmd, shell=True, capture_output=True, check=True).stdout.decode().split()[-1]) + except Exception: + return 0 -def select_device(device='', batch_size=None): +def select_device(device='', batch_size=0, newline=True): # device = 'cpu' or '0' or '0,1,2,3' - s = f'YOLOv5 πŸš€ {git_describe() or date_modified()} torch {torch.__version__} ' # string - cpu = device.lower() == 'cpu' + s = f'YOLOv5 πŸš€ {git_describe() or file_update_date()} torch {torch.__version__} ' # string + device = str(device).strip().lower().replace('cuda:', '') # to string, 'cuda:0' to '0' + cpu = device == 'cpu' if cpu: os.environ['CUDA_VISIBLE_DEVICES'] = '-1' # force torch.cuda.is_available() = False elif device: # non-cpu device requested - os.environ['CUDA_VISIBLE_DEVICES'] = device # set environment variable - assert torch.cuda.is_available(), f'CUDA unavailable, invalid device {device} requested' # check availability + os.environ['CUDA_VISIBLE_DEVICES'] = device # set environment variable - must be before assert is_available() + assert torch.cuda.is_available() and torch.cuda.device_count() >= len(device.replace(',', '')), \ + f"Invalid CUDA '--device {device}' requested, use '--device cpu' or pass valid CUDA device(s)" cuda = not cpu and torch.cuda.is_available() if cuda: - devices = device.split(',') if device else range(torch.cuda.device_count()) # i.e. 0,1,6,7 + devices = device.split(',') if device else '0' # range(torch.cuda.device_count()) # i.e. 
0,1,6,7 n = len(devices) # device count - if n > 1 and batch_size: # check batch_size is divisible by device_count + if n > 1 and batch_size > 0: # check batch_size is divisible by device_count assert batch_size % n == 0, f'batch-size {batch_size} not multiple of GPU count {n}' - space = ' ' * len(s) + space = ' ' * (len(s) + 1) for i, d in enumerate(devices): p = torch.cuda.get_device_properties(i) - s += f"{'' if i == 0 else space}CUDA:{d} ({p.name}, {p.total_memory / 1024 ** 2}MB)\n" # bytes to MB + s += f"{'' if i == 0 else space}CUDA:{d} ({p.name}, {p.total_memory / (1 << 20):.0f}MiB)\n" # bytes to MB else: s += 'CPU\n' - logger.info(s.encode().decode('ascii', 'ignore') if platform.system() == 'Windows' else s) # emoji-safe + if not newline: + s = s.rstrip() + LOGGER.info(s.encode().decode('ascii', 'ignore') if platform.system() == 'Windows' else s) # emoji-safe return torch.device('cuda:0' if cuda else 'cpu') -def time_synchronized(): - # pytorch-accurate time +def time_sync(): + # PyTorch-accurate time if torch.cuda.is_available(): torch.cuda.synchronize() return time.time() -def profile(x, ops, n=100, device=None): - # profile a pytorch module or list of modules. Example usage: - # x = torch.randn(16, 3, 640, 640) # input +def profile(input, ops, n=10, device=None): + # YOLOv5 speed/memory/FLOPs profiler + # + # Usage: + # input = torch.randn(16, 3, 640, 640) # m1 = lambda x: x * torch.sigmoid(x) # m2 = nn.SiLU() - # profile(x, [m1, m2], n=100) # profile speed over 100 iterations - - device = device or torch.device('cuda:0' if torch.cuda.is_available() else 'cpu') - x = x.to(device) - x.requires_grad = True - print(torch.__version__, device.type, torch.cuda.get_device_properties(0) if device.type == 'cuda' else '') - print(f"\n{'Params':>12s}{'GFLOPS':>12s}{'forward (ms)':>16s}{'backward (ms)':>16s}{'input':>24s}{'output':>24s}") - for m in ops if isinstance(ops, list) else [ops]: - m = m.to(device) if hasattr(m, 'to') else m # device - m = m.half() if hasattr(m, 'half') and isinstance(x, torch.Tensor) and x.dtype is torch.float16 else m # type - dtf, dtb, t = 0., 0., [0., 0., 0.] 
# dt forward, backward - try: - flops = thop.profile(m, inputs=(x,), verbose=False)[0] / 1E9 * 2 # GFLOPS - except: - flops = 0 - - for _ in range(n): - t[0] = time_synchronized() - y = m(x) - t[1] = time_synchronized() + # profile(input, [m1, m2], n=100) # profile over 100 iterations + + results = [] + device = device or select_device() + print(f"{'Params':>12s}{'GFLOPs':>12s}{'GPU_mem (GB)':>14s}{'forward (ms)':>14s}{'backward (ms)':>14s}" + f"{'input':>24s}{'output':>24s}") + + for x in input if isinstance(input, list) else [input]: + x = x.to(device) + x.requires_grad = True + for m in ops if isinstance(ops, list) else [ops]: + m = m.to(device) if hasattr(m, 'to') else m # device + m = m.half() if hasattr(m, 'half') and isinstance(x, torch.Tensor) and x.dtype is torch.float16 else m + tf, tb, t = 0, 0, [0, 0, 0] # dt forward, backward try: - _ = y.sum().backward() - t[2] = time_synchronized() - except: # no backward method - t[2] = float('nan') - dtf += (t[1] - t[0]) * 1000 / n # ms per op forward - dtb += (t[2] - t[1]) * 1000 / n # ms per op backward + flops = thop.profile(m, inputs=(x,), verbose=False)[0] / 1E9 * 2 # GFLOPs + except Exception: + flops = 0 - s_in = tuple(x.shape) if isinstance(x, torch.Tensor) else 'list' - s_out = tuple(y.shape) if isinstance(y, torch.Tensor) else 'list' - p = sum(list(x.numel() for x in m.parameters())) if isinstance(m, nn.Module) else 0 # parameters - print(f'{p:12}{flops:12.4g}{dtf:16.4g}{dtb:16.4g}{str(s_in):>24s}{str(s_out):>24s}') + try: + for _ in range(n): + t[0] = time_sync() + y = m(x) + t[1] = time_sync() + try: + _ = (sum(yi.sum() for yi in y) if isinstance(y, list) else y).sum().backward() + t[2] = time_sync() + except Exception: # no backward method + # print(e) # for debug + t[2] = float('nan') + tf += (t[1] - t[0]) * 1000 / n # ms per op forward + tb += (t[2] - t[1]) * 1000 / n # ms per op backward + mem = torch.cuda.memory_reserved() / 1E9 if torch.cuda.is_available() else 0 # (GB) + s_in = tuple(x.shape) if isinstance(x, torch.Tensor) else 'list' + s_out = tuple(y.shape) if isinstance(y, torch.Tensor) else 'list' + p = sum(list(x.numel() for x in m.parameters())) if isinstance(m, nn.Module) else 0 # parameters + print(f'{p:12}{flops:12.4g}{mem:>14.3f}{tf:14.4g}{tb:14.4g}{str(s_in):>24s}{str(s_out):>24s}') + results.append([p, flops, mem, tf, tb, s_in, s_out]) + except Exception as e: + print(e) + results.append(None) + torch.cuda.empty_cache() + return results def is_parallel(model): + # Returns True if model is of type DP or DDP return type(model) in (nn.parallel.DataParallel, nn.parallel.DistributedDataParallel) -def intersect_dicts(da, db, exclude=()): - # Dictionary intersection of matching keys and shapes, omitting 'exclude' keys, using da values - return {k: v for k, v in da.items() if k in db and not any(x in k for x in exclude) and v.shape == db[k].shape} +def de_parallel(model): + # De-parallelize a model: returns single-GPU model if model is of type DP or DDP + return model.module if is_parallel(model) else model def initialize_weights(model): @@ -150,7 +157,7 @@ def initialize_weights(model): elif t is nn.BatchNorm2d: m.eps = 1e-3 m.momentum = 0.03 - elif t in [nn.Hardswish, nn.LeakyReLU, nn.ReLU, nn.ReLU6]: + elif t in [nn.Hardswish, nn.LeakyReLU, nn.ReLU, nn.ReLU6, nn.SiLU]: m.inplace = True @@ -161,7 +168,7 @@ def find_modules(model, mclass=nn.Conv2d): def sparsity(model): # Return global model sparsity - a, b = 0., 0. 
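
Editor's note: usage sketch taken from the comments in the updated `profile()` above, benchmarking two candidate activation modules for speed, memory and FLOPs:

```
import torch
import torch.nn as nn
from utils.torch_utils import profile  # module path per this PR

x = torch.randn(16, 3, 640, 640)      # input batch
m1 = lambda x: x * torch.sigmoid(x)   # hand-written SiLU
m2 = nn.SiLU()                        # built-in SiLU
results = profile(x, [m1, m2], n=10)  # per-op [params, GFLOPs, GPU_mem, forward ms, backward ms, shapes]
```
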
+ a, b = 0, 0 for p in model.parameters(): a += p.numel() b += (p == 0).sum() @@ -180,7 +187,7 @@ def prune(model, amount=0.3): def fuse_conv_and_bn(conv, bn): - # Fuse convolution and batchnorm layers https://tehnokv.com/posts/fusing-batchnorm-and-conv/ + # Fuse Conv2d() and BatchNorm2d() layers https://tehnokv.com/posts/fusing-batchnorm-and-conv/ fusedconv = nn.Conv2d(conv.in_channels, conv.out_channels, kernel_size=conv.kernel_size, @@ -189,12 +196,12 @@ def fuse_conv_and_bn(conv, bn): groups=conv.groups, bias=True).requires_grad_(False).to(conv.weight.device) - # prepare filters + # Prepare filters w_conv = conv.weight.clone().view(conv.out_channels, -1) w_bn = torch.diag(bn.weight.div(torch.sqrt(bn.eps + bn.running_var))) fusedconv.weight.copy_(torch.mm(w_bn, w_conv).view(fusedconv.weight.shape)) - # prepare spatial bias + # Prepare spatial bias b_conv = torch.zeros(conv.weight.size(0), device=conv.weight.device) if conv.bias is None else conv.bias b_bn = bn.bias - bn.weight.mul(bn.running_mean).div(torch.sqrt(bn.running_var + bn.eps)) fusedconv.bias.copy_(torch.mm(w_bn, b_conv.reshape(-1, 1)).reshape(-1) + b_bn) @@ -207,46 +214,28 @@ def model_info(model, verbose=False, img_size=640): n_p = sum(x.numel() for x in model.parameters()) # number parameters n_g = sum(x.numel() for x in model.parameters() if x.requires_grad) # number gradients if verbose: - print('%5s %40s %9s %12s %20s %10s %10s' % ('layer', 'name', 'gradient', 'parameters', 'shape', 'mu', 'sigma')) + print(f"{'layer':>5} {'name':>40} {'gradient':>9} {'parameters':>12} {'shape':>20} {'mu':>10} {'sigma':>10}") for i, (name, p) in enumerate(model.named_parameters()): name = name.replace('module_list.', '') print('%5g %40s %9s %12g %20s %10.3g %10.3g' % (i, name, p.requires_grad, p.numel(), list(p.shape), p.mean(), p.std())) - try: # FLOPS + try: # FLOPs from thop import profile stride = max(int(model.stride.max()), 32) if hasattr(model, 'stride') else 32 img = torch.zeros((1, model.yaml.get('ch', 3), stride, stride), device=next(model.parameters()).device) # input - flops = profile(deepcopy(model), inputs=(img,), verbose=False)[0] / 1E9 * 2 # stride GFLOPS + flops = profile(deepcopy(model), inputs=(img,), verbose=False)[0] / 1E9 * 2 # stride GFLOPs img_size = img_size if isinstance(img_size, list) else [img_size, img_size] # expand if int/float - fs = ', %.1f GFLOPS' % (flops * img_size[0] / stride * img_size[1] / stride) # 640x640 GFLOPS + fs = ', %.1f GFLOPs' % (flops * img_size[0] / stride * img_size[1] / stride) # 640x640 GFLOPs except (ImportError, Exception): fs = '' - logger.info(f"Model Summary: {len(list(model.modules()))} layers, {n_p} parameters, {n_g} gradients{fs}") - - -def load_classifier(name='resnet101', n=2): - # Loads a pretrained model reshaped to n-class output - model = torchvision.models.__dict__[name](pretrained=True) - - # ResNet model properties - # input_size = [3, 224, 224] - # input_space = 'RGB' - # input_range = [0, 1] - # mean = [0.485, 0.456, 0.406] - # std = [0.229, 0.224, 0.225] - - # Reshape output to n classes - filters = model.fc.weight.shape[1] - model.fc.bias = nn.Parameter(torch.zeros(n), requires_grad=True) - model.fc.weight = nn.Parameter(torch.zeros(n, filters), requires_grad=True) - model.fc.out_features = n - return model + name = Path(model.yaml_file).stem.replace('yolov5', 'YOLOv5') if hasattr(model, 'yaml_file') else 'Model' + LOGGER.info(f"{name} summary: {len(list(model.modules()))} layers, {n_p} parameters, {n_g} gradients{fs}") def scale_img(img, ratio=1.0, 
same_shape=False, gs=32): # img(16,3,256,416) - # scales img(bs,3,y,x) by ratio constrained to gs-multiple + # Scales img(bs,3,y,x) by ratio constrained to gs-multiple if ratio == 1.0: return img else: @@ -254,7 +243,7 @@ def scale_img(img, ratio=1.0, same_shape=False, gs=32): # img(16,3,256,416) s = (int(h * ratio), int(w * ratio)) # new size img = F.interpolate(img, size=s, mode='bilinear', align_corners=False) # resize if not same_shape: # pad/crop img - h, w = [math.ceil(x * ratio / gs) * gs for x in (h, w)] + h, w = (math.ceil(x * ratio / gs) * gs for x in (h, w)) return F.pad(img, [0, w - s[1], 0, h - s[0]], value=0.447) # value = imagenet mean @@ -267,25 +256,44 @@ def copy_attr(a, b, include=(), exclude=()): setattr(a, k, v) +class EarlyStopping: + # YOLOv5 simple early stopper + def __init__(self, patience=30): + self.best_fitness = 0.0 # i.e. mAP + self.best_epoch = 0 + self.patience = patience or float('inf') # epochs to wait after fitness stops improving to stop + self.possible_stop = False # possible stop may occur next epoch + + def __call__(self, epoch, fitness): + if fitness >= self.best_fitness: # >= 0 to allow for early zero-fitness stage of training + self.best_epoch = epoch + self.best_fitness = fitness + delta = epoch - self.best_epoch # epochs without improvement + self.possible_stop = delta >= (self.patience - 1) # possible stop may occur next epoch + stop = delta >= self.patience # stop training if patience exceeded + if stop: + LOGGER.info(f'Stopping training early as no improvement observed in last {self.patience} epochs. ' + f'Best results observed at epoch {self.best_epoch}, best model saved as best.pt.\n' + f'To update EarlyStopping(patience={self.patience}) pass a new patience value, ' + f'i.e. `python train.py --patience 300` or use `--patience 0` to disable EarlyStopping.') + return stop + + class ModelEMA: - """ Model Exponential Moving Average from https://github.com/rwightman/pytorch-image-models - Keep a moving average of everything in the model state_dict (parameters and buffers). - This is intended to allow functionality like - https://www.tensorflow.org/api_docs/python/tf/train/ExponentialMovingAverage - A smoothed version of the weights is necessary for some training schemes to perform well. - This class is sensitive where it is initialized in the sequence of model init, - GPU assignment and distributed training wrappers. 
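
Editor's note: illustrative training-loop wiring (with a hypothetical `train_one_epoch`) for the new `EarlyStopping` helper above, which stops once fitness (e.g. mAP) fails to improve for `patience` epochs:

```
from utils.torch_utils import EarlyStopping  # module path per this PR

stopper = EarlyStopping(patience=30)
for epoch in range(300):
    fitness = train_one_epoch()  # hypothetical: returns a fitness scalar such as mAP
    if stopper(epoch, fitness):  # True once `patience` epochs pass with no improvement
        break
```
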
+ """ Updated Exponential Moving Average (EMA) from https://github.com/rwightman/pytorch-image-models + Keeps a moving average of everything in the model state_dict (parameters and buffers) + For EMA details see https://www.tensorflow.org/api_docs/python/tf/train/ExponentialMovingAverage """ - def __init__(self, model, decay=0.9999, updates=0, enabled=True): + def __init__(self, model, decay=0.9999, tau=2000, updates=0, enabled=True): # Create EMA self._model = model - self._ema = deepcopy(model.module if is_parallel(model) else model).eval() # FP32 EMA + self._ema = deepcopy(de_parallel(model)).eval() # FP32 EMA # if next(model.parameters()).device.type != 'cpu': # self.ema.half() # FP16 EMA self.updates = updates # number of EMA updates - self.decay = lambda x: decay * (1 - math.exp(-x / 2000)) # decay exponential ramp (to help early epochs) - self.enabled = enabled + self.decay = lambda x: decay * (1 - math.exp(-x / tau)) # decay exponential ramp (to help early epochs) + self.enabled=enabled for p in self._ema.parameters(): p.requires_grad_(False) @@ -302,18 +310,11 @@ def state_dict(self, pickle=True): 'updates': self.updates, } - def load_state_dict(self, state_dict): - if not self.enabled: - return - pickled = isinstance(state_dict['ema'], nn.Module) - self.ema.load_state_dict(state_dict['ema'].float().state_dict() if pickled else state_dict['ema']) - self.updates = state_dict['updates'] - def update(self, model): self._model = model if not self.enabled: return - + # Update EMA parameters with torch.no_grad(): msd = model.module.state_dict() if is_parallel(model) else model.state_dict() # model state_dict self.updates += 1 @@ -328,4 +329,4 @@ def update(self, model): def update_attr(self, model, include=(), exclude=('process_group', 'reducer')): # Update EMA attributes - copy_attr(self.ema, model, include, exclude) \ No newline at end of file + copy_attr(self.ema, model, include, exclude) diff --git a/utils/wandb_logging/__init__.py b/utils/wandb_logging/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/utils/wandb_logging/wandb_utils.py b/utils/wandb_logging/wandb_utils.py deleted file mode 100644 index 76b91aa11291..000000000000 --- a/utils/wandb_logging/wandb_utils.py +++ /dev/null @@ -1,320 +0,0 @@ -"""Utilities and tools for tracking runs with Weights & Biases.""" -import json -import sys -from pathlib import Path - -import torch -import yaml -from tqdm import tqdm - -sys.path.append(str(Path(__file__).parent.parent.parent)) # add utils/ to path -from utils.datasets import LoadImagesAndLabels -from utils.datasets import img2label_paths -from utils.general import colorstr, xywh2xyxy, check_dataset, check_file - -try: - import wandb - from wandb import init, finish -except ImportError: - wandb = None - -WANDB_ARTIFACT_PREFIX = 'wandb-artifact://' - - -def remove_prefix(from_string, prefix=WANDB_ARTIFACT_PREFIX): - return from_string[len(prefix):] - - -def check_wandb_config_file(data_config_file): - wandb_config = '_wandb.'.join(data_config_file.rsplit('.', 1)) # updated data.yaml path - if Path(wandb_config).is_file(): - return wandb_config - return data_config_file - - -def get_run_info(run_path): - run_path = Path(remove_prefix(run_path, WANDB_ARTIFACT_PREFIX)) - run_id = run_path.stem - project = run_path.parent.stem - entity = run_path.parent.parent.stem - model_artifact_name = 'run_' + run_id + '_model' - return entity, project, run_id, model_artifact_name - - -def check_wandb_resume(opt): - process_wandb_config_ddp_mode(opt) if 
opt.global_rank not in [-1, 0] else None - if isinstance(opt.resume, str): - if opt.resume.startswith(WANDB_ARTIFACT_PREFIX): - if opt.global_rank not in [-1, 0]: # For resuming DDP runs - entity, project, run_id, model_artifact_name = get_run_info(opt.resume) - api = wandb.Api() - artifact = api.artifact(entity + '/' + project + '/' + model_artifact_name + ':latest') - modeldir = artifact.download() - opt.weights = str(Path(modeldir) / "last.pt") - return True - return None - - -def process_wandb_config_ddp_mode(opt): - with open(check_file(opt.data)) as f: - data_dict = yaml.safe_load(f) # data dict - train_dir, val_dir = None, None - if isinstance(data_dict['train'], str) and data_dict['train'].startswith(WANDB_ARTIFACT_PREFIX): - api = wandb.Api() - train_artifact = api.artifact(remove_prefix(data_dict['train']) + ':' + opt.artifact_alias) - train_dir = train_artifact.download() - train_path = Path(train_dir) / 'data/images/' - data_dict['train'] = str(train_path) - - if isinstance(data_dict['val'], str) and data_dict['val'].startswith(WANDB_ARTIFACT_PREFIX): - api = wandb.Api() - val_artifact = api.artifact(remove_prefix(data_dict['val']) + ':' + opt.artifact_alias) - val_dir = val_artifact.download() - val_path = Path(val_dir) / 'data/images/' - data_dict['val'] = str(val_path) - if train_dir or val_dir: - ddp_data_path = str(Path(val_dir) / 'wandb_local_data.yaml') - with open(ddp_data_path, 'w') as f: - yaml.safe_dump(data_dict, f) - opt.data = ddp_data_path - - -class WandbLogger(): - """Log training runs, datasets, models, and predictions to Weights & Biases. - - This logger sends information to W&B at wandb.ai. By default, this information - includes hyperparameters, system configuration and metrics, model metrics, - and basic data metrics and analyses. - - By providing additional command line arguments to train.py, datasets, - models and predictions can also be logged. 
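
Editor's note: a worked example (hypothetical entity/project/run values) of the `wandb-artifact://` path parsing performed by `get_run_info()` in the removed `wandb_utils.py` above:

```
from pathlib import Path

WANDB_ARTIFACT_PREFIX = 'wandb-artifact://'
run_path = Path('wandb-artifact://my-entity/my-project/abc123'[len(WANDB_ARTIFACT_PREFIX):])
entity, project, run_id = run_path.parent.parent.stem, run_path.parent.stem, run_path.stem
model_artifact_name = 'run_' + run_id + '_model'
print(entity, project, model_artifact_name)  # my-entity my-project run_abc123_model
```
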
- - For more on how this logger is used, see the Weights & Biases documentation: - https://docs.wandb.com/guides/integrations/yolov5 - """ - def __init__(self, opt, name, run_id, data_dict, job_type='Training'): - # Pre-training routine -- - self.job_type = job_type - self.wandb, self.wandb_run, self.data_dict = wandb, None if not wandb else wandb.run, data_dict - # It's more elegant to stick to 1 wandb.init call, but useful config data is overwritten in the WandbLogger's wandb.init call - if isinstance(opt.resume, str): # checks resume from artifact - if opt.resume.startswith(WANDB_ARTIFACT_PREFIX): - entity, project, run_id, model_artifact_name = get_run_info(opt.resume) - model_artifact_name = WANDB_ARTIFACT_PREFIX + model_artifact_name - assert wandb, 'install wandb to resume wandb runs' - # Resume wandb-artifact:// runs here| workaround for not overwriting wandb.config - self.wandb_run = wandb.init(id=run_id, project=project, entity=entity, resume='allow') - opt.resume = model_artifact_name - elif self.wandb: - self.wandb_run = wandb.init(config=opt, - resume="allow", - project='YOLOv5' if opt.project == 'runs/train' else Path(opt.project).stem, - entity=opt.entity, - name=name, - job_type=job_type, - id=run_id) if not wandb.run else wandb.run - if self.wandb_run: - if self.job_type == 'Training': - if not opt.resume: - wandb_data_dict = self.check_and_upload_dataset(opt) if opt.upload_dataset else data_dict - # Info useful for resuming from artifacts - self.wandb_run.config.update({ - 'opt': vars(opt), - 'data_dict': wandb_data_dict - }, allow_val_change=True) - self.data_dict = self.setup_training(opt, data_dict) - if self.job_type == 'Dataset Creation': - self.data_dict = self.check_and_upload_dataset(opt) - else: - prefix = colorstr('wandb: ') - print(f"{prefix}Install Weights & Biases for YOLOv5 logging with 'pip install wandb' (recommended)") - - def check_and_upload_dataset(self, opt): - assert wandb, 'Install wandb to upload dataset' - check_dataset(self.data_dict) - config_path = self.log_dataset_artifact(check_file(opt.data), - opt.single_cls, - 'YOLOv5' if opt.project == 'runs/train' else Path(opt.project).stem) - print("Created dataset config file ", config_path) - with open(config_path) as f: - wandb_data_dict = yaml.safe_load(f) - return wandb_data_dict - - def setup_training(self, opt, data_dict): - self.log_dict, self.current_epoch, self.log_imgs = {}, 0, 16 # Logging Constants - self.bbox_interval = opt.bbox_interval - if isinstance(opt.resume, str): - modeldir, _ = self.download_model_artifact(opt) - if modeldir: - self.weights = Path(modeldir) / "last.pt" - config = self.wandb_run.config - opt.weights, opt.save_period, opt.batch_size, opt.bbox_interval, opt.epochs, opt.hyp = str( - self.weights), config.save_period, config.total_batch_size, config.bbox_interval, config.epochs, \ - config.opt['hyp'] - data_dict = dict(self.wandb_run.config.data_dict) # eliminates the need for config file to resume - if 'val_artifact' not in self.__dict__: # If --upload_dataset is set, use the existing artifact, don't download - self.train_artifact_path, self.train_artifact = self.download_dataset_artifact(data_dict.get('train'), - opt.artifact_alias) - self.val_artifact_path, self.val_artifact = self.download_dataset_artifact(data_dict.get('val'), - opt.artifact_alias) - self.result_artifact, self.result_table, self.val_table, self.weights = None, None, None, None - if self.train_artifact_path is not None: - train_path = Path(self.train_artifact_path) / 'data/images/' - 
-
-    def setup_training(self, opt, data_dict):
-        self.log_dict, self.current_epoch, self.log_imgs = {}, 0, 16  # Logging Constants
-        self.bbox_interval = opt.bbox_interval
-        if isinstance(opt.resume, str):
-            modeldir, _ = self.download_model_artifact(opt)
-            if modeldir:
-                self.weights = Path(modeldir) / "last.pt"
-                config = self.wandb_run.config
-                opt.weights, opt.save_period, opt.batch_size, opt.bbox_interval, opt.epochs, opt.hyp = str(
-                    self.weights), config.save_period, config.total_batch_size, config.bbox_interval, config.epochs, \
-                    config.opt['hyp']
-            data_dict = dict(self.wandb_run.config.data_dict)  # eliminates the need for config file to resume
-        if 'val_artifact' not in self.__dict__:  # If --upload_dataset is set, use the existing artifact, don't download
-            self.train_artifact_path, self.train_artifact = self.download_dataset_artifact(data_dict.get('train'),
-                                                                                           opt.artifact_alias)
-            self.val_artifact_path, self.val_artifact = self.download_dataset_artifact(data_dict.get('val'),
-                                                                                       opt.artifact_alias)
-            self.result_artifact, self.result_table, self.val_table, self.weights = None, None, None, None
-            if self.train_artifact_path is not None:
-                train_path = Path(self.train_artifact_path) / 'data/images/'
-                data_dict['train'] = str(train_path)
-            if self.val_artifact_path is not None:
-                val_path = Path(self.val_artifact_path) / 'data/images/'
-                data_dict['val'] = str(val_path)
-                self.val_table = self.val_artifact.get("val")
-                self.map_val_table_path()
-        if self.val_artifact is not None:
-            self.result_artifact = wandb.Artifact("run_" + wandb.run.id + "_progress", "evaluation")
-            self.result_table = wandb.Table(["epoch", "id", "prediction", "avg_confidence"])
-        if opt.bbox_interval == -1:
-            self.bbox_interval = opt.bbox_interval = (opt.epochs // 10) if opt.epochs > 10 else 1
-        return data_dict
-
-    def download_dataset_artifact(self, path, alias):
-        if isinstance(path, str) and path.startswith(WANDB_ARTIFACT_PREFIX):
-            artifact_path = Path(remove_prefix(path, WANDB_ARTIFACT_PREFIX) + ":" + alias)
-            dataset_artifact = wandb.use_artifact(artifact_path.as_posix())
-            assert dataset_artifact is not None, "'Error: W&B dataset artifact doesn\'t exist'"
-            datadir = dataset_artifact.download()
-            return datadir, dataset_artifact
-        return None, None
-
-    def download_model_artifact(self, opt):
-        if opt.resume.startswith(WANDB_ARTIFACT_PREFIX):
-            model_artifact = wandb.use_artifact(remove_prefix(opt.resume, WANDB_ARTIFACT_PREFIX) + ":latest")
-            assert model_artifact is not None, 'Error: W&B model artifact doesn\'t exist'
-            modeldir = model_artifact.download()
-            epochs_trained = model_artifact.metadata.get('epochs_trained')
-            total_epochs = model_artifact.metadata.get('total_epochs')
-            is_finished = total_epochs is None
-            assert not is_finished, 'training is finished, can only resume incomplete runs.'
-            return modeldir, model_artifact
-        return None, None
-
-    def log_model(self, path, opt, epoch, fitness_score, best_model=False):
-        model_artifact = wandb.Artifact('run_' + wandb.run.id + '_model', type='model', metadata={
-            'original_url': str(path),
-            'epochs_trained': epoch + 1,
-            'save period': opt.save_period,
-            'project': opt.project,
-            'total_epochs': opt.epochs,
-            'fitness_score': fitness_score
-        })
-        model_artifact.add_file(str(path / 'last.pt'), name='last.pt')
-        wandb.log_artifact(model_artifact,
-                           aliases=['latest', 'last', 'epoch ' + str(self.current_epoch), 'best' if best_model else ''])
-        print("Saving model artifact on epoch ", epoch + 1)
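
Because `log_model()` attaches moving aliases (`latest`, `best`) to each checkpoint version, any consumer can later pin whichever pointer it needs without knowing version numbers. A sketch of the consumer side, with hypothetical entity/project/run names:

```python
import wandb
from pathlib import Path

api = wandb.Api()
# 'run_<id>_model' matches the artifact name created in log_model(); names below are hypothetical
artifact = api.artifact('my-entity/my-project/run_abc123_model:latest')  # or ':best'
ckpt = Path(artifact.download()) / 'last.pt'  # the file added via add_file(..., name='last.pt')
print(artifact.metadata.get('epochs_trained'), artifact.metadata.get('fitness_score'))
```
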
-
-    def log_dataset_artifact(self, data_file, single_cls, project, overwrite_config=False):
-        with open(data_file) as f:
-            data = yaml.safe_load(f)  # data dict
-        nc, names = (1, ['item']) if single_cls else (int(data['nc']), data['names'])
-        names = {k: v for k, v in enumerate(names)}  # to index dictionary
-        self.train_artifact = self.create_dataset_table(LoadImagesAndLabels(
-            data['train'], rect=True, batch_size=1), names, name='train') if data.get('train') else None
-        self.val_artifact = self.create_dataset_table(LoadImagesAndLabels(
-            data['val'], rect=True, batch_size=1), names, name='val') if data.get('val') else None
-        if data.get('train'):
-            data['train'] = WANDB_ARTIFACT_PREFIX + str(Path(project) / 'train')
-        if data.get('val'):
-            data['val'] = WANDB_ARTIFACT_PREFIX + str(Path(project) / 'val')
-        path = data_file if overwrite_config else '_wandb.'.join(data_file.rsplit('.', 1))  # updated data.yaml path
-        data.pop('download', None)
-        with open(path, 'w') as f:
-            yaml.safe_dump(data, f)
-
-        if self.job_type == 'Training':  # builds correct artifact pipeline graph
-            self.wandb_run.use_artifact(self.val_artifact)
-            self.wandb_run.use_artifact(self.train_artifact)
-            self.val_artifact.wait()
-            self.val_table = self.val_artifact.get('val')
-            self.map_val_table_path()
-        else:
-            self.wandb_run.log_artifact(self.train_artifact)
-            self.wandb_run.log_artifact(self.val_artifact)
-        return path
-
-    def map_val_table_path(self):
-        self.val_table_map = {}
-        print("Mapping dataset")
-        for i, data in enumerate(tqdm(self.val_table.data)):
-            self.val_table_map[data[3]] = data[0]
-
-    def create_dataset_table(self, dataset, class_to_id, name='dataset'):
-        # TODO: Explore multiprocessing to split this loop in parallel | This is essential for speeding up logging
-        artifact = wandb.Artifact(name=name, type="dataset")
-        img_files = tqdm([dataset.path]) if isinstance(dataset.path, str) and Path(dataset.path).is_dir() else None
-        img_files = tqdm(dataset.img_files) if not img_files else img_files
-        for img_file in img_files:
-            if Path(img_file).is_dir():
-                artifact.add_dir(img_file, name='data/images')
-                labels_path = 'labels'.join(dataset.path.rsplit('images', 1))
-                artifact.add_dir(labels_path, name='data/labels')
-            else:
-                artifact.add_file(img_file, name='data/images/' + Path(img_file).name)
-                label_file = Path(img2label_paths([img_file])[0])
-                artifact.add_file(str(label_file),
-                                  name='data/labels/' + label_file.name) if label_file.exists() else None
-        table = wandb.Table(columns=["id", "train_image", "Classes", "name"])
-        class_set = wandb.Classes([{'id': id, 'name': name} for id, name in class_to_id.items()])
-        for si, (img, labels, paths, shapes) in enumerate(tqdm(dataset)):
-            box_data, img_classes = [], {}
-            for cls, *xywh in labels[:, 1:].tolist():
-                cls = int(cls)
-                box_data.append({"position": {"middle": [xywh[0], xywh[1]], "width": xywh[2], "height": xywh[3]},
-                                 "class_id": cls,
-                                 "box_caption": "%s" % (class_to_id[cls])})
-                img_classes[cls] = class_to_id[cls]
-            boxes = {"ground_truth": {"box_data": box_data, "class_labels": class_to_id}}  # inference-space
-            table.add_data(si, wandb.Image(paths, classes=class_set, boxes=boxes), json.dumps(img_classes),
-                           Path(paths).name)
-        artifact.add(table, name)
-        return artifact
-
-    def log_training_progress(self, predn, path, names):
-        if self.val_table and self.result_table:
-            class_set = wandb.Classes([{'id': id, 'name': name} for id, name in names.items()])
-            box_data = []
-            total_conf = 0
-            for *xyxy, conf, cls in predn.tolist():
-                if conf >= 0.25:
-                    box_data.append(
-                        {"position": {"minX": xyxy[0], "minY": xyxy[1], "maxX": xyxy[2], "maxY": xyxy[3]},
-                         "class_id": int(cls),
-                         "box_caption": "%s %.3f" % (names[cls], conf),
-                         "scores": {"class_score": conf},
-                         "domain": "pixel"})
-                    total_conf = total_conf + conf
-            boxes = {"predictions": {"box_data": box_data, "class_labels": names}}  # inference-space
-            id = self.val_table_map[Path(path).name]
-            self.result_table.add_data(self.current_epoch,
-                                       id,
-                                       wandb.Image(self.val_table.data[id][1], boxes=boxes, classes=class_set),
-                                       total_conf / max(1, len(box_data))
-                                       )
-
-    def log(self, log_dict):
-        if self.wandb_run:
-            for key, value in log_dict.items():
-                self.log_dict[key] = value
-
-    def end_epoch(self, best_result=False):
-        if self.wandb_run:
-            wandb.log(self.log_dict)
-            self.log_dict = {}
-            if self.result_artifact:
-                train_results = wandb.JoinedTable(self.val_table, self.result_table, "id")
-                self.result_artifact.add(train_results, 'result')
-                wandb.log_artifact(self.result_artifact, aliases=['latest', 'last', 'epoch ' + str(self.current_epoch),
-                                                                  ('best' if best_result else '')])
-                self.result_table = wandb.Table(["epoch", "id", "prediction", "avg_confidence"])
-                self.result_artifact = wandb.Artifact("run_" + wandb.run.id + "_progress", "evaluation")
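
The `box_data` dictionaries built in `create_dataset_table()` and `log_training_progress()` follow W&B's bounding-box overlay schema for `wandb.Image`. A standalone sketch of the same format, with hypothetical values:

```python
import numpy as np
import wandb

run = wandb.init(project='bbox-demo')  # hypothetical project
img = np.zeros((480, 640, 3), dtype=np.uint8)  # placeholder image
boxes = {
    'predictions': {
        'box_data': [{
            'position': {'minX': 100, 'minY': 120, 'maxX': 300, 'maxY': 360},
            'class_id': 0,
            'box_caption': 'person 0.912',
            'scores': {'class_score': 0.912},
            'domain': 'pixel',  # coordinates are pixels, not 0-1 fractions
        }],
        'class_labels': {0: 'person'},
    }
}
run.log({'val/example': wandb.Image(img, boxes=boxes)})
```
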
"evaluation") - - def finish_run(self): - if self.wandb_run: - if self.log_dict: - wandb.log(self.log_dict) - wandb.run.finish() diff --git a/val.py b/val.py new file mode 100644 index 000000000000..a7503a50f247 --- /dev/null +++ b/val.py @@ -0,0 +1,382 @@ +# YOLOv5 πŸš€ by Ultralytics, GPL-3.0 license +""" +Validate a trained YOLOv5 model accuracy on a custom dataset + +Usage: + $ python path/to/val.py --weights yolov5s.pt --data coco128.yaml --img 640 + +Usage - formats: + $ python path/to/val.py --weights yolov5s.pt # PyTorch + yolov5s.torchscript # TorchScript + yolov5s.onnx # ONNX Runtime or OpenCV DNN with --dnn + yolov5s.xml # OpenVINO + yolov5s.engine # TensorRT + yolov5s.mlmodel # CoreML (MacOS-only) + yolov5s_saved_model # TensorFlow SavedModel + yolov5s.pb # TensorFlow GraphDef + yolov5s.tflite # TensorFlow Lite + yolov5s_edgetpu.tflite # TensorFlow Edge TPU +""" + +import argparse +import json +import os +import sys +from pathlib import Path +from threading import Thread + +import numpy as np +import torch +from tqdm import tqdm + +FILE = Path(__file__).resolve() +ROOT = FILE.parents[0] # YOLOv5 root directory +if str(ROOT) not in sys.path: + sys.path.append(str(ROOT)) # add ROOT to PATH +ROOT = Path(os.path.relpath(ROOT, Path.cwd())) # relative + +from export import load_checkpoint +from models.common import DetectMultiBackend +from utils.callbacks import Callbacks +from utils.datasets import create_dataloader +from utils.general import (LOGGER, box_iou, check_dataset, check_img_size, check_requirements, check_yaml, + coco80_to_coco91_class, colorstr, increment_path, non_max_suppression, print_args, + scale_coords, xywh2xyxy, xyxy2xywh) +from utils.metrics import ConfusionMatrix, ap_per_class +from utils.plots import output_to_target, plot_images, plot_val_study +from utils.torch_utils import select_device, time_sync + + +def save_one_txt(predn, save_conf, shape, file): + # Save one txt result + gn = torch.tensor(shape)[[1, 0, 1, 0]] # normalization gain whwh + for *xyxy, conf, cls in predn.tolist(): + xywh = (xyxy2xywh(torch.tensor(xyxy).view(1, 4)) / gn).view(-1).tolist() # normalized xywh + line = (cls, *xywh, conf) if save_conf else (cls, *xywh) # label format + with open(file, 'a') as f: + f.write(('%g ' * len(line)).rstrip() % line + '\n') + + +def save_one_json(predn, jdict, path, class_map): + # Save one JSON result {"image_id": 42, "category_id": 18, "bbox": [258.15, 41.29, 348.26, 243.78], "score": 0.236} + image_id = int(path.stem) if path.stem.isnumeric() else path.stem + box = xyxy2xywh(predn[:, :4]) # xywh + box[:, :2] -= box[:, 2:] / 2 # xy center to top-left corner + for p, b in zip(predn.tolist(), box.tolist()): + jdict.append({'image_id': image_id, + 'category_id': class_map[int(p[5])], + 'bbox': [round(x, 3) for x in b], + 'score': round(p[4], 5)}) + + +def process_batch(detections, labels, iouv): + """ + Return correct predictions matrix. Both sets of boxes are in (x1, y1, x2, y2) format. 
+
+
+def process_batch(detections, labels, iouv):
+    """
+    Return correct predictions matrix. Both sets of boxes are in (x1, y1, x2, y2) format.
+    Arguments:
+        detections (Array[N, 6]), x1, y1, x2, y2, conf, class
+        labels (Array[M, 5]), class, x1, y1, x2, y2
+    Returns:
+        correct (Array[N, 10]), for 10 IoU levels
+    """
+    correct = torch.zeros(detections.shape[0], iouv.shape[0], dtype=torch.bool, device=iouv.device)
+    iou = box_iou(labels[:, 1:], detections[:, :4])
+    x = torch.where((iou >= iouv[0]) & (labels[:, 0:1] == detections[:, 5]))  # IoU above threshold and classes match
+    if x[0].shape[0]:
+        matches = torch.cat((torch.stack(x, 1), iou[x[0], x[1]][:, None]), 1).cpu().numpy()  # [label, detection, iou]
+        if x[0].shape[0] > 1:
+            matches = matches[matches[:, 2].argsort()[::-1]]
+            matches = matches[np.unique(matches[:, 1], return_index=True)[1]]
+            # matches = matches[matches[:, 2].argsort()[::-1]]
+            matches = matches[np.unique(matches[:, 0], return_index=True)[1]]
+        matches = torch.from_numpy(matches).to(iouv.device)
+        correct[matches[:, 1].long()] = matches[:, 2:3] >= iouv
+    return correct
+
+
+@torch.no_grad()
+def run(data,
+        weights=None,  # model.pt path(s)
+        batch_size=32,  # batch size
+        imgsz=640,  # inference size (pixels)
+        conf_thres=0.001,  # confidence threshold
+        iou_thres=0.6,  # NMS IoU threshold
+        task='val',  # train, val, test, speed or study
+        device='',  # cuda device, i.e. 0 or 0,1,2,3 or cpu
+        workers=8,  # max dataloader workers (per RANK in DDP mode)
+        single_cls=False,  # treat as single-class dataset
+        augment=False,  # augmented inference
+        verbose=False,  # verbose output
+        save_txt=False,  # save results to *.txt
+        save_hybrid=False,  # save label+prediction hybrid results to *.txt
+        save_conf=False,  # save confidences in --save-txt labels
+        save_json=False,  # save a COCO-JSON results file
+        project=ROOT / 'runs/val',  # save to project/name
+        name='exp',  # save to project/name
+        exist_ok=False,  # existing project/name ok, do not increment
+        half=True,  # use FP16 half-precision inference
+        dnn=False,  # use OpenCV DNN for ONNX inference
+        model=None,
+        dataloader=None,
+        save_dir=Path(''),
+        plots=True,
+        callbacks=Callbacks(),
+        compute_loss=None,
+        ):
+    # Initialize/load model and set device
+    training = model is not None
+    if training:  # called by train.py
+        device, pt, jit, engine = next(model.parameters()).device, True, False, False  # get model device, PyTorch model
+        half &= device.type != 'cpu'  # half precision only supported on CUDA
+        model.half() if half else model.float()
+    else:  # called directly
+        device = select_device(device, batch_size=batch_size)
+
+        # Directories
+        save_dir = increment_path(Path(project) / name, exist_ok=exist_ok)  # increment run
+        (save_dir / 'labels' if save_txt else save_dir).mkdir(parents=True, exist_ok=True)  # make dir
+
+        # Load model
+        model, extras = load_checkpoint(type_='val', weights=weights, device=device)  # load FP32 model
+        stride, pt, jit, engine = model.stride, model.pt, model.jit, model.engine
+        imgsz = check_img_size(imgsz, s=stride)  # check image size
+        half = model.fp16  # FP16 supported on limited backends with CUDA
+        if engine:
+            batch_size = model.batch_size
+        else:
+            device = model.device
+            if not (pt or jit):
+                batch_size = 1  # export.py models default to batch-size 1
+                LOGGER.info(f'Forcing --batch-size 1 square inference (1,3,{imgsz},{imgsz}) for non-PyTorch models')
+
+        # Data
+        data = check_dataset(data)  # check
+
+    # Configure
+    model.eval()
+    cuda = device.type != 'cpu'
+    is_coco = isinstance(data.get('val'), str) and data['val'].endswith('coco/val2017.txt')  # COCO dataset
+    nc = 1 if single_cls else int(data['nc'])  # number of classes
+    iouv = torch.linspace(0.5, 0.95, 10, device=device)  # iou vector for mAP@0.5:0.95
+    niou = iouv.numel()
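
To make the matching concrete, here is a tiny check one could run in this module's context (it assumes `process_batch` and the imported `box_iou` are in scope): a single detection that exactly overlaps its label counts as correct at every IoU threshold from 0.50 to 0.95.

```python
import torch

iouv = torch.linspace(0.5, 0.95, 10)  # same thresholds as in run()
detections = torch.tensor([[0., 0., 100., 100., 0.90, 0.]])  # x1, y1, x2, y2, conf, cls
labels = torch.tensor([[0., 0., 0., 100., 100.]])            # cls, x1, y1, x2, y2
correct = process_batch(detections, labels, iouv)
print(correct)  # tensor([[True, True, ..., True]]): IoU = 1.0 clears all 10 levels
```
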
+
+    # Dataloader
+    if not training:
+        model.warmup(imgsz=(1 if pt else batch_size, 3, imgsz, imgsz))  # warmup
+        pad = 0.0 if task in ('speed', 'benchmark') else 0.5
+        rect = False if task == 'benchmark' else pt  # square inference for benchmarks
+        task = task if task in ('train', 'val', 'test') else 'val'  # path to train/val/test images
+        dataloader = create_dataloader(data[task], imgsz, batch_size, stride, single_cls, pad=pad, rect=rect,
+                                       workers=workers, prefix=colorstr(f'{task}: '))[0]
+
+    seen = 0
+    confusion_matrix = ConfusionMatrix(nc=nc)
+    names = {k: v for k, v in enumerate(model.names if hasattr(model, 'names') else model.module.names)}
+    class_map = coco80_to_coco91_class() if is_coco else list(range(1000))
+    s = ('%20s' + '%11s' * 6) % ('Class', 'Images', 'Labels', 'P', 'R', 'mAP@.5', 'mAP@.5:.95')
+    dt, p, r, f1, mp, mr, map50, map = [0.0, 0.0, 0.0], 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0
+    loss = torch.zeros(3, device=device)
+    jdict, stats, ap, ap_class = [], [], [], []
+    pbar = tqdm(dataloader, desc=s, bar_format='{l_bar}{bar:10}{r_bar}{bar:-10b}')  # progress bar
+    for batch_i, (im, targets, paths, shapes) in enumerate(pbar):
+        t1 = time_sync()
+        if cuda:
+            im = im.to(device, non_blocking=True)
+            targets = targets.to(device)
+        im = im.half() if half else im.float()  # uint8 to fp16/32
+        im /= 255  # 0 - 255 to 0.0 - 1.0
+        nb, _, height, width = im.shape  # batch size, channels, height, width
+        t2 = time_sync()
+        dt[0] += t2 - t1
+
+        # Inference
+        out, train_out = model(im) if training else model(im, augment=augment, val=True)  # inference, loss outputs
+        dt[1] += time_sync() - t2
+
+        # Loss
+        if compute_loss:
+            loss += compute_loss([x.float() for x in train_out], targets)[1]  # box, obj, cls
+
+        # NMS
+        targets[:, 2:] *= torch.tensor((width, height, width, height), device=device)  # to pixels
+        lb = [targets[targets[:, 0] == i, 1:] for i in range(nb)] if save_hybrid else []  # for autolabelling
+        t3 = time_sync()
+        out = non_max_suppression(out, conf_thres, iou_thres, labels=lb, multi_label=True, agnostic=single_cls)
+        dt[2] += time_sync() - t3
+
+        # Metrics
+        for si, pred in enumerate(out):
+            labels = targets[targets[:, 0] == si, 1:]
+            nl = len(labels)
+            tcls = labels[:, 0].tolist() if nl else []  # target class
+            path, shape = Path(paths[si]), shapes[si][0]
+            seen += 1
+
+            if len(pred) == 0:
+                if nl:
+                    stats.append((torch.zeros(0, niou, dtype=torch.bool), torch.Tensor(), torch.Tensor(), tcls))
+                continue
+
+            # Predictions
+            if single_cls:
+                pred[:, 5] = 0
+            predn = pred.clone()
+            scale_coords(im[si].shape[1:], predn[:, :4], shape, shapes[si][1])  # native-space pred
+
+            # Evaluate
+            if nl:
+                tbox = xywh2xyxy(labels[:, 1:5])  # target boxes
+                scale_coords(im[si].shape[1:], tbox, shape, shapes[si][1])  # native-space labels
+                labelsn = torch.cat((labels[:, 0:1], tbox), 1)  # native-space labels
+                correct = process_batch(predn, labelsn, iouv)
+                if plots:
+                    confusion_matrix.process_batch(predn, labelsn)
+            else:
+                correct = torch.zeros(pred.shape[0], niou, dtype=torch.bool)
+            stats.append((correct.cpu(), pred[:, 4].cpu(), pred[:, 5].cpu(), tcls))  # (correct, conf, pcls, tcls)
+
+            # Save/log
+            if save_txt:
+                save_one_txt(predn, save_conf, shape, file=save_dir / 'labels' / (path.stem + '.txt'))
+            if save_json:
+                save_one_json(predn, jdict, path, class_map)  # append to COCO-JSON dictionary
+            callbacks.run('on_val_image_end', pred, predn, path, names, im[si])
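
Note the unit change before NMS: dataloader targets arrive as normalized `(image_index, class, x, y, w, h)` rows, and multiplying columns `2:` by `(width, height, width, height)` puts them in letterboxed-pixel space so they can be compared against predictions. A small worked sketch:

```python
import torch

width = height = 640  # letterboxed input size
targets = torch.tensor([[0., 5., 0.5, 0.5, 0.25, 0.5]])  # image idx, cls, then normalized xywh
targets[:, 2:] *= torch.tensor((width, height, width, height))
# -> [[0., 5., 320., 320., 160., 320.]]  box centered at (320, 320), 160x320 px
```
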
+
+        # Plot images
+        if plots and batch_i < 3:
+            f = save_dir / f'val_batch{batch_i}_labels.jpg'  # labels
+            Thread(target=plot_images, args=(im, targets, paths, f, names), daemon=True).start()
+            f = save_dir / f'val_batch{batch_i}_pred.jpg'  # predictions
+            Thread(target=plot_images, args=(im, output_to_target(out), paths, f, names), daemon=True).start()
+
+    # Compute metrics
+    stats = [np.concatenate(x, 0) for x in zip(*stats)]  # to numpy
+    if len(stats) and stats[0].any():
+        tp, fp, p, r, f1, ap, ap_class = ap_per_class(*stats, plot=plots, save_dir=save_dir, names=names)
+        ap50, ap = ap[:, 0], ap.mean(1)  # AP@0.5, AP@0.5:0.95
+        mp, mr, map50, map = p.mean(), r.mean(), ap50.mean(), ap.mean()
+        nt = np.bincount(stats[3].astype(np.int64), minlength=nc)  # number of targets per class
+    else:
+        nt = torch.zeros(1)
+
+    # Print results
+    pf = '%20s' + '%11i' * 2 + '%11.3g' * 4  # print format
+    LOGGER.info(pf % ('all', seen, nt.sum(), mp, mr, map50, map))
+
+    # Print results per class
+    if (verbose or (nc < 50 and not training)) and nc > 1 and len(stats):
+        for i, c in enumerate(ap_class):
+            LOGGER.info(pf % (names[c], seen, nt[c], p[i], r[i], ap50[i], ap[i]))
+
+    # Print speeds
+    t = tuple(x / seen * 1E3 for x in dt)  # speeds per image
+    if not training:
+        shape = (batch_size, 3, imgsz, imgsz)
+        LOGGER.info(f'Speed: %.1fms pre-process, %.1fms inference, %.1fms NMS per image at shape {shape}' % t)
+
+    # Plots
+    if plots:
+        confusion_matrix.plot(save_dir=save_dir, names=list(names.values()))
+        callbacks.run('on_val_end')
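
The per-class target counts printed in the results table come straight from a bincount over the fourth element of the accumulated stats tuples (the target classes). A minimal sketch:

```python
import numpy as np

tcls = np.array([0, 0, 2, 2, 2])  # target classes collected across the val set
nt = np.bincount(tcls.astype(np.int64), minlength=4)
print(nt)        # [2 0 3 0] -> classes 1 and 3 had no labels
print(nt.sum())  # 5, the 'Labels' value of the 'all' row
```
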
+
+    # Save JSON
+    if save_json and len(jdict):
+        w = Path(weights[0] if isinstance(weights, list) else weights).stem if weights is not None else ''  # weights
+        anno_json = str(Path(data.get('path', '../coco')) / 'annotations/instances_val2017.json')  # annotations json
+        pred_json = str(save_dir / f"{w}_predictions.json")  # predictions json
+        LOGGER.info(f'\nEvaluating pycocotools mAP... saving {pred_json}...')
+        with open(pred_json, 'w') as f:
+            json.dump(jdict, f)
+
+        try:  # https://github.com/cocodataset/cocoapi/blob/master/PythonAPI/pycocoEvalDemo.ipynb
+            check_requirements(['pycocotools'])
+            from pycocotools.coco import COCO
+            from pycocotools.cocoeval import COCOeval
+
+            anno = COCO(anno_json)  # init annotations api
+            pred = anno.loadRes(pred_json)  # init predictions api
+            eval = COCOeval(anno, pred, 'bbox')
+            if is_coco:
+                eval.params.imgIds = [int(Path(x).stem) for x in dataloader.dataset.im_files]  # image IDs to evaluate
+            eval.evaluate()
+            eval.accumulate()
+            eval.summarize()
+            map, map50 = eval.stats[:2]  # update results (mAP@0.5:0.95, mAP@0.5)
+        except Exception as e:
+            LOGGER.info(f'pycocotools unable to run: {e}')
+
+    # Return results
+    model.float()  # for training
+    if not training:
+        s = f"\n{len(list(save_dir.glob('labels/*.txt')))} labels saved to {save_dir / 'labels'}" if save_txt else ''
+        LOGGER.info(f"Results saved to {colorstr('bold', save_dir)}{s}")
+    maps = np.zeros(nc) + map
+    for i, c in enumerate(ap_class):
+        maps[c] = ap[i]
+    return (mp, mr, map50, map, *(loss.cpu() / len(dataloader)).tolist()), maps, t
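
Since `run()` is exposed at module level under `@torch.no_grad()`, it can also be driven from Python rather than the CLI. A sketch assuming a local `yolov5s.pt` checkpoint and `data/coco128.yaml` (and noting that this fork loads weights through `load_checkpoint`, so checkpoint format requirements may differ from upstream):

```python
import val  # this module

results, maps, t = val.run(data='data/coco128.yaml',  # assumed local dataset yaml
                           weights='yolov5s.pt',      # assumed local checkpoint
                           imgsz=640,
                           half=False)
mp, mr, map50, map95 = results[:4]  # mean P, R, mAP@0.5, mAP@0.5:0.95
print(f'mAP@0.5 {map50:.3f}, mAP@0.5:0.95 {map95:.3f}, speeds {t} ms/img')
```
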
+
+
+def parse_opt():
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--data', type=str, default=ROOT / 'data/coco128.yaml', help='dataset.yaml path')
+    parser.add_argument('--weights', nargs='+', type=str, default=ROOT / 'yolov5s.pt', help='model.pt path(s)')
+    parser.add_argument('--batch-size', type=int, default=32, help='batch size')
+    parser.add_argument('--imgsz', '--img', '--img-size', type=int, default=640, help='inference size (pixels)')
+    parser.add_argument('--conf-thres', type=float, default=0.001, help='confidence threshold')
+    parser.add_argument('--iou-thres', type=float, default=0.6, help='NMS IoU threshold')
+    parser.add_argument('--task', default='val', help='train, val, test, speed or study')
+    parser.add_argument('--device', default='', help='cuda device, i.e. 0 or 0,1,2,3 or cpu')
+    parser.add_argument('--workers', type=int, default=8, help='max dataloader workers (per RANK in DDP mode)')
+    parser.add_argument('--single-cls', action='store_true', help='treat as single-class dataset')
+    parser.add_argument('--augment', action='store_true', help='augmented inference')
+    parser.add_argument('--verbose', action='store_true', help='report mAP by class')
+    parser.add_argument('--save-txt', action='store_true', help='save results to *.txt')
+    parser.add_argument('--save-hybrid', action='store_true', help='save label+prediction hybrid results to *.txt')
+    parser.add_argument('--save-conf', action='store_true', help='save confidences in --save-txt labels')
+    parser.add_argument('--save-json', action='store_true', help='save a COCO-JSON results file')
+    parser.add_argument('--project', default=ROOT / 'runs/val', help='save to project/name')
+    parser.add_argument('--name', default='exp', help='save to project/name')
+    parser.add_argument('--exist-ok', action='store_true', help='existing project/name ok, do not increment')
+    parser.add_argument('--half', action='store_true', help='use FP16 half-precision inference')
+    parser.add_argument('--dnn', action='store_true', help='use OpenCV DNN for ONNX inference')
+    opt = parser.parse_args()
+    opt.data = check_yaml(opt.data)  # check YAML
+    opt.save_json |= opt.data.endswith('coco.yaml')
+    opt.save_txt |= opt.save_hybrid
+    print_args(FILE.stem, opt)
+    return opt
+
+
+def main(opt):
+    check_requirements(requirements=ROOT / 'requirements.txt', exclude=('tensorboard', 'thop'))
+
+    if opt.task in ('train', 'val', 'test'):  # run normally
+        if opt.conf_thres > 0.001:  # https://github.com/ultralytics/yolov5/issues/1466
+            LOGGER.info(f'WARNING: confidence threshold {opt.conf_thres} >> 0.001 will produce invalid mAP values.')
+        run(**vars(opt))
+
+    else:
+        weights = opt.weights if isinstance(opt.weights, list) else [opt.weights]
+        opt.half = True  # FP16 for fastest results
+        if opt.task == 'speed':  # speed benchmarks
+            # python val.py --task speed --data coco.yaml --batch 1 --weights yolov5n.pt yolov5s.pt...
+            opt.conf_thres, opt.iou_thres, opt.save_json = 0.25, 0.45, False
+            for opt.weights in weights:
+                run(**vars(opt), plots=False)
+
+        elif opt.task == 'study':  # speed vs mAP benchmarks
+            # python val.py --task study --data coco.yaml --iou 0.7 --weights yolov5n.pt yolov5s.pt...
+            for opt.weights in weights:
+                f = f'study_{Path(opt.data).stem}_{Path(opt.weights).stem}.txt'  # filename to save to
+                x, y = list(range(256, 1536 + 128, 128)), []  # x axis (image sizes), y axis
+                for opt.imgsz in x:  # img-size
+                    LOGGER.info(f'\nRunning {f} --imgsz {opt.imgsz}...')
+                    r, _, t = run(**vars(opt), plots=False)
+                    y.append(r + t)  # results and times
+                np.savetxt(f, y, fmt='%10.4g')  # save
+            os.system('zip -r study.zip study_*.txt')
+            plot_val_study(x=x)  # plot
+
+
+if __name__ == "__main__":
+    opt = parse_opt()
+    main(opt)
diff --git a/weights/download_weights.sh b/weights/download_weights.sh
deleted file mode 100755
index 43c8e31d80fd..000000000000
--- a/weights/download_weights.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/bash
-# Download latest models from https://github.com/ultralytics/yolov5/releases
-# Usage:
-#    $ bash weights/download_weights.sh
-
-python - <