Add files using upload-large-folder tool
- .gitattributes +1 -33
- .gitignore +1 -0
- .idea/.gitignore +8 -0
- .idea/Deep Learning Computer Vision CNN, OpenCV, YOLO, SSD & GANs.iml +8 -0
- .idea/misc.xml +7 -0
- .idea/modules.xml +8 -0
- .idea/vcs.xml +4 -0
- .idea/workspace.xml +78 -0
- 10. Data Augmentation Cats vs Dogs/1. Data Augmentation Chapter Overview.srt +51 -0
- 10. Data Augmentation Cats vs Dogs/2. Splitting Data into Test and Training Datasets.srt +623 -0
- 10. Data Augmentation Cats vs Dogs/2.1 Dataset.html +1 -0
- 10. Data Augmentation Cats vs Dogs/3. Train a Cats vs. Dogs Classifier.srt +267 -0
- 10. Data Augmentation Cats vs Dogs/4. Boosting Accuracy with Data Augmentation.srt +283 -0
- 10. Data Augmentation Cats vs Dogs/5. Types of Data Augmentation.srt +351 -0
- 10. Data Augmentation/10.1 - 10.3 - Data Augmentation - Cats vs Dogs.ipynb +802 -0
- 10. Data Augmentation/10.4 - Data Augmentation Demos.ipynb +420 -0
- 10. Data Augmentation/dog.jpeg +0 -0
- 11. Assessing Model Performance/1. Introduction to the Confusion Matrix & Viewing Misclassifications.srt +35 -0
- 11. Assessing Model Performance/2. Understanding the Confusion Matrix.srt +723 -0
- 11. Assessing Model Performance/3. Finding and Viewing Misclassified Data.srt +375 -0
- 11. Confusion Matrix and Viewing Misclassifications/11.1 - 11.2 - MNIST Confusion Matrix Analysis and Viewing Misclassifications.ipynb +484 -0
- 12. Optimizers, Adaptive Learning Rate & Callbacks/12.2 Checkpointing Models and Early Stopping.ipynb +277 -0
- 12. Optimizers, Adaptive Learning Rate & Callbacks/12.3 Building a Fruit Classifer.ipynb +0 -0
- 12. Optimizers, Learning Rates & Callbacks with Fruit Classification/1. Introduction to the types of Optimizers, Learning Rates & Callbacks.srt +35 -0
- 12. Optimizers, Learning Rates & Callbacks with Fruit Classification/2. Types Optimizers and Adaptive Learning Rate Methods.srt +439 -0
- 12. Optimizers, Learning Rates & Callbacks with Fruit Classification/3. Keras Callbacks and Checkpoint, Early Stopping and Adjust Learning Rates that Pl.srt +379 -0
- 12. Optimizers, Learning Rates & Callbacks with Fruit Classification/4. Build a Fruit Classifier.srt +527 -0
- 12. Optimizers, Learning Rates & Callbacks with Fruit Classification/4.1 Download Fruits Dataset.html +1 -0
- 13. Batch Normalization & LeNet, AlexNet Clothing Classifier/1. Intro to Building LeNet, AlexNet in Keras & Understand Batch Normalization.srt +31 -0
- 13. Batch Normalization & LeNet, AlexNet Clothing Classifier/2. Build LeNet and test on MNIST.srt +187 -0
- 13. Batch Normalization & LeNet, AlexNet Clothing Classifier/3. Build AlexNet and test on CIFAR10.srt +283 -0
- 13. Batch Normalization & LeNet, AlexNet Clothing Classifier/4. Batch Normalization.srt +155 -0
- 13. Batch Normalization & LeNet, AlexNet Clothing Classifier/5. Build a Clothing & Apparel Classifier (Fashion MNIST).srt +351 -0
- 13. Batch Normalization & LeNet, AlexNet Clothing Classifier/5.1 Fashion MNIST dataset.html +1 -0
- 13. Building LeNet and AlexNet in Keras/13.1 Built LeNet and test on MNIST.ipynb +209 -0
- 13. Building LeNet and AlexNet in Keras/13.2 Build AlexNet and test on CIFAR10.ipynb +266 -0
- 13. Building LeNet and AlexNet in Keras/13.4 Fashion MNIST.ipynb +445 -0
- 14. Advanced Image Classiers - ImageNet in Keras (VGG1619, InceptionV3, ResNet50)/1. Chapter Introduction.srt +35 -0
- 14. Advanced Image Classiers - ImageNet in Keras (VGG1619, InceptionV3, ResNet50)/2. ImageNet - Experimenting with pre-trained Models in Keras (VGG16, ResNet50, Mobi.srt +575 -0
- 14. Advanced Image Classiers - ImageNet in Keras (VGG1619, InceptionV3, ResNet50)/3. Understanding VGG16 and VGG19.srt +95 -0
- 14. Advanced Image Classiers - ImageNet in Keras (VGG1619, InceptionV3, ResNet50)/4. Understanding ResNet50.srt +83 -0
- 14. Advanced Image Classiers - ImageNet in Keras (VGG1619, InceptionV3, ResNet50)/5. Understanding InceptionV3.srt +139 -0
- 14. ImageNet and Pretrained Models VGG16_ResNet50_InceptionV3/14.1 Experimenting with pre-trained Models in Keras.ipynb +227 -0
- 15. Transfer Learning & Fine Tuning/15.2 Using MobileNet to make a Monkey Breed Classifier.ipynb +657 -0
- 15. Transfer Learning & Fine Tuning/15.3 Making a Flower Classifier with VGG16.ipynb +693 -0
- 15. Transfer Learning Build a Flower & Monkey Breed Classifier/1. Chapter Introduction.srt +31 -0
- 15. Transfer Learning Build a Flower & Monkey Breed Classifier/2. What is Transfer Learning and Fine Tuning.srt +355 -0
- Es3VFTNXMAE9uId.jpeg +0 -0
- ListCamera.py +59 -0
- OpenCV.py +31 -0
.gitattributes
CHANGED
@@ -1,35 +1,3 @@
-*.
-*.arrow filter=lfs diff=lfs merge=lfs -text
-*.bin filter=lfs diff=lfs merge=lfs -text
-*.bz2 filter=lfs diff=lfs merge=lfs -text
-*.ckpt filter=lfs diff=lfs merge=lfs -text
-*.ftz filter=lfs diff=lfs merge=lfs -text
-*.gz filter=lfs diff=lfs merge=lfs -text
+*.ipynb filter=lfs diff=lfs merge=lfs -text
 *.h5 filter=lfs diff=lfs merge=lfs -text
-*.joblib filter=lfs diff=lfs merge=lfs -text
-*.lfs.* filter=lfs diff=lfs merge=lfs -text
-*.mlmodel filter=lfs diff=lfs merge=lfs -text
-*.model filter=lfs diff=lfs merge=lfs -text
-*.msgpack filter=lfs diff=lfs merge=lfs -text
 *.npy filter=lfs diff=lfs merge=lfs -text
-*.npz filter=lfs diff=lfs merge=lfs -text
-*.onnx filter=lfs diff=lfs merge=lfs -text
-*.ot filter=lfs diff=lfs merge=lfs -text
-*.parquet filter=lfs diff=lfs merge=lfs -text
-*.pb filter=lfs diff=lfs merge=lfs -text
-*.pickle filter=lfs diff=lfs merge=lfs -text
-*.pkl filter=lfs diff=lfs merge=lfs -text
-*.pt filter=lfs diff=lfs merge=lfs -text
-*.pth filter=lfs diff=lfs merge=lfs -text
-*.rar filter=lfs diff=lfs merge=lfs -text
-*.safetensors filter=lfs diff=lfs merge=lfs -text
-saved_model/**/* filter=lfs diff=lfs merge=lfs -text
-*.tar.* filter=lfs diff=lfs merge=lfs -text
-*.tar filter=lfs diff=lfs merge=lfs -text
-*.tflite filter=lfs diff=lfs merge=lfs -text
-*.tgz filter=lfs diff=lfs merge=lfs -text
-*.wasm filter=lfs diff=lfs merge=lfs -text
-*.xz filter=lfs diff=lfs merge=lfs -text
-*.zip filter=lfs diff=lfs merge=lfs -text
-*.zst filter=lfs diff=lfs merge=lfs -text
-*tfevents* filter=lfs diff=lfs merge=lfs -text
.gitignore
ADDED
@@ -0,0 +1 @@
+.idea
.idea/.gitignore
ADDED
@@ -0,0 +1,8 @@
+# Default ignored files
+/shelf/
+/workspace.xml
+# Editor-based HTTP Client requests
+/httpRequests/
+# Datasource local storage ignored files
+/dataSources/
+/dataSources.local.xml
.idea/Deep Learning Computer Vision CNN, OpenCV, YOLO, SSD & GANs.iml
ADDED
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="PYTHON_MODULE" version="4">
+  <component name="NewModuleRootManager">
+    <content url="file://$MODULE_DIR$" />
+    <orderEntry type="jdk" jdkName="dl" jdkType="Python SDK" />
+    <orderEntry type="sourceFolder" forTests="false" />
+  </component>
+</module>
.idea/misc.xml
ADDED
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project version="4">
+  <component name="Black">
+    <option name="sdkName" value="dl" />
+  </component>
+  <component name="ProjectRootManager" version="2" project-jdk-name="dl" project-jdk-type="Python SDK" />
+</project>
.idea/modules.xml
ADDED
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project version="4">
+  <component name="ProjectModuleManager">
+    <modules>
+      <module fileurl="file://$PROJECT_DIR$/.idea/Deep Learning Computer Vision CNN, OpenCV, YOLO, SSD & GANs.iml" filepath="$PROJECT_DIR$/.idea/Deep Learning Computer Vision CNN, OpenCV, YOLO, SSD & GANs.iml" />
+    </modules>
+  </component>
+</project>
.idea/vcs.xml
ADDED
@@ -0,0 +1,4 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project version="4">
+  <component name="VcsDirectoryMappings" defaultProject="true" />
+</project>
.idea/workspace.xml
ADDED
@@ -0,0 +1,78 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project version="4">
+  <component name="AutoImportSettings">
+    <option name="autoReloadType" value="SELECTIVE" />
+  </component>
+  <component name="ChangeListManager">
+    <list default="true" id="b03a9f48-c886-4678-9691-37f72b1fd336" name="Changes" comment="" />
+    <option name="SHOW_DIALOG" value="false" />
+    <option name="HIGHLIGHT_CONFLICTS" value="true" />
+    <option name="HIGHLIGHT_NON_ACTIVE_CHANGELIST" value="false" />
+    <option name="LAST_RESOLUTION" value="IGNORE" />
+  </component>
+  <component name="ProjectColorInfo">{
+  "associatedIndex": 8
+}</component>
+  <component name="ProjectId" id="2tUNPHljTXuBPnb9uDmb0Y2uUGr" />
+  <component name="ProjectViewState">
+    <option name="hideEmptyMiddlePackages" value="true" />
+    <option name="showLibraryContents" value="true" />
+  </component>
+  <component name="PropertiesComponent">{
+  "keyToString": {
+    "Python.ListCamera.executor": "Debug",
+    "Python.OpenCV.executor": "Run",
+    "RunOnceActivity.ShowReadmeOnStart": "true",
+    "node.js.detected.package.eslint": "true",
+    "node.js.detected.package.tslint": "true",
+    "node.js.selected.package.eslint": "(autodetect)",
+    "node.js.selected.package.tslint": "(autodetect)",
+    "nodejs_package_manager_path": "npm",
+    "settings.editor.selected.configurable": "settings.sync",
+    "vue.rearranger.settings.migration": "true"
+  }
+}</component>
+  <component name="SharedIndexes">
+    <attachedChunks>
+      <set>
+        <option value="bundled-js-predefined-d6986cc7102b-1632447f56bf-JavaScript-PY-243.25659.43" />
+        <option value="bundled-python-sdk-181015f7ab06-4df51de95216-com.jetbrains.pycharm.pro.sharedIndexes.bundled-PY-243.25659.43" />
+      </set>
+    </attachedChunks>
+  </component>
+  <component name="SpellCheckerSettings" RuntimeDictionaries="0" Folders="0" CustomDictionaries="0" DefaultDictionary="application-level" UseSingleDictionary="true" transferred="true" />
+  <component name="TaskManager">
+    <task active="true" id="Default" summary="Default task">
+      <changelist id="b03a9f48-c886-4678-9691-37f72b1fd336" name="Changes" comment="" />
+      <created>1740402669465</created>
+      <option name="number" value="Default" />
+      <option name="presentableId" value="Default" />
+      <updated>1740402669465</updated>
+      <workItem from="1740402670794" duration="15000" />
+      <workItem from="1740489587852" duration="1191000" />
+      <workItem from="1741881545480" duration="1553000" />
+      <workItem from="1742894593591" duration="28000" />
+      <workItem from="1742913412039" duration="131000" />
+      <workItem from="1742913719977" duration="643000" />
+    </task>
+    <servers />
+  </component>
+  <component name="TypeScriptGeneratedFilesManager">
+    <option name="version" value="3" />
+  </component>
+  <component name="XDebuggerManager">
+    <breakpoint-manager>
+      <breakpoints>
+        <line-breakpoint enabled="true" suspend="THREAD" type="python-line">
+          <url>file://$PROJECT_DIR$/ListCamera.py</url>
+          <line>54</line>
+          <option name="timeStamp" value="2" />
+        </line-breakpoint>
+      </breakpoints>
+    </breakpoint-manager>
+  </component>
+  <component name="com.intellij.coverage.CoverageDataManagerImpl">
+    <SUITE FILE_PATH="coverage/Deep_Learning_Computer_Vision_CNN__OpenCV__YOLO__SSD___GANs$OpenCV.coverage" NAME="OpenCV Coverage Results" MODIFIED="1740489854424" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="false" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
+    <SUITE FILE_PATH="coverage/Deep_Learning_Computer_Vision_CNN__OpenCV__YOLO__SSD___GANs$ListCamera.coverage" NAME="ListCamera Coverage Results" MODIFIED="1740490734219" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="false" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
+  </component>
+</project>
10. Data Augmentation Cats vs Dogs/1. Data Augmentation Chapter Overview.srt
ADDED
@@ -0,0 +1,51 @@
+1
+00:00:00,530 --> 00:00:06,890
+Hi, and welcome back to Chapter 10, where we introduce the concept of data augmentation. And the whole
+
+2
+00:00:06,890 --> 00:00:10,750
+reason for data augmentation, which you will learn soon, is how we can
+
+3
+00:00:11,000 --> 00:00:15,770
+now use small datasets instead of super large datasets to train CNNs.
+
+4
+00:00:15,840 --> 00:00:19,050
+So let's take a look at the contents of this chapter.
+
+5
+00:00:19,050 --> 00:00:23,210
+Firstly, we take a look at how we split data into test and training datasets.
+
+6
+00:00:23,210 --> 00:00:27,510
+I'm going to give you a practical example where we start building a cats vs. dogs classifier.
+
+7
+00:00:27,680 --> 00:00:32,790
+However, previously we used Keras to basically load our dataset for us.
+
+8
+00:00:32,810 --> 00:00:38,810
+Now we're actually going to start downloading a dataset of images, and basically we're trying to create
+
+9
+00:00:39,050 --> 00:00:43,400
+one dataset out of it and then use it in Keras next.
+
+10
+00:00:43,400 --> 00:00:47,480
+That's actually what we're going to do right at that point, in attempting to build a simple cats vs.
+
+11
+00:00:47,480 --> 00:00:52,730
+dogs classifier, and then we're going to boost the accuracy of that classifier using data augmentation,
+
+12
+00:00:53,300 --> 00:00:57,410
+and then I'm going to discuss the different types of data augmentation that you will basically be able
+
+13
+00:00:57,410 --> 00:00:59,110
+to use inside of Keras.
10. Data Augmentation Cats vs Dogs/2. Splitting Data into Test and Training Datasets.srt
ADDED
@@ -0,0 +1,623 @@
+1
+00:00:00,390 --> 00:00:00,820
+OK.
+
+2
+00:00:00,870 --> 00:00:06,400
+So in this section we're actually going to learn how to actually bring image data into our Python
+
+3
+00:00:06,400 --> 00:00:10,510
+notebook and basically split it up into test and training datasets.
+
+4
+00:00:10,740 --> 00:00:15,290
+And by doing this we're actually going to start making a simple cats vs. dogs classifier.
+
+5
+00:00:15,720 --> 00:00:21,960
+So let's begin. Now, firstly, if you're using Udemy, you can actually see there's a resources tab for this
+
+6
+00:00:21,970 --> 00:00:28,470
+chapter. The title of this file is the cats vs. dogs dataset. Download that file in your Ubuntu
+
+7
+00:00:28,470 --> 00:00:36,420
+virtual machine, and I'll show you where to place this file once we go back to the Ubuntu screen, and
+
+8
+00:00:36,420 --> 00:00:37,890
+from there you'll be good to go.
+
+9
+00:00:40,390 --> 00:00:40,840
+OK.
+
+10
+00:00:40,880 --> 00:00:47,660
+So previously when we imported our datasets we used Keras directly, and that was... it was super easy
+
+11
+00:00:47,660 --> 00:00:48,050
+to use.
+
+12
+00:00:48,050 --> 00:00:50,840
+We just used Keras to import CIFAR.
+
+13
+00:00:51,170 --> 00:00:56,660
+However, now we actually want to build, or basically create, our own dataset from some images.
+
+14
+00:00:56,990 --> 00:01:04,750
+So as you downloaded the cats vs. dogs dataset, and you probably did if you used Mozilla to load it into
+
+15
+00:01:04,760 --> 00:01:05,370
+your VM...
+
+16
+00:01:05,420 --> 00:01:08,550
+It's going to be in your Downloads folder here.
+
+17
+00:01:08,710 --> 00:01:09,420
+All right.
+
+18
+00:01:09,710 --> 00:01:15,880
+So this is the file I want you to extract. So double click on the file and extract it. Go to
+
+19
+00:01:15,920 --> 00:01:22,830
+home, our DeepLearningCV folder, which is our home directory, and put the extracted file into the Deep
+
+20
+00:01:22,830 --> 00:01:28,870
+Learning folder, and let's place this file into the data augmentation directory here.
+
+21
+00:01:29,280 --> 00:01:29,730
+All right.
+
+22
+00:01:29,780 --> 00:01:31,470
+And that's...
+
+23
+00:01:31,850 --> 00:01:34,170
+I already have this folder named datasets here.
+
+24
+00:01:34,310 --> 00:01:37,750
+However, I want you to actually create a new folder here.
+
+25
+00:01:38,200 --> 00:01:41,300
+So before we do this, let's go to our file manager here.
+
+26
+00:01:41,630 --> 00:01:51,760
+Go to DeepLearningCV, just so you know how to make folders, and let me delete this since I already
+
+27
+00:01:51,760 --> 00:01:52,450
+have it here.
+
+28
+00:01:53,310 --> 00:01:59,140
+I'll leave that, and I'll create a new folder called datasets, and this is where I want you to place the
+
+29
+00:01:59,140 --> 00:02:02,770
+file that we extracted. So go back to our archive manager here.
+
+30
+00:02:03,370 --> 00:02:13,670
+And let's reload this... highlight datasets and press extract, and cha-ching, it's been extracted successfully.
+
+31
+00:02:13,970 --> 00:02:15,440
+Let's check those files. So now...
+
+32
+00:02:15,470 --> 00:02:23,230
+You can see you have datasets, you have a directory called images, and you have a few thousand images.
+
+33
+00:02:23,240 --> 00:02:29,990
+You right click and press properties, and as you can see it's right here, it's three thousand images selected
+
+34
+00:02:29,990 --> 00:02:30,450
+here.
+
+35
+00:02:30,680 --> 00:02:31,600
+All right.
+
+36
+00:02:32,090 --> 00:02:34,490
+So I probably shouldn't have done that right clicking thing there.
+
+37
+00:02:40,550 --> 00:02:41,770
+Anyway, here we go.
+
+38
+00:02:42,110 --> 00:02:43,340
+It's back here.
+
+39
+00:02:43,820 --> 00:02:45,240
+So what we need to do now...
+
+40
+00:02:45,290 --> 00:02:51,980
+You can see: how do we actually get these files here into a training dataset with X train and y
+
+41
+00:02:51,980 --> 00:02:57,730
+train, y labels, test labels, all of those things, X test, y labels?
+
+42
+00:02:58,040 --> 00:03:00,520
+So let's go to the notebook here.
+
+43
+00:03:00,560 --> 00:03:02,380
+This is the notebook we were going to load up here.
+
+44
+00:03:02,720 --> 00:03:05,450
+So go back to Mozilla, I already have it open here.
+
+45
+00:03:05,930 --> 00:03:11,300
+So this is the path to the dataset we just extracted, our raw data images.
+
+46
+00:03:11,370 --> 00:03:17,760
+They're not in any order. Images are labeled like this: cat.xxx, xxx being a number, dog.xxx, xxx being a number as well.
+
+47
+00:03:17,900 --> 00:03:19,700
+They're all JPEGs, and they're all different sizes.
+
+48
+00:03:19,700 --> 00:03:23,290
+By the way, they're not one square uniform size.
+
+49
+00:03:23,330 --> 00:03:28,680
+So what we do is some simple code I wrote here that basically... this just gets all the files
+
+50
+00:03:28,670 --> 00:03:31,210
+in that directory and stores the file names.
+
+51
+00:03:31,430 --> 00:03:33,740
+And then we print the length to make sure we get all of the images.
+
+52
+00:03:33,740 --> 00:03:38,790
+So that's three thousand images. So now we need to extract the labels.
+
+53
+00:03:38,840 --> 00:03:44,440
+So as you can see, the structure we had before was files of cat or dog.
+
+54
+00:03:44,510 --> 00:03:46,830
+If you scroll along to the bottom you'll see a dog.
+
+55
+00:03:46,910 --> 00:03:52,210
+So we need to actually now store the cat and dog names as class labels.
+
+56
+00:03:52,220 --> 00:03:56,660
+So go back to the Python notebook. I want you all to read this carefully.
+
+57
+00:03:56,660 --> 00:03:57,210
+OK.
+
+58
+00:03:57,500 --> 00:04:00,140
+So we'll go through it first.
+
+59
+00:04:00,140 --> 00:04:04,610
+So we need to store the labels into y train, which are the label files.
+
+60
+00:04:04,820 --> 00:04:08,320
+Then we need to resize the images to 150 by 150.
+
+61
+00:04:08,390 --> 00:04:12,110
+That's the standard dimension we're going to use for this test experiment.
+
+62
+00:04:12,200 --> 00:04:14,970
+You can use any size you want, that's just the size I chose.
+
+63
+00:04:15,440 --> 00:04:20,720
+And we're going to use a thousand images of dogs and a thousand images of cats as training data, and
+
+64
+00:04:20,720 --> 00:04:24,770
+then our validation/testing dataset will get 500 of each class.
+
+65
+00:04:24,800 --> 00:04:32,390
+So you see this adds up to three thousand: a thousand, a thousand, 500 plus 500. And dogs will be labeled 1, cats
+
+66
+00:04:32,480 --> 00:04:37,760
+0, and then we're going to put all these files here into this directory structure.
+
+67
+00:04:37,870 --> 00:04:44,870
+So it's going to be datasets/catsvsdogs, which is the name of our dataset here, and a folder train
+
+68
+00:04:45,470 --> 00:04:49,180
+with all the dog images and one under train with the cat images.
+
+69
+00:04:49,220 --> 00:04:55,760
+These will be the dog images and the cat images, and then we're going to put 500 dogs and 500 cats in
+
+70
+00:04:55,760 --> 00:04:57,830
+a folder called validation here.
+
+71
+00:04:58,340 --> 00:04:59,700
+So let's see how this works.
+
+72
+00:04:59,720 --> 00:05:07,410
+Now, since this isn't a Python course, I'm not going to actually explain this to you in any super detail.
+
+73
+00:05:07,580 --> 00:05:10,180
+However, you can intuitively see what's happening here.
+
+74
+00:05:10,190 --> 00:05:11,940
+I'll try to explain it quickly.
+
+75
+00:05:11,960 --> 00:05:16,010
+So what we're doing: we have some parameters where we define the training size and test size.
+
+76
+00:05:16,220 --> 00:05:19,000
+We create some empty arrays, we define our size.
+
+77
+00:05:19,070 --> 00:05:20,430
+We have some counts.
+
+78
+00:05:20,610 --> 00:05:27,260
+We make some directories here, so those are the directory names that I mentioned above, and we use this function
+
+79
+00:05:27,260 --> 00:05:29,140
+to create directories here.
+
+80
+00:05:29,720 --> 00:05:33,490
+This thing is just to get the numbers counted correctly.
+
+81
+00:05:33,500 --> 00:05:37,670
+Now this part is a bit tricky. It's probably not the best and most efficient code I have ever written, but
+
+82
+00:05:37,790 --> 00:05:39,250
+this is basically what it does:
+
+83
+00:05:39,290 --> 00:05:44,710
+it just goes through all these images, the dog images, pulls out the first one thousand and puts them into
+
+84
+00:05:44,710 --> 00:05:48,590
+the training folder, then looks for the ones that are a thousand to fifteen hundred
+
+85
+00:05:48,590 --> 00:05:54,890
+in the dogs, puts them in the dog validation folder, and renames them and stores them correctly using
+
+86
+00:05:54,890 --> 00:06:00,300
+OpenCV, and it does that for cats and dogs, giving them the correct label here.
+
+87
+00:06:00,620 --> 00:06:05,940
+And once it's done, it stops the loop and says training and test data extraction complete.
+
+88
+00:06:06,290 --> 00:06:07,600
+So let's run this code.
+
+89
+00:06:07,610 --> 00:06:08,620
+It may take a little while.
+
+90
+00:06:08,660 --> 00:06:10,080
+Let me just run this line here
+
+91
+00:06:10,090 --> 00:06:17,280
+first. This should take about 22 seconds to run. When it's running, you see an asterisk as shown.
+
+92
+00:06:17,330 --> 00:06:19,030
+You're familiar with this by now.
+
+93
+00:06:19,670 --> 00:06:22,260
+So just wait here for it to finish.
+
+94
+00:06:39,480 --> 00:06:39,940
+There we go.
+
+95
+00:06:39,990 --> 00:06:41,070
+So it's done.
+
+96
+00:06:41,520 --> 00:06:46,050
+So now what we're going to do... first, let's look and see if this actually did
+
+97
+00:06:46,170 --> 00:06:47,950
+what we said it would do.
+
+98
+00:06:48,000 --> 00:06:51,220
+So we have our datasets here, that's where we saved the file.
+
+99
+00:06:51,220 --> 00:06:51,610
+Good.
+
+100
+00:06:51,630 --> 00:06:52,310
+We have cats.
+
+101
+00:06:52,300 --> 00:06:56,240
+First of all, we have the train and validation folders, which we wanted.
+
+102
+00:06:56,610 --> 00:06:59,240
+We have cats folders and dogs in each directory.
+
+103
+00:06:59,250 --> 00:07:00,290
+So make sure.
+
+104
+00:07:00,780 --> 00:07:02,220
+Now let's see if we have images.
+
+105
+00:07:02,440 --> 00:07:02,750
+OK.
+
+106
+00:07:02,790 --> 00:07:05,180
+That's the important part.
+
+107
+00:07:05,220 --> 00:07:05,630
+Great.
+
+108
+00:07:05,640 --> 00:07:07,420
+So we have the images here.
+
+109
+00:07:07,530 --> 00:07:13,560
+We're supposed to have 2,000 images in this directory, and we should check it, and we're going
+
+110
+00:07:13,560 --> 00:07:15,330
+to highlight it and do that thing again.
+
+111
+00:07:16,030 --> 00:07:17,450
+Slow down, mate.
+
+112
+00:07:17,780 --> 00:07:21,360
+Well, you can see here: a thousand images we highlighted here.
+
+113
+00:07:21,690 --> 00:07:23,490
+Dogs: a thousand images as well.
+
+114
+00:07:23,550 --> 00:07:24,740
+Let's just make sure.
+
+115
+00:07:25,190 --> 00:07:26,000
+Excellent.
+
+116
+00:07:26,180 --> 00:07:28,900
+And now let's go look at validation.
+
+117
+00:07:28,980 --> 00:07:32,050
+Check those... good.
+
+118
+00:07:32,180 --> 00:07:35,760
+We have what looks to be a lot fewer images; it should be 500.
+
+119
+00:07:35,780 --> 00:07:38,240
+And it is 500.
+
+120
+00:07:38,320 --> 00:07:42,770
+And likewise, we're going to have 500 dog images here.
+
+121
+00:07:42,970 --> 00:07:48,570
+This is going to be the unseen testing data that we're going to use to validate our model accuracy on.
+
+122
+00:07:49,090 --> 00:07:51,820
+So we've created a dataset here in this structure.
+
+123
+00:07:51,910 --> 00:07:53,540
+We've resized all the images.
+
+124
+00:07:53,950 --> 00:07:56,470
+And now what do we do?
+
+125
+00:07:56,920 --> 00:08:01,870
+So basically, what we can do... this is something I have done a lot.
+
+126
+00:08:02,020 --> 00:08:03,010
+What it actually does...
+
+127
+00:08:03,010 --> 00:08:12,470
+It uses NumPy's savez feature: you basically take the full list of labels and images here and dump it into a file.
+
+128
+00:08:12,550 --> 00:08:14,390
+Because remember, we have the images in arrays here.
+
+129
+00:08:14,400 --> 00:08:15,820
+And if you noted that...
+
+130
+00:08:16,000 --> 00:08:17,880
+That's why I declared these empty arrays here.
+
+131
+00:08:17,930 --> 00:08:20,560
+These lists store the images here.
+
+132
+00:08:20,560 --> 00:08:25,720
+So what we're doing now: we're converting it to a NumPy array and storing it in an npz file.
+
+133
+00:08:25,720 --> 00:08:34,600
+So what this does, and it's quick: it creates these files here, and these files are basically the entire
+
+134
+00:08:34,600 --> 00:08:36,640
+image dataset in an npz file.
+
+135
+00:08:36,640 --> 00:08:40,350
+It's more like something like a zip file, but it still is a NumPy array.
+
+136
+00:08:40,840 --> 00:08:42,100
+So let's go back to it.
+
+137
+00:08:42,280 --> 00:08:51,310
+And this is a function here that basically acts exactly like the Keras data extraction stuff.
+
+138
+00:08:51,310 --> 00:08:54,700
+So when you have this data loaded, it looks exactly the same way.
+
+139
+00:08:54,790 --> 00:09:00,420
+So this basically loads the training and test data and returns it in this form.
+
+140
+00:09:00,430 --> 00:09:03,500
+So this is what we have done so far.
+
+141
+00:09:03,730 --> 00:09:11,630
+So let's just make sure we can actually load the images that we've created from the... oh no, the arrays, sorry.
+
+142
+00:09:12,100 --> 00:09:15,690
+So these are some random images that have been resized to 150 by 150.
+
+143
+00:09:15,700 --> 00:09:18,880
+That's why some of them may look a bit skewed.
+
+144
+00:09:20,050 --> 00:09:20,890
+And there we go.
+
+145
+00:09:21,070 --> 00:09:23,980
+So we've viewed some of our loaded images.
+
+146
+00:09:23,980 --> 00:09:28,610
+Now let's get the images back in the form Keras expects.
+
+147
+00:09:28,630 --> 00:09:34,600
+So this is the function load data training and test that I've built. It actually loads from
+
+148
+00:09:34,630 --> 00:09:40,250
+our directory here... it specifies the name of the dataset files.
+
+149
+00:09:40,300 --> 00:09:42,530
+Sorry, not a directory, just a file here.
+
+150
+00:09:43,090 --> 00:09:44,230
+And what we do:
+
+151
+00:09:44,320 --> 00:09:45,210
+we load it.
+
+152
+00:09:45,490 --> 00:09:49,700
+Let's run this function, and there we go.
+
+153
+00:09:50,140 --> 00:09:53,560
+So now we've reshaped the data exactly like how Keras wants it.
+
+154
+00:09:53,650 --> 00:10:01,730
+So we have our training data being 2000 images, the dimensions and the depth, the labels, test images, test labels.
+
+155
+00:10:01,900 --> 00:10:02,950
+Perfect.
+
+156
+00:10:02,950 --> 00:10:10,560
+So we're now ready to train a simple CNN to do some classification, to learn how to classify cats vs. dogs.
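For readers following along without the course notebook, here is a rough sketch of the split-and-save pipeline this transcript narrates. The output layout (datasets/catsvsdogs/train and /validation, dogs labeled 1 and cats 0, 150x150 resize, 1000 training and 500 validation images per class, npz dumps) comes from the narration; every path, file name, and helper below is otherwise an assumption, not the course's actual code:

```python
# Hypothetical reconstruction of the lecture's split step -- not the
# course's actual notebook code. Assumes the extracted images live in
# ./datasets/images and are named like cat.123.jpg / dog.456.jpg.
import os
import cv2
import numpy as np

SRC, SIZE = "./datasets/images", 150
TRAIN_N, VAL_N = 1000, 500              # per-class counts from the narration
BASE = "./datasets/catsvsdogs"

for split in ("train", "validation"):
    for cls in ("cats", "dogs"):
        os.makedirs(os.path.join(BASE, split, cls), exist_ok=True)

training_images, training_labels = [], []
test_images, test_labels = [], []
counts = {"cat": 0, "dog": 0}

for fname in sorted(os.listdir(SRC)):
    cls = "dog" if fname.startswith("dog") else "cat"
    img = cv2.imread(os.path.join(SRC, fname))
    if img is None:                      # skip anything unreadable
        continue
    img = cv2.resize(img, (SIZE, SIZE))  # uniform 150x150, as in the video
    label = 1 if cls == "dog" else 0     # dogs = 1, cats = 0
    i = counts[cls]
    if i < TRAIN_N:                      # first 1000 per class -> train
        training_images.append(img)
        training_labels.append(label)
        cv2.imwrite(os.path.join(BASE, "train", cls + "s", f"{i}.jpg"), img)
    elif i < TRAIN_N + VAL_N:            # next 500 per class -> validation
        test_images.append(img)
        test_labels.append(label)
        cv2.imwrite(os.path.join(BASE, "validation", cls + "s",
                                 f"{i - TRAIN_N}.jpg"), img)
    counts[cls] += 1

# Dump the arrays into .npz files -- the "zip-like but still NumPy" files
# the transcript mentions -- so they can be reloaded like a Keras dataset.
np.savez("cats_vs_dogs_training_data.npz", np.array(training_images))
np.savez("cats_vs_dogs_training_labels.npz", np.array(training_labels))
np.savez("cats_vs_dogs_test_data.npz", np.array(test_images))
np.savez("cats_vs_dogs_test_labels.npz", np.array(test_labels))

def load_data_training_and_test(name):
    """Reload the saved arrays as (x_train, y_train), (x_test, y_test)."""
    x_train = np.load(name + "_training_data.npz")["arr_0"]
    y_train = np.load(name + "_training_labels.npz")["arr_0"]
    x_test = np.load(name + "_test_data.npz")["arr_0"]
    y_test = np.load(name + "_test_labels.npz")["arr_0"]
    return (x_train, y_train), (x_test, y_test)
```

A loader like this mirrors the `(x_train, y_train), (x_test, y_test)` tuple shape of Keras's built-in dataset loaders, which is why the transcript says it "acts exactly like the Keras data extraction stuff."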
10. Data Augmentation Cats vs Dogs/2.1 Dataset.html
ADDED
@@ -0,0 +1 @@
+<script type="text/javascript">window.location = "https://drive.google.com/file/d/15duuBYgSiWmy8Ivr1b_ngiydGCNS10t7/view?usp=sharing";</script>
10. Data Augmentation Cats vs Dogs/3. Train a Cats vs. Dogs Classifier.srt
ADDED
@@ -0,0 +1,267 @@
+1
+00:00:00,060 --> 00:00:05,880
+OK, so now that we've loaded our data and brought it into the exact form that Keras needs to train on,
+
+2
+00:00:06,150 --> 00:00:10,240
+we can now start building our cats vs. dogs classifier.
+
+3
+00:00:10,350 --> 00:00:13,300
+So let's begin. OK.
+
+4
+00:00:13,410 --> 00:00:19,470
+Just some quick notes: in the two previous CNNs we made, we used relatively small images.
+
+5
+00:00:19,500 --> 00:00:21,240
+I can say tiny.
+
+6
+00:00:21,390 --> 00:00:24,380
+MNIST was 28 by 28 and CIFAR was 32 by 32.
+
+7
+00:00:24,390 --> 00:00:26,220
+Now we're using 150 by 150.
+
+8
+00:00:26,220 --> 00:00:31,560
+So maybe you don't want to actually always use that when you're training on a large image dataset, because
+
+9
+00:00:31,560 --> 00:00:33,600
+it's going to be quite slow.
+
+10
+00:00:33,720 --> 00:00:35,890
+With this one you will find out how fast it trains.
+
+11
+00:00:35,910 --> 00:00:36,510
+And we'll see.
+
+12
+00:00:36,540 --> 00:00:38,330
+Because now we have a lot less data.
+
+13
+00:00:38,340 --> 00:00:43,000
+We have two thousand images, when previously we had 60,000, 50,000.
+
+14
+00:00:43,020 --> 00:00:44,320
+So let's see what happens.
+
+15
+00:00:46,800 --> 00:00:47,130
+OK.
+
+16
+00:00:47,180 --> 00:00:52,360
+So we're back to our IPython notebook, and we're going to use a simple CNN similar to the one we
+
+17
+00:00:52,360 --> 00:00:54,690
+used in the last CIFAR classifier.
+
+18
+00:00:55,240 --> 00:00:58,650
+Basically, there are only going to be two major differences though.
+
+19
+00:00:58,830 --> 00:01:03,540
+Well, actually three. Firstly, it's going to be a different input shape, and also a different batch size and epochs. I'm not
+
+20
+00:01:03,610 --> 00:01:08,230
+going to really count that as a major difference because those are simple parameters we can always tweak in
+
+21
+00:01:08,300 --> 00:01:09,820
+every model.
+
+22
+00:01:09,820 --> 00:01:15,940
+Secondly though, the second major change is that we're no longer going to use a softmax final activation.
+
+23
+00:01:16,290 --> 00:01:17,290
+We're going to use a sigmoid.
+
+24
+00:01:17,320 --> 00:01:24,610
+And that's because it's a binary classifier, binary meaning only two classes: a yes/no, zero-or-one type classifier.
+
+25
+00:01:24,910 --> 00:01:29,830
+And we're also going to use a different type of loss function called binary cross-entropy, as opposed
+
+26
+00:01:29,830 --> 00:01:36,940
+to what we used before, which was categorical cross-entropy, and that one is used for multiclass
+
+27
+00:01:37,270 --> 00:01:43,800
+classification problems, whereas binary is obviously useful for two-class or binary classification problems.
+
+28
+00:01:44,170 --> 00:01:46,980
+So let's go ahead and create this model.
+
+29
+00:01:46,990 --> 00:01:49,420
+I mean, the code has already run, but I can run it again.
+
+30
+00:01:49,450 --> 00:01:50,320
+There we go.
+
+31
+00:01:50,830 --> 00:01:57,010
+So it's a relatively simple model, as you see, with just about 1.2 million parameters, and now we're about to
+
+32
+00:01:57,010 --> 00:01:57,800
+train our model.
+
+33
+00:01:57,820 --> 00:01:58,390
+OK.
+
+34
+00:01:58,630 --> 00:02:04,960
+So basically I fit the model... it is a good habit for all of us to see the model here, and I'm not going
+
+35
+00:02:04,960 --> 00:02:08,720
+to run this while you watch, because that's going to be tedious to watch.
+
+36
+00:02:08,800 --> 00:02:12,940
+I ran this before I opened this notebook, and I have the results for you here.
+
+37
+00:02:13,060 --> 00:02:18,850
+So basically, only using a thousand images, we were able to get some decent accuracy out of this.
+
+38
+00:02:18,850 --> 00:02:20,530
+Let's see how well it performed.
+
+39
+00:02:20,770 --> 00:02:24,650
+So we ran this at 25 epochs, because it just takes about a minute to run an epoch.
+
+40
+00:02:25,180 --> 00:02:31,870
+And basically the best accuracy we got... as you can see, something important to note: our final accuracy
+
+41
+00:02:31,960 --> 00:02:36,260
+and loss was 72.1 percent, as shown here.
+
+42
+00:02:36,580 --> 00:02:41,830
+But if you take a look at the 25 epochs, you can actually see sometimes the accuracy was higher.
+
+43
+00:02:42,130 --> 00:02:47,350
+It actually peaked around here at 73.6 percent.
+
+44
+00:02:47,380 --> 00:02:55,210
+So technically we actually hit 73.7 percent... actually seventy-four point six percent here, 74 percent
+
+45
+00:02:55,260 --> 00:02:55,630
+here.
+
+46
+00:02:55,680 --> 00:02:57,160
+After a few epochs.
+
+47
+00:02:57,190 --> 00:03:03,460
+So what that means is that basically it's not converging and improving results; basically, what was feeding
+
+48
+00:03:03,460 --> 00:03:06,880
+back into this stuff wasn't getting much better.
+
+49
+00:03:07,510 --> 00:03:10,390
+So that was the peak accuracy we got there.
+
+50
+00:03:10,390 --> 00:03:11,810
+So let's see... our model.
+
+51
+00:03:11,860 --> 00:03:13,480
+So we can actually review our model.
+
+52
+00:03:13,480 --> 00:03:15,280
+Now this is the model I just trained.
+
+53
+00:03:15,310 --> 00:03:17,290
+Hopefully... no, it's not.
+
+54
+00:03:17,320 --> 00:03:19,500
+Let's actually get this model here.
+
+55
+00:03:21,440 --> 00:03:23,040
+And let's see how it performs.
+
+56
+00:03:23,350 --> 00:03:23,940
+Yep.
+
+57
+00:03:24,130 --> 00:03:24,700
+Got it right.
+
+58
+00:03:24,700 --> 00:03:34,350
+It's a dog... that's actually a dog, not a cat. Cat, cat, cat, dog, dog, dog, dog, dog. That's actually pretty good.
+
+59
+00:03:34,400 --> 00:03:36,970
+That's like eight or nine out of ten correct.
+
+60
+00:03:37,300 --> 00:03:40,910
+So that was also pretty good, but could it be better?
+
+61
+00:03:40,960 --> 00:03:42,350
+And yes, it can be better.
+
+62
+00:03:42,400 --> 00:03:47,620
+And now we're going to introduce the concept of data augmentation, because it solves the problem of
+
+63
+00:03:47,650 --> 00:03:52,080
+using fewer images. Because remember, deep learning...
+
+64
+00:03:52,270 --> 00:03:54,610
+Basically, everyone says deep learning needs thousands of images.
+
+65
+00:03:54,610 --> 00:03:57,130
+I'm going to show you how you don't need tens of thousands of images.
+
+66
+00:03:57,130 --> 00:04:02,620
+We simply need a thousand images at least for each class, and we can create a pretty good classifier
+
+67
+00:04:02,710 --> 00:04:03,410
+out of it.
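As a reference for the model this transcript describes, here is a minimal hedged sketch of a binary cats-vs-dogs CNN. The 150x150x3 input, sigmoid output, and binary cross-entropy loss come from the narration; the layer stack and optimizer are illustrative assumptions and will not necessarily match the course model's 1.2 million parameters:

```python
# Hedged sketch of a binary classifier, not the course's exact model.
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Conv2D, MaxPooling2D, Flatten, Dense

model = Sequential([
    Conv2D(32, (3, 3), activation="relu", input_shape=(150, 150, 3)),
    MaxPooling2D((2, 2)),
    Conv2D(64, (3, 3), activation="relu"),
    MaxPooling2D((2, 2)),
    Flatten(),
    Dense(64, activation="relu"),
    Dense(1, activation="sigmoid"),      # single unit: dog = 1, cat = 0
])

# Binary cross-entropy replaces categorical cross-entropy because there
# are only two classes, matching the sigmoid output.
model.compile(loss="binary_crossentropy",
              optimizer="rmsprop",
              metrics=["accuracy"])
model.summary()
```

The sigmoid/binary-cross-entropy pairing is the key point: with one output unit, the model predicts P(dog), and a softmax over two units plus categorical cross-entropy would be the equivalent multiclass formulation.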
10. Data Augmentation Cats vs Dogs/4. Boosting Accuracy with Data Augmentation.srt
ADDED
|
@@ -0,0 +1,283 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1
00:00:00,740 --> 00:00:05,760
Hi, welcome back to Chapter 10.3, where we're actually going to learn how to boost accuracy

2
00:00:05,760 --> 00:00:07,260
with data augmentation.

3
00:00:07,260 --> 00:00:12,810
So let's first talk a bit about data augmentation. In the previous two CNNs, where we trained simple

4
00:00:12,810 --> 00:00:18,210
classifiers, we had thousands and thousands of images, because that's generally how many examples even deep learning

5
00:00:18,210 --> 00:00:20,460
models usually need to train well.

6
00:00:20,490 --> 00:00:24,790
But what if we only had a thousand examples per class? Let's see what happens here.

7
00:00:25,200 --> 00:00:28,590
So this is what data augmentation actually is.

8
00:00:28,590 --> 00:00:34,470
If we had a thousand images but we didn't have much variation in the images, we can use Keras's data

9
00:00:34,470 --> 00:00:38,350
manipulation to basically generate all different types of versions of this image.

10
00:00:38,390 --> 00:00:44,010
Here you can see shearing, skewing, rotations, zooming, and shifting left and right, and you can see

11
00:00:44,400 --> 00:00:47,830
it doesn't leave a blank space in these images where they've got empty regions.

12
00:00:47,910 --> 00:00:52,590
It actually does something which kind of fills in the lost color here, and we'll talk about that soon.

13
00:00:52,590 --> 00:00:58,050
So this is how Keras actually creates data manipulations: we have one simple image and now we get

14
00:00:58,110 --> 00:01:00,090
these six images out of it.

15
00:01:00,090 --> 00:01:07,710
So it basically helps our classifier not overfit to the original dataset. Especially when we have less

16
00:01:07,710 --> 00:01:14,720
data, overfitting becomes quite common because there's less variety in the dataset. So the benefits of

17
00:01:14,720 --> 00:01:18,950
data augmentation are that it takes a small dataset and makes it much larger.

18
00:01:18,950 --> 00:01:25,010
It's very easy to use, with much less effort than creating a new dataset. It helps our classifier as it adds

19
00:01:25,010 --> 00:01:30,800
tons of variations to the images, as we just saw, and it reduces overfitting by increasing the variety

20
00:01:31,220 --> 00:01:33,430
in our training dataset.

21
00:01:33,440 --> 00:01:37,480
So this is how we use Keras's data augmentation API.

22
00:01:37,510 --> 00:01:39,700
It creates just-in-time augmented datasets.

23
00:01:39,700 --> 00:01:46,270
What that means is that it doesn't actually take a thousand images and make two to six thousand images based

24
00:01:46,360 --> 00:01:49,130
on how many different augmentation variations we want.

25
00:01:49,310 --> 00:01:55,430
What it does is that during each batch it does the manipulation in real time, so that in each

26
00:01:55,430 --> 00:01:56,030
epoch,

27
00:01:56,060 --> 00:02:01,610
the first epoch basically will never be the same as the last epoch, even though it's the same dataset,

28
00:02:02,060 --> 00:02:09,080
because each image is randomly manipulated to generate the manipulations

29
00:02:09,080 --> 00:02:12,020
we just saw here.

30
00:02:12,220 --> 00:02:14,190
So let's see how we use this.

31
00:02:14,200 --> 00:02:19,540
So basically it's quite simple. You'll see it in the code, but this is the essential part of it here.

32
00:02:19,600 --> 00:02:26,880
So we create an image generator here and we specify the types of augmentations we want. Rescaling is

33
00:02:26,920 --> 00:02:27,920
basically normalizing.

34
00:02:27,940 --> 00:02:34,660
As we did before, we bring it between 0 and 1. Shearing is basically sort of like twisting the image

35
00:02:34,660 --> 00:02:38,680
slightly (I'll show you an example soon). Zoom range is zooming in and out.

36
00:02:38,670 --> 00:02:44,400
So you have some data that's lost because it's zoomed in and cropped. And we have horizontal flips.

37
00:02:44,440 --> 00:02:50,590
And for the test set we just simply rescale here, because we don't want to manipulate the test data.

38
00:02:50,650 --> 00:02:53,150
We want the test set to remain as is.

39
00:02:53,290 --> 00:02:58,830
We just want to add manipulations to the training dataset.

40
00:02:59,080 --> 00:03:00,520
So this is the second part here.

41
00:03:00,550 --> 00:03:01,980
This is the actual generator.

42
00:03:01,970 --> 00:03:07,350
Now this is where we actually pull the images; it generates and returns images during training.

43
00:03:07,380 --> 00:03:13,510
The first part was just specifying how we wanted our manipulations to be, so we take what we defined here, which

44
00:03:13,510 --> 00:03:21,030
is the train datagen and the test datagen, and we basically tell it to take images from a directory.

45
00:03:21,080 --> 00:03:22,430
This is one way you can do it,

46
00:03:22,440 --> 00:03:28,520
but I actually find it the best way, and we specify flow_from_directory; this can be the path to the images here.

47
00:03:28,540 --> 00:03:35,530
So in our case it was datasets/catsvsdogs/train, and it automatically picks the two classes

48
00:03:35,530 --> 00:03:36,120
there.

49
00:03:36,250 --> 00:03:40,340
We know it's a binary classifier because it's two classes, but we still have to specify it here.

50
00:03:40,810 --> 00:03:46,350
And we basically tell it the target size, which is 150 by 150, which we defined outside of this

51
00:03:46,410 --> 00:03:48,520
here, and the batch size.

52
00:03:48,640 --> 00:03:51,730
And we do the same for the test datagen here as well.

53
00:03:52,300 --> 00:03:59,350
So basically, what the flow_from_directory function does is it takes the image data and creates an iterator,

54
00:03:59,800 --> 00:04:05,230
which is a memory-efficient method of returning a sequence of data. It's also useful when we actually

55
00:04:05,230 --> 00:04:09,920
have the files stored in directories.

56
00:04:10,010 --> 00:04:18,010
Lastly, this is how we actually fit the model. Now, instead of actually using test data and training data

57
00:04:18,010 --> 00:04:19,320
and all those things,

58
00:04:19,450 --> 00:04:20,300
it's a bit different now.

59
00:04:20,320 --> 00:04:26,640
When we're doing model.fit_generator, we specify a train generator.

60
00:04:26,650 --> 00:04:29,730
I should say this is where we get our images from here.

61
00:04:35,600 --> 00:04:36,600
Actually, this is misnamed.

62
00:04:36,600 --> 00:04:41,230
This should actually be named the train generator, not the test generator.

63
00:04:41,630 --> 00:04:44,110
That's my bad.

64
00:04:44,110 --> 00:04:46,800
Then here we define steps per epoch.

65
00:04:46,930 --> 00:04:48,820
Now this is different.

66
00:04:48,820 --> 00:04:54,750
What this is, is the number of examples here, which would be a thousand, divided by the batch size.

67
00:04:54,970 --> 00:04:55,260
OK.

68
00:04:55,290 --> 00:04:59,650
That's the steps-per-epoch parameter, and the same for the validation steps here.

69
00:04:59,950 --> 00:05:07,090
Don't overthink this; it's just the particular way our generators actually work. And that's it, we can start

70
00:05:07,090 --> 00:05:09,430
training our datasets,

71
00:05:09,640 --> 00:05:12,820
this time with data augmentation.
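The pipeline this transcript walks through can be sketched in code. The following is a minimal Python reconstruction, assuming TensorFlow's Keras API and the datasets/catsvsdogs directory layout used in the notebook later in this commit; the augmentation strengths, sample counts, epoch count, and the pre-built `model` are illustrative assumptions, not the course's exact values.

    # Minimal sketch of the augmentation pipeline described above
    # (assumptions noted in the lead-in)
    from tensorflow.keras.preprocessing.image import ImageDataGenerator

    img_width, img_height = 150, 150
    batch_size = 16
    nb_train_samples = 2000        # assumed counts; use your own dataset sizes
    nb_validation_samples = 1000

    # Training generator: rescaling normalizes pixels to [0, 1];
    # the remaining options apply random manipulations
    train_datagen = ImageDataGenerator(
        rescale=1./255,
        shear_range=0.2,           # "twisting" the image slightly
        zoom_range=0.2,            # zooming in/out (some cropped data is lost)
        horizontal_flip=True)

    # Test/validation generator: rescale only, so the test set remains as is
    validation_datagen = ImageDataGenerator(rescale=1./255)

    # flow_from_directory builds a memory-efficient iterator
    # over the two class subfolders
    train_generator = train_datagen.flow_from_directory(
        './datasets/catsvsdogs/train',
        target_size=(img_width, img_height),
        batch_size=batch_size,
        class_mode='binary')       # two classes, but we still have to specify it

    validation_generator = validation_datagen.flow_from_directory(
        './datasets/catsvsdogs/validation',
        target_size=(img_width, img_height),
        batch_size=batch_size,
        class_mode='binary')

    # fit_generator draws freshly augmented batches just in time;
    # steps_per_epoch is the number of examples divided by the batch size
    history = model.fit_generator(
        train_generator,
        steps_per_epoch=nb_train_samples // batch_size,
        epochs=5,
        validation_data=validation_generator,
        validation_steps=nb_validation_samples // batch_size)

Because the manipulations are applied per batch at training time, no two epochs see exactly the same pixels even though the files on disk never change.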
10. Data Augmentation Cats vs Dogs/5. Types of Data Augmentation.srt
ADDED
|
@@ -0,0 +1,351 @@
1
00:00:00,690 --> 00:00:02,640
Welcome back to 10.4.

2
00:00:02,690 --> 00:00:07,530
It's a brief chapter where I talk about the different types of data augmentation that Keras offers.

3
00:00:07,920 --> 00:00:09,580
So let's quickly look at it here.

4
00:00:09,620 --> 00:00:14,430
So if you go to this link here, you can actually see a number of types of augmentation abilities that

5
00:00:14,430 --> 00:00:18,680
Keras has. In practice, the ones I found most useful are just here.

6
00:00:19,020 --> 00:00:20,970
Do go to that link and check them all out.

7
00:00:21,030 --> 00:00:26,580
But for now I wanted to explain what shearing is, because a lot of people have asked me what shearing

8
00:00:26,580 --> 00:00:27,140
is.

9
00:00:27,360 --> 00:00:32,280
Basically, it's a type of distortion where we sort of manipulate the perspective we're looking

10
00:00:32,280 --> 00:00:38,490
at the image from. Say, imagine this is a square that was here; after some x-shearing it actually looks slanted

11
00:00:38,490 --> 00:00:43,810
like this. That's what shearing does. And fill modes, which I explained in the last slide:

12
00:00:44,010 --> 00:00:46,640
basically we can have constant, nearest, reflect or wrap.

13
00:00:46,650 --> 00:00:48,970
That's basically how each of them looks different.

14
00:00:48,990 --> 00:00:50,460
It is always good to use nearest.

15
00:00:50,470 --> 00:00:55,750
However, you can definitely experiment with these and see, depending on your dataset.

16
00:00:55,770 --> 00:00:59,210
If it's real-world images, I would probably always use nearest.

17
00:00:59,400 --> 00:01:04,020
However, for certain things like numbers, you may want to try something different;

18
00:01:04,170 --> 00:01:07,290
constant might be better for digits and characters.

19
00:01:07,400 --> 00:01:12,640
All right, and what's good to know is that nearest doesn't really add new features.

20
00:01:12,780 --> 00:01:15,280
It isn't going to confuse our classifier too much.

21
00:01:15,630 --> 00:01:20,820
OK, so what I'm going to do quickly now is go back to the IPython Notebook, and I'm going to show you how to

22
00:01:20,820 --> 00:01:24,680
create many different manipulations of an image randomly, just for fun.

23
00:01:24,690 --> 00:01:28,830
You're not actually going to use this much, unless you want to actually see and visualize how your manipulations

24
00:01:28,860 --> 00:01:30,420
are taking place.

25
00:01:30,430 --> 00:01:31,620
And before we even do that,

26
00:01:31,800 --> 00:01:35,360
let's actually go back to Keras's site and we can take a look.

27
00:01:35,390 --> 00:01:39,660
It really lists up here all of the different augmentation techniques.

28
00:01:39,660 --> 00:01:45,840
You have different types of pre-processing and normalization; you have different types of ZCA- and PCA-based

29
00:01:45,840 --> 00:01:48,110
options that do things like feature extraction.

30
00:01:48,360 --> 00:01:52,860
And you see ZCA whitening is quite commonly used, or at least used to be quite commonly used, in computer

31
00:01:52,860 --> 00:01:53,600
vision.

32
00:01:53,730 --> 00:01:55,770
I don't think with deep learning it's used that much anymore,

33
00:01:55,880 --> 00:01:58,770
but Keras still offers it, which is nice.

34
00:01:58,980 --> 00:02:05,100
We have brightness, we have zooming, channel shifting, the different fill modes I mentioned, vertical flips,

35
00:02:05,100 --> 00:02:07,930
rescaling, which is what we did before.

36
00:02:08,430 --> 00:02:12,720
So it's actually quite useful, and you can actually see some example code here on how to actually use

37
00:02:13,170 --> 00:02:14,180
some of these things.

38
00:02:14,330 --> 00:02:18,100
OK, so now let's go back to the IPython Notebook.

39
00:02:18,100 --> 00:02:18,810
All right.

40
00:02:18,950 --> 00:02:23,910
And we're actually going to see... where is it? Here.

41
00:02:24,590 --> 00:02:31,140
Let's open it here, and let's actually go to augmentation here.

42
00:02:31,360 --> 00:02:33,320
And let's open up the demos notebook here,

43
00:02:33,330 --> 00:02:38,030
the data augmentation demos notebook.

44
00:02:38,330 --> 00:02:41,800
So what this first cell here basically does is just load the dataset.

45
00:02:46,050 --> 00:02:50,920
That happens the first time you run something with matplotlib; sometimes it doesn't actually show the graph.

46
00:02:50,940 --> 00:02:54,220
All right, so now we see some random digits here.

47
00:02:54,220 --> 00:02:57,490
This is with no manipulations right now, the untouched dataset.

48
00:02:57,600 --> 00:02:59,980
Now how about we do some random rotations?

49
00:03:00,130 --> 00:03:01,020
OK.

50
00:03:01,150 --> 00:03:06,460
I'm going to actually run it; it doesn't take that long to run.

51
00:03:06,640 --> 00:03:08,080
So here we see some random rotations.

52
00:03:08,080 --> 00:03:14,170
You can see some of these here in the middle actually don't look rotated at all; the two and seven definitely

53
00:03:14,170 --> 00:03:15,280
are rotated.

54
00:03:15,280 --> 00:03:19,750
What about random shearing and zooming? Zooming, definitely you can tell: this is zoomed,

55
00:03:19,750 --> 00:03:20,860
this is zoomed,

56
00:03:20,860 --> 00:03:26,020
this looks slightly zoomed. If I'm not mistaken, shearing isn't as obvious to see, but definitely I think

57
00:03:26,380 --> 00:03:30,040
the four is sheared slightly, the nine is sheared slightly,

58
00:03:30,190 --> 00:03:31,620
definitely the two.

59
00:03:31,720 --> 00:03:36,830
But you can see how shearing can take one person's handwritten digit and turn it into maybe someone else's

60
00:03:37,000 --> 00:03:37,650
one.

61
00:03:37,870 --> 00:03:41,260
So you're creating more data from existing data.

62
00:03:41,290 --> 00:03:43,840
What about horizontal and vertical flips?

63
00:03:43,870 --> 00:03:46,670
I've got no idea why you would actually do this; you should not do

64
00:03:46,840 --> 00:03:52,450
horizontal or vertical flips on digits. Digits are generally always going to be written the standard way, unless

65
00:03:52,450 --> 00:03:54,390
someone is doing something very weird.

66
00:03:54,640 --> 00:03:56,230
Not sure why anyone would do that.

67
00:03:56,560 --> 00:03:58,160
So that's fine.

68
00:03:58,480 --> 00:04:00,470
We can do some random shifts as well.

69
00:04:00,470 --> 00:04:05,350
I can definitely see this one, and this five. Actually, all of them have been shifted, because the digits are usually

70
00:04:05,350 --> 00:04:07,060
centered in the middle.

71
00:04:07,090 --> 00:04:08,540
None of them seem centered right now.

72
00:04:09,420 --> 00:04:14,830
And you can apply all of them at once to get some very random distortions here, which is pretty cool.

73
00:04:15,240 --> 00:04:20,670
You can take a look at my code to see how it's done. And now, this is what was once the issue: you remember

74
00:04:20,670 --> 00:04:22,940
the dog image I mentioned here?

75
00:04:23,310 --> 00:04:24,580
Let's bring it up.

76
00:04:25,050 --> 00:04:31,200
Let's see what happens if we run this combination of augmentations here, and we do this, say, six times.

77
00:04:31,500 --> 00:04:32,390
What's going to happen?

78
00:04:32,430 --> 00:04:34,060
So let's run this.

79
00:04:34,500 --> 00:04:38,330
And it saves them in a separate output directory.

80
00:04:38,550 --> 00:04:40,410
So let's go to that directory actually, here.

81
00:04:42,110 --> 00:04:42,800
Nice.

82
00:04:42,810 --> 00:04:45,980
So now we see we have the six images; actually, it looks like more.

83
00:04:45,980 --> 00:04:52,410
Maybe it did not write over my previous images. But now we can see exactly what the image manipulations,

84
00:04:52,820 --> 00:04:54,710
what they did to this poor dog.

85
00:04:54,890 --> 00:04:57,490
He is flying all over the place, which is quite cool.

86
00:04:57,890 --> 00:05:03,860
So you can see how we have one image and it has become many, and we can actually play with these parameters

87
00:05:03,860 --> 00:05:07,700
here and keep generating more and more types of combinations.

88
00:05:07,700 --> 00:05:13,040
So basically, this is a nice way to visualize exactly what our ImageDataGenerator is doing.
10. Data Augmentation/10.1 - 10.3 - Data Augmentation - Cats vs Dogs.ipynb
ADDED
|
@@ -0,0 +1,802 @@
| 1 |
+
{
|
| 2 |
+
"cells": [
|
| 3 |
+
{
|
| 4 |
+
"cell_type": "markdown",
|
| 5 |
+
"metadata": {},
|
| 6 |
+
"source": [
|
| 7 |
+
"# Cats vs Dogs\n",
|
| 8 |
+
"\n",
|
| 9 |
+
"### Loading our images \n",
|
| 10 |
+
"- Images are labeled catxxx.jpg and dogxxx.jpg"
|
| 11 |
+
]
|
| 12 |
+
},
|
| 13 |
+
{
|
| 14 |
+
"cell_type": "code",
|
| 15 |
+
"execution_count": 1,
|
| 16 |
+
"metadata": {},
|
| 17 |
+
"outputs": [
|
| 18 |
+
{
|
| 19 |
+
"name": "stdout",
|
| 20 |
+
"output_type": "stream",
|
| 21 |
+
"text": [
|
| 22 |
+
"3002 images loaded\n"
|
| 23 |
+
]
|
| 24 |
+
}
|
| 25 |
+
],
|
| 26 |
+
"source": [
|
| 27 |
+
"# Get filenames in list\n",
|
| 28 |
+
"from os import listdir\n",
|
| 29 |
+
"from os.path import isfile, join\n",
|
| 30 |
+
"\n",
|
| 31 |
+
"mypath = \"./datasets/catsvsdogs/images/\"\n",
|
| 32 |
+
"\n",
|
| 33 |
+
"file_names = [f for f in listdir(mypath) if isfile(join(mypath, f))]\n",
|
| 34 |
+
"\n",
|
| 35 |
+
"print(str(len(file_names)) + ' images loaded')"
|
| 36 |
+
]
|
| 37 |
+
},
|
| 38 |
+
{
|
| 39 |
+
"cell_type": "markdown",
|
| 40 |
+
"metadata": {},
|
| 41 |
+
"source": [
|
| 42 |
+
"### Splitting our loaded images into a training and test/validation dataset\n",
|
| 43 |
+
"- We also need to store their labels (i.e. y_train and y_test)\n",
|
| 44 |
+
"- We re-size our images here to maintain a constant dimension of 150 x 150\n",
|
| 45 |
+
"- We're going to use 1000 images of dogs and 1000 images of cats as our training data\n",
|
| 46 |
+
"- For our test/validation dataset we're going to use 500 of each class\n",
|
| 47 |
+
"- Dogs will be labels 1 and cats 0\n",
|
| 48 |
+
"- We store our new images in the following directories\n",
|
| 49 |
+
" - /datasets/catsvsdogs/train/dogs\n",
|
| 50 |
+
" - /datasets/catsvsdogs/train/cats\n",
|
| 51 |
+
" - /datasets/catsvsdogs/validation/dogs\n",
|
| 52 |
+
" - /datasets/catsvsdogs/validation/cats"
|
| 53 |
+
]
|
| 54 |
+
},
|
| 55 |
+
{
|
| 56 |
+
"cell_type": "code",
|
| 57 |
+
"execution_count": 2,
|
| 58 |
+
"metadata": {},
|
| 59 |
+
"outputs": [
|
| 60 |
+
{
|
| 61 |
+
"name": "stdout",
|
| 62 |
+
"output_type": "stream",
|
| 63 |
+
"text": [
|
| 64 |
+
"Training and Test Data Extraction Complete\n"
|
| 65 |
+
]
|
| 66 |
+
}
|
| 67 |
+
],
|
| 68 |
+
"source": [
|
| 69 |
+
"import cv2\n",
|
| 70 |
+
"import numpy as np\n",
|
| 71 |
+
"import sys\n",
|
| 72 |
+
"import os\n",
|
| 73 |
+
"import shutil\n",
|
| 74 |
+
"\n",
|
| 75 |
+
"# Extract 1000 for our training data and 500 for our validation set\n",
|
| 76 |
+
"# Takes about ~20 seconds to run\n",
|
| 77 |
+
"dog_count = 0\n",
|
| 78 |
+
"cat_count = 0\n",
|
| 79 |
+
"training_size = 1000\n",
|
| 80 |
+
"test_size = 500\n",
|
| 81 |
+
"training_images = []\n",
|
| 82 |
+
"training_labels = []\n",
|
| 83 |
+
"test_images = []\n",
|
| 84 |
+
"test_labels = []\n",
|
| 85 |
+
"size = 150\n",
|
| 86 |
+
"dog_dir_train = \"./datasets/catsvsdogs/train/dogs/\"\n",
|
| 87 |
+
"cat_dir_train = \"./datasets/catsvsdogs/train/cats/\"\n",
|
| 88 |
+
"dog_dir_val = \"./datasets/catsvsdogs/validation/dogs/\"\n",
|
| 89 |
+
"cat_dir_val = \"./datasets/catsvsdogs/validation/cats/\"\n",
|
| 90 |
+
"\n",
|
| 91 |
+
"def make_dir(directory):\n",
|
| 92 |
+
" if os.path.exists(directory):\n",
|
| 93 |
+
" shutil.rmtree(directory)\n",
|
| 94 |
+
" os.makedirs(directory)\n",
|
| 95 |
+
"\n",
|
| 96 |
+
"make_dir(dog_dir_train)\n",
|
| 97 |
+
"make_dir(cat_dir_train)\n",
|
| 98 |
+
"make_dir(dog_dir_val)\n",
|
| 99 |
+
"make_dir(cat_dir_val)\n",
|
| 100 |
+
"\n",
|
| 101 |
+
"def getZeros(number):\n",
|
| 102 |
+
" if(number > 10 and number < 100):\n",
|
| 103 |
+
" return \"0\"\n",
|
| 104 |
+
" if(number < 10):\n",
|
| 105 |
+
" return \"00\"\n",
|
| 106 |
+
" else:\n",
|
| 107 |
+
" return \"\"\n",
|
| 108 |
+
"\n",
|
| 109 |
+
"for i, file in enumerate(file_names):\n",
|
| 110 |
+
" \n",
|
| 111 |
+
" if file_names[i][0] == \"d\":\n",
|
| 112 |
+
" dog_count += 1\n",
|
| 113 |
+
" image = cv2.imread(mypath+file)\n",
|
| 114 |
+
" image = cv2.resize(image, (size, size), interpolation = cv2.INTER_AREA)\n",
|
| 115 |
+
" if dog_count <= training_size:\n",
|
| 116 |
+
" training_images.append(image)\n",
|
| 117 |
+
" training_labels.append(1)\n",
|
| 118 |
+
" zeros = getZeros(dog_count)\n",
|
| 119 |
+
" cv2.imwrite(dog_dir_train + \"dog\" + str(zeros) + str(dog_count) + \".jpg\", image)\n",
|
| 120 |
+
" if dog_count > training_size and dog_count <= training_size+test_size:\n",
|
| 121 |
+
" test_images.append(image)\n",
|
| 122 |
+
" test_labels.append(1)\n",
|
| 123 |
+
" zeros = getZeros(dog_count-1000)\n",
|
| 124 |
+
" cv2.imwrite(dog_dir_val + \"dog\" + str(zeros) + str(dog_count-1000) + \".jpg\", image)\n",
|
| 125 |
+
" \n",
|
| 126 |
+
" if file_names[i][0] == \"c\":\n",
|
| 127 |
+
" cat_count += 1\n",
|
| 128 |
+
" image = cv2.imread(mypath+file)\n",
|
| 129 |
+
" image = cv2.resize(image, (size, size), interpolation = cv2.INTER_AREA)\n",
|
| 130 |
+
" if cat_count <= training_size:\n",
|
| 131 |
+
" training_images.append(image)\n",
|
| 132 |
+
" training_labels.append(0)\n",
|
| 133 |
+
" zeros = getZeros(cat_count)\n",
|
| 134 |
+
" cv2.imwrite(cat_dir_train + \"cat\" + str(zeros) + str(cat_count) + \".jpg\", image)\n",
|
| 135 |
+
" if cat_count > training_size and cat_count <= training_size+test_size:\n",
|
| 136 |
+
" test_images.append(image)\n",
|
| 137 |
+
" test_labels.append(0)\n",
|
| 138 |
+
" zeros = getZeros(cat_count-1000)\n",
|
| 139 |
+
" cv2.imwrite(cat_dir_val + \"cat\" + str(zeros) + str(cat_count-1000) + \".jpg\", image)\n",
|
| 140 |
+
"\n",
|
| 141 |
+
" if dog_count == training_size+test_size and cat_count == training_size+test_size:\n",
|
| 142 |
+
" break\n",
|
| 143 |
+
"\n",
|
| 144 |
+
"print(\"Training and Test Data Extraction Complete\")"
|
| 145 |
+
]
|
| 146 |
+
},
|
| 147 |
+
{
|
| 148 |
+
"cell_type": "markdown",
|
| 149 |
+
"metadata": {},
|
| 150 |
+
"source": [
|
| 151 |
+
"### Let's save our dataset's to NPZ files"
|
| 152 |
+
]
|
| 153 |
+
},
|
| 154 |
+
{
|
| 155 |
+
"cell_type": "code",
|
| 156 |
+
"execution_count": 4,
|
| 157 |
+
"metadata": {},
|
| 158 |
+
"outputs": [],
|
| 159 |
+
"source": [
|
| 160 |
+
"# Using numpy's savez function to store our loaded data as NPZ files\n",
|
| 161 |
+
"np.savez('cats_vs_dogs_training_data.npz', np.array(training_images))\n",
|
| 162 |
+
"np.savez('cats_vs_dogs_training_labels.npz', np.array(training_labels))\n",
|
| 163 |
+
"np.savez('cats_vs_dogs_test_data.npz', np.array(test_images))\n",
|
| 164 |
+
"np.savez('cats_vs_dogs_test_labels.npz', np.array(test_labels))"
|
| 165 |
+
]
|
| 166 |
+
},
|
| 167 |
+
{
|
| 168 |
+
"cell_type": "code",
|
| 169 |
+
"execution_count": 3,
|
| 170 |
+
"metadata": {},
|
| 171 |
+
"outputs": [],
|
| 172 |
+
"source": [
|
| 173 |
+
"# Loader Function\n",
|
| 174 |
+
"import numpy as np\n",
|
| 175 |
+
"\n",
|
| 176 |
+
"def load_data_training_and_test(datasetname):\n",
|
| 177 |
+
" \n",
|
| 178 |
+
" npzfile = np.load(datasetname + \"_training_data.npz\")\n",
|
| 179 |
+
" train = npzfile['arr_0']\n",
|
| 180 |
+
" \n",
|
| 181 |
+
" npzfile = np.load(datasetname + \"_training_labels.npz\")\n",
|
| 182 |
+
" train_labels = npzfile['arr_0']\n",
|
| 183 |
+
" \n",
|
| 184 |
+
" npzfile = np.load(datasetname + \"_test_data.npz\")\n",
|
| 185 |
+
" test = npzfile['arr_0']\n",
|
| 186 |
+
" \n",
|
| 187 |
+
" npzfile = np.load(datasetname + \"_test_labels.npz\")\n",
|
| 188 |
+
" test_labels = npzfile['arr_0']\n",
|
| 189 |
+
"\n",
|
| 190 |
+
" return (train, train_labels), (test, test_labels)"
|
| 191 |
+
]
|
| 192 |
+
},
|
| 193 |
+
{
|
| 194 |
+
"cell_type": "markdown",
|
| 195 |
+
"metadata": {},
|
| 196 |
+
"source": [
|
| 197 |
+
"### Let's view some of our loaded images"
|
| 198 |
+
]
|
| 199 |
+
},
|
| 200 |
+
{
|
| 201 |
+
"cell_type": "code",
|
| 202 |
+
"execution_count": 5,
|
| 203 |
+
"metadata": {
|
| 204 |
+
"scrolled": true
|
| 205 |
+
},
|
| 206 |
+
"outputs": [
|
| 207 |
+
{
|
| 208 |
+
"name": "stdout",
|
| 209 |
+
"output_type": "stream",
|
| 210 |
+
"text": [
|
| 211 |
+
"1 - Cat\n",
|
| 212 |
+
"2 - Cat\n",
|
| 213 |
+
"3 - Dog\n",
|
| 214 |
+
"4 - Cat\n",
|
| 215 |
+
"5 - Cat\n",
|
| 216 |
+
"6 - Dog\n",
|
| 217 |
+
"7 - Cat\n",
|
| 218 |
+
"8 - Cat\n",
|
| 219 |
+
"9 - Dog\n",
|
| 220 |
+
"10 - Dog\n"
|
| 221 |
+
]
|
| 222 |
+
}
|
| 223 |
+
],
|
| 224 |
+
"source": [
|
| 225 |
+
"for i in range(1,11):\n",
|
| 226 |
+
" random = np.random.randint(0, len(training_images))\n",
|
| 227 |
+
" cv2.imshow(\"image_\"+str(i), training_images[random])\n",
|
| 228 |
+
" if training_labels[random] == 0:\n",
|
| 229 |
+
" print(str(i) + \" - Cat\")\n",
|
| 230 |
+
" else:\n",
|
| 231 |
+
" print(str(i)+ \" - Dog\")\n",
|
| 232 |
+
" cv2.waitKey(0)\n",
|
| 233 |
+
" \n",
|
| 234 |
+
"cv2.destroyAllWindows()"
|
| 235 |
+
]
|
| 236 |
+
},
|
| 237 |
+
{
|
| 238 |
+
"cell_type": "markdown",
|
| 239 |
+
"metadata": {},
|
| 240 |
+
"source": [
|
| 241 |
+
"### Let's get our data ready in the format expected by Keras\n",
|
| 242 |
+
"- We also stick the previous naming convention "
|
| 243 |
+
]
|
| 244 |
+
},
|
| 245 |
+
{
|
| 246 |
+
"cell_type": "code",
|
| 247 |
+
"execution_count": 6,
|
| 248 |
+
"metadata": {},
|
| 249 |
+
"outputs": [
|
| 250 |
+
{
|
| 251 |
+
"name": "stdout",
|
| 252 |
+
"output_type": "stream",
|
| 253 |
+
"text": [
|
| 254 |
+
"(2000, 150, 150, 3)\n",
|
| 255 |
+
"(2000, 1)\n",
|
| 256 |
+
"(1000, 150, 150, 3)\n",
|
| 257 |
+
"(1000, 1)\n"
|
| 258 |
+
]
|
| 259 |
+
}
|
| 260 |
+
],
|
| 261 |
+
"source": [
|
| 262 |
+
"(x_train, y_train), (x_test, y_test) = load_data_training_and_test(\"cats_vs_dogs\")\n",
|
| 263 |
+
"\n",
|
| 264 |
+
"# Reshaping our label data from (2000,) to (2000,1) and test data from (1000,) to (1000,1)\n",
|
| 265 |
+
"y_train = y_train.reshape(y_train.shape[0], 1)\n",
|
| 266 |
+
"y_test = y_test.reshape(y_test.shape[0], 1)\n",
|
| 267 |
+
"\n",
|
| 268 |
+
"# Change our image type to float32 data type\n",
|
| 269 |
+
"x_train = x_train.astype('float32')\n",
|
| 270 |
+
"x_test = x_test.astype('float32')\n",
|
| 271 |
+
"\n",
|
| 272 |
+
"# Normalize our data by changing the range from (0 to 255) to (0 to 1)\n",
|
| 273 |
+
"x_train /= 255\n",
|
| 274 |
+
"x_test /= 255\n",
|
| 275 |
+
"\n",
|
| 276 |
+
"print(x_train.shape)\n",
|
| 277 |
+
"print(y_train.shape)\n",
|
| 278 |
+
"print(x_test.shape)\n",
|
| 279 |
+
"print(y_test.shape)"
|
| 280 |
+
]
|
| 281 |
+
},
|
| 282 |
+
{
|
| 283 |
+
"cell_type": "markdown",
|
| 284 |
+
"metadata": {},
|
| 285 |
+
"source": [
|
| 286 |
+
"### Let's create our model using a simple CNN that similar to what we used for CIFAR10\n",
|
| 287 |
+
"- Except now we use a Sigmoid instead of Softmax\n",
|
| 288 |
+
"- **Sigmoids are used when we're doing binary (i.e. two class) classification\n",
|
| 289 |
+
"- Note the binary_crossentropy loss"
|
| 290 |
+
]
|
| 291 |
+
},
|
| 292 |
+
{
|
| 293 |
+
"cell_type": "code",
|
| 294 |
+
"execution_count": 8,
|
| 295 |
+
"metadata": {},
|
| 296 |
+
"outputs": [
|
| 297 |
+
{
|
| 298 |
+
"name": "stdout",
|
| 299 |
+
"output_type": "stream",
|
| 300 |
+
"text": [
|
| 301 |
+
"Model: \"sequential_1\"\n",
|
| 302 |
+
"_________________________________________________________________\n",
|
| 303 |
+
"Layer (type) Output Shape Param # \n",
|
| 304 |
+
"=================================================================\n",
|
| 305 |
+
"conv2d_3 (Conv2D) (None, 148, 148, 32) 896 \n",
|
| 306 |
+
"_________________________________________________________________\n",
|
| 307 |
+
"activation_5 (Activation) (None, 148, 148, 32) 0 \n",
|
| 308 |
+
"_________________________________________________________________\n",
|
| 309 |
+
"max_pooling2d_3 (MaxPooling2 (None, 74, 74, 32) 0 \n",
|
| 310 |
+
"_________________________________________________________________\n",
|
| 311 |
+
"conv2d_4 (Conv2D) (None, 72, 72, 32) 9248 \n",
|
| 312 |
+
"_________________________________________________________________\n",
|
| 313 |
+
"activation_6 (Activation) (None, 72, 72, 32) 0 \n",
|
| 314 |
+
"_________________________________________________________________\n",
|
| 315 |
+
"max_pooling2d_4 (MaxPooling2 (None, 36, 36, 32) 0 \n",
|
| 316 |
+
"_________________________________________________________________\n",
|
| 317 |
+
"conv2d_5 (Conv2D) (None, 34, 34, 64) 18496 \n",
|
| 318 |
+
"_________________________________________________________________\n",
|
| 319 |
+
"activation_7 (Activation) (None, 34, 34, 64) 0 \n",
|
| 320 |
+
"_________________________________________________________________\n",
|
| 321 |
+
"max_pooling2d_5 (MaxPooling2 (None, 17, 17, 64) 0 \n",
|
| 322 |
+
"_________________________________________________________________\n",
|
| 323 |
+
"flatten_1 (Flatten) (None, 18496) 0 \n",
|
| 324 |
+
"_________________________________________________________________\n",
|
| 325 |
+
"dense_2 (Dense) (None, 64) 1183808 \n",
|
| 326 |
+
"_________________________________________________________________\n",
|
| 327 |
+
"activation_8 (Activation) (None, 64) 0 \n",
|
| 328 |
+
"_________________________________________________________________\n",
|
| 329 |
+
"dropout_1 (Dropout) (None, 64) 0 \n",
|
| 330 |
+
"_________________________________________________________________\n",
|
| 331 |
+
"dense_3 (Dense) (None, 1) 65 \n",
|
| 332 |
+
"_________________________________________________________________\n",
|
| 333 |
+
"activation_9 (Activation) (None, 1) 0 \n",
|
| 334 |
+
"=================================================================\n",
|
| 335 |
+
"Total params: 1,212,513\n",
|
| 336 |
+
"Trainable params: 1,212,513\n",
|
| 337 |
+
"Non-trainable params: 0\n",
|
| 338 |
+
"_________________________________________________________________\n",
|
| 339 |
+
"None\n"
|
| 340 |
+
]
|
| 341 |
+
}
|
| 342 |
+
],
|
| 343 |
+
"source": [
|
| 344 |
+
"from __future__ import print_function\n",
|
| 345 |
+
"from tensorflow.keras.preprocessing.image import ImageDataGenerator\n",
|
| 346 |
+
"from tensorflow.keras.models import Sequential\n",
|
| 347 |
+
"from tensorflow.keras.layers import Dense, Dropout, Activation, Flatten\n",
|
| 348 |
+
"from tensorflow.keras.layers import Conv2D, MaxPooling2D\n",
|
| 349 |
+
"import os\n",
|
| 350 |
+
"\n",
|
| 351 |
+
"batch_size = 16\n",
|
| 352 |
+
"epochs = 10\n",
|
| 353 |
+
"\n",
|
| 354 |
+
"img_rows = x_train[0].shape[0]\n",
|
| 355 |
+
"img_cols = x_train[1].shape[0]\n",
|
| 356 |
+
"input_shape = (img_rows, img_cols, 3)\n",
|
| 357 |
+
"\n",
|
| 358 |
+
"model = Sequential()\n",
|
| 359 |
+
"model.add(Conv2D(32, (3, 3), input_shape=input_shape))\n",
|
| 360 |
+
"model.add(Activation('relu'))\n",
|
| 361 |
+
"model.add(MaxPooling2D(pool_size=(2, 2)))\n",
|
| 362 |
+
"\n",
|
| 363 |
+
"model.add(Conv2D(32, (3, 3)))\n",
|
| 364 |
+
"model.add(Activation('relu'))\n",
|
| 365 |
+
"model.add(MaxPooling2D(pool_size=(2, 2)))\n",
|
| 366 |
+
"\n",
|
| 367 |
+
"model.add(Conv2D(64, (3, 3)))\n",
|
| 368 |
+
"model.add(Activation('relu'))\n",
|
| 369 |
+
"model.add(MaxPooling2D(pool_size=(2, 2)))\n",
|
| 370 |
+
"\n",
|
| 371 |
+
"model.add(Flatten())\n",
|
| 372 |
+
"model.add(Dense(64))\n",
|
| 373 |
+
"model.add(Activation('relu'))\n",
|
| 374 |
+
"model.add(Dropout(0.5))\n",
|
| 375 |
+
"model.add(Dense(1))\n",
|
| 376 |
+
"model.add(Activation('sigmoid'))\n",
|
| 377 |
+
"\n",
|
| 378 |
+
"model.compile(loss='binary_crossentropy',\n",
|
| 379 |
+
" optimizer='rmsprop',\n",
|
| 380 |
+
" metrics=['accuracy'])\n",
|
| 381 |
+
"\n",
|
| 382 |
+
"print(model.summary())"
|
| 383 |
+
]
|
| 384 |
+
},
|
| 385 |
+
{
|
| 386 |
+
"cell_type": "markdown",
|
| 387 |
+
"metadata": {},
|
| 388 |
+
"source": [
|
| 389 |
+
"### Training our model"
|
| 390 |
+
]
|
| 391 |
+
},
|
| 392 |
+
{
|
| 393 |
+
"cell_type": "code",
|
| 394 |
+
"execution_count": 9,
|
| 395 |
+
"metadata": {},
|
| 396 |
+
"outputs": [
|
| 397 |
+
{
|
| 398 |
+
"name": "stdout",
|
| 399 |
+
"output_type": "stream",
|
| 400 |
+
"text": [
|
| 401 |
+
"Train on 2000 samples, validate on 1000 samples\n",
|
| 402 |
+
"2000/2000 [==============================] - 42s 21ms/sample - loss: 0.7029 - accuracy: 0.5335 - val_loss: 0.6820 - val_accuracy: 0.5440\n",
|
| 403 |
+
"1000/1000 [==============================] - 5s 5ms/sample - loss: 0.6820 - accuracy: 0.5440\n",
|
| 404 |
+
"Test loss: 0.6819891605377197\n",
|
| 405 |
+
"Test accuracy: 0.544\n"
|
| 406 |
+
]
|
| 407 |
+
}
|
| 408 |
+
],
|
| 409 |
+
"source": [
|
| 410 |
+
"history = model.fit(x_train, y_train,\n",
|
| 411 |
+
" batch_size=batch_size,\n",
|
| 412 |
+
" epochs=epochs,\n",
|
| 413 |
+
" validation_data=(x_test, y_test),\n",
|
| 414 |
+
" shuffle=True)\n",
|
| 415 |
+
"\n",
|
| 416 |
+
"model.save(\"cats_vs_dogs_V1.h5\")\n",
|
| 417 |
+
"\n",
|
| 418 |
+
"# Evaluate the performance of our trained model\n",
|
| 419 |
+
"scores = model.evaluate(x_test, y_test, verbose=1)\n",
|
| 420 |
+
"print('Test loss:', scores[0])\n",
|
| 421 |
+
"print('Test accuracy:', scores[1])"
|
| 422 |
+
]
|
| 423 |
+
},
|
| 424 |
+
{
|
| 425 |
+
"cell_type": "markdown",
|
| 426 |
+
"metadata": {},
|
| 427 |
+
"source": [
|
| 428 |
+
"### Testing our Classifier"
|
| 429 |
+
]
|
| 430 |
+
},
|
| 431 |
+
{
|
| 432 |
+
"cell_type": "code",
|
| 433 |
+
"execution_count": 10,
|
| 434 |
+
"metadata": {},
|
| 435 |
+
"outputs": [],
|
| 436 |
+
"source": [
|
| 437 |
+
"import cv2\n",
|
| 438 |
+
"import numpy as np\n",
|
| 439 |
+
"from tensorflow.keras.models import load_model\n",
|
| 440 |
+
"\n",
|
| 441 |
+
"classifier = load_model('cats_vs_dogs_V1.h5')\n",
|
| 442 |
+
"\n",
|
| 443 |
+
"def draw_test(name, pred, input_im):\n",
|
| 444 |
+
" BLACK = [0,0,0]\n",
|
| 445 |
+
" if pred == \"[0]\":\n",
|
| 446 |
+
" pred = \"cat\"\n",
|
| 447 |
+
" if pred == \"[1]\":\n",
|
| 448 |
+
" pred = \"dog\"\n",
|
| 449 |
+
" expanded_image = cv2.copyMakeBorder(input_im, 0, 0, 0, imageL.shape[0] ,cv2.BORDER_CONSTANT,value=BLACK)\n",
|
| 450 |
+
" #expanded_image = cv2.cvtColor(expanded_image, cv2.COLOR_GRAY2BGR)\n",
|
| 451 |
+
" cv2.putText(expanded_image, str(pred), (252, 70) , cv2.FONT_HERSHEY_COMPLEX_SMALL,4, (0,255,0), 2)\n",
|
| 452 |
+
" cv2.imshow(name, expanded_image)\n",
|
| 453 |
+
"\n",
|
| 454 |
+
"\n",
|
| 455 |
+
"for i in range(0,10):\n",
|
| 456 |
+
" rand = np.random.randint(0,len(x_test))\n",
|
| 457 |
+
" input_im = x_test[rand]\n",
|
| 458 |
+
"\n",
|
| 459 |
+
" imageL = cv2.resize(input_im, None, fx=2, fy=2, interpolation = cv2.INTER_CUBIC)\n",
|
| 460 |
+
" cv2.imshow(\"Test Image\", imageL)\n",
|
| 461 |
+
"\n",
|
| 462 |
+
" input_im = input_im.reshape(1,150,150,3) \n",
|
| 463 |
+
" \n",
|
| 464 |
+
" ## Get Prediction\n",
|
| 465 |
+
" res = str(classifier.predict_classes(input_im, 1, verbose = 0)[0])\n",
|
| 466 |
+
"\n",
|
| 467 |
+
" draw_test(\"Prediction\", res, imageL) \n",
|
| 468 |
+
" cv2.waitKey(0)\n",
|
| 469 |
+
"\n",
|
| 470 |
+
"cv2.destroyAllWindows()"
|
| 471 |
+
]
|
| 472 |
+
},
|
| 473 |
+
{
|
| 474 |
+
"cell_type": "markdown",
|
| 475 |
+
"metadata": {},
|
| 476 |
+
"source": [
|
| 477 |
+
"### Analysis\n",
|
| 478 |
+
"- Our results aren't bad, but they could be better"
|
| 479 |
+
]
|
| 480 |
+
},
|
| 481 |
+
{
|
| 482 |
+
"cell_type": "markdown",
|
| 483 |
+
"metadata": {},
|
| 484 |
+
"source": [
|
| 485 |
+
"# Now let's train our Cats vs Dogs Classifier using Data Augmentation"
|
| 486 |
+
]
|
| 487 |
+
},
|
| 488 |
+
{
|
| 489 |
+
"cell_type": "code",
|
| 490 |
+
"execution_count": 11,
|
| 491 |
+
"metadata": {},
|
| 492 |
+
"outputs": [
|
| 493 |
+
{
|
| 494 |
+
"name": "stdout",
|
| 495 |
+
"output_type": "stream",
|
| 496 |
+
"text": [
|
| 497 |
+
"Found 2000 images belonging to 2 classes.\n",
|
| 498 |
+
"Found 1000 images belonging to 2 classes.\n"
|
| 499 |
+
]
|
| 500 |
+
}
|
| 501 |
+
],
|
| 502 |
+
"source": [
|
| 503 |
+
"import os\n",
|
| 504 |
+
"import numpy as np\n",
|
| 505 |
+
"from tensorflow.keras.models import Sequential\n",
|
| 506 |
+
"from tensorflow.keras.layers import Activation, Dropout, Flatten, Dense\n",
|
| 507 |
+
"from tensorflow.keras.preprocessing.image import ImageDataGenerator\n",
|
| 508 |
+
"from tensorflow.keras.layers import Conv2D, MaxPooling2D, ZeroPadding2D\n",
|
| 509 |
+
"from tensorflow.keras import optimizers\n",
|
| 510 |
+
"import scipy\n",
|
| 511 |
+
"import pylab as pl\n",
|
| 512 |
+
"import matplotlib.cm as cm\n",
|
| 513 |
+
"%matplotlib inline\n",
|
| 514 |
+
"\n",
|
| 515 |
+
"input_shape = (150, 150, 3)\n",
|
| 516 |
+
"img_width = 150\n",
|
| 517 |
+
"img_height = 150\n",
|
| 518 |
+
"\n",
|
| 519 |
+
"nb_train_samples = 2000\n",
|
| 520 |
+
"nb_validation_samples = 1000\n",
|
| 521 |
+
"batch_size = 16\n",
|
| 522 |
+
"epochs = 5\n",
|
| 523 |
+
"\n",
|
| 524 |
+
"train_data_dir = './datasets/catsvsdogs/train'\n",
|
| 525 |
+
"validation_data_dir = './datasets/catsvsdogs/validation'\n",
|
| 526 |
+
"\n",
|
| 527 |
+
"# Creating our data generator for our test data\n",
|
| 528 |
+
"validation_datagen = ImageDataGenerator(\n",
|
| 529 |
+
" # used to rescale the pixel values from [0, 255] to [0, 1] interval\n",
|
| 530 |
+
" rescale = 1./255)\n",
|
| 531 |
+
"\n",
|
| 532 |
+
"# Creating our data generator for our training data\n",
|
| 533 |
+
"train_datagen = ImageDataGenerator(\n",
|
| 534 |
+
" rescale = 1./255, # normalize pixel values to [0,1]\n",
|
| 535 |
+
" rotation_range = 30, # randomly applies rotations\n",
|
| 536 |
+
" width_shift_range = 0.3, # randomly applies width shifting\n",
|
| 537 |
+
" height_shift_range = 0.3, # randomly applies height shifting\n",
|
| 538 |
+
" horizontal_flip = True, # randonly flips the image\n",
|
| 539 |
+
" fill_mode = 'nearest') # uses the fill mode nearest to fill gaps created by the above\n",
|
| 540 |
+
"\n",
|
| 541 |
+
"# Specify criteria about our training data, such as the directory, image size, batch size and type \n",
|
| 542 |
+
"# automagically retrieve images and their classes for train and validation sets\n",
|
| 543 |
+
"train_generator = train_datagen.flow_from_directory(\n",
|
| 544 |
+
" train_data_dir,\n",
|
| 545 |
+
" target_size = (img_width, img_height),\n",
|
| 546 |
+
" batch_size = batch_size,\n",
|
| 547 |
+
" class_mode = 'binary',\n",
|
| 548 |
+
" shuffle = True)\n",
|
| 549 |
+
"\n",
|
| 550 |
+
"validation_generator = validation_datagen.flow_from_directory(\n",
|
| 551 |
+
" validation_data_dir,\n",
|
| 552 |
+
" target_size = (img_width, img_height),\n",
|
| 553 |
+
" batch_size = batch_size,\n",
|
| 554 |
+
" class_mode = 'binary',\n",
|
| 555 |
+
" shuffle = False) "
|
| 556 |
+
]
|
| 557 |
+
},
|
| 558 |
+
{
|
| 559 |
+
"cell_type": "markdown",
|
| 560 |
+
"metadata": {},
|
| 561 |
+
"source": [
|
| 562 |
+
"### Create our model, just like we did previously"
|
| 563 |
+
]
|
| 564 |
+
},
|
| 565 |
+
{
|
| 566 |
+
"cell_type": "code",
|
| 567 |
+
"execution_count": 12,
|
| 568 |
+
"metadata": {},
|
| 569 |
+
"outputs": [
|
| 570 |
+
{
|
| 571 |
+
"name": "stdout",
|
| 572 |
+
"output_type": "stream",
|
| 573 |
+
"text": [
|
| 574 |
+
"Model: \"sequential_2\"\n",
|
| 575 |
+
"_________________________________________________________________\n",
|
| 576 |
+
"Layer (type) Output Shape Param # \n",
|
| 577 |
+
"=================================================================\n",
|
| 578 |
+
"conv2d_6 (Conv2D) (None, 148, 148, 32) 896 \n",
|
| 579 |
+
"_________________________________________________________________\n",
|
| 580 |
+
"activation_10 (Activation) (None, 148, 148, 32) 0 \n",
|
| 581 |
+
"_________________________________________________________________\n",
|
| 582 |
+
"max_pooling2d_6 (MaxPooling2 (None, 74, 74, 32) 0 \n",
|
| 583 |
+
"_________________________________________________________________\n",
|
| 584 |
+
"conv2d_7 (Conv2D) (None, 72, 72, 32) 9248 \n",
|
| 585 |
+
"_________________________________________________________________\n",
|
| 586 |
+
"activation_11 (Activation) (None, 72, 72, 32) 0 \n",
|
| 587 |
+
"_________________________________________________________________\n",
|
| 588 |
+
"max_pooling2d_7 (MaxPooling2 (None, 36, 36, 32) 0 \n",
|
| 589 |
+
"_________________________________________________________________\n",
|
| 590 |
+
"conv2d_8 (Conv2D) (None, 34, 34, 64) 18496 \n",
|
| 591 |
+
"_________________________________________________________________\n",
|
| 592 |
+
"activation_12 (Activation) (None, 34, 34, 64) 0 \n",
|
| 593 |
+
"_________________________________________________________________\n",
|
| 594 |
+
"max_pooling2d_8 (MaxPooling2 (None, 17, 17, 64) 0 \n",
|
| 595 |
+
"_________________________________________________________________\n",
|
| 596 |
+
"flatten_2 (Flatten) (None, 18496) 0 \n",
|
| 597 |
+
"_________________________________________________________________\n",
|
| 598 |
+
"dense_4 (Dense) (None, 64) 1183808 \n",
|
| 599 |
+
"_________________________________________________________________\n",
|
| 600 |
+
"activation_13 (Activation) (None, 64) 0 \n",
|
| 601 |
+
"_________________________________________________________________\n",
|
| 602 |
+
"dropout_2 (Dropout) (None, 64) 0 \n",
|
| 603 |
+
"_________________________________________________________________\n",
|
| 604 |
+
"dense_5 (Dense) (None, 1) 65 \n",
|
| 605 |
+
"_________________________________________________________________\n",
|
| 606 |
+
"activation_14 (Activation) (None, 1) 0 \n",
|
| 607 |
+
"=================================================================\n",
|
| 608 |
+
"Total params: 1,212,513\n",
|
| 609 |
+
"Trainable params: 1,212,513\n",
|
| 610 |
+
"Non-trainable params: 0\n",
|
| 611 |
+
"_________________________________________________________________\n",
|
| 612 |
+
"None\n"
|
| 613 |
+
]
|
| 614 |
+
}
|
| 615 |
+
],
|
| 616 |
+
"source": [
|
| 617 |
+
"# Creating out model\n",
|
| 618 |
+
"model = Sequential()\n",
|
| 619 |
+
"model.add(Conv2D(32, (3, 3), input_shape=input_shape))\n",
|
| 620 |
+
"model.add(Activation('relu'))\n",
|
| 621 |
+
"model.add(MaxPooling2D(pool_size=(2, 2)))\n",
|
| 622 |
+
"\n",
|
| 623 |
+
"model.add(Conv2D(32, (3, 3)))\n",
|
| 624 |
+
"model.add(Activation('relu'))\n",
|
| 625 |
+
"model.add(MaxPooling2D(pool_size=(2, 2)))\n",
|
| 626 |
+
"\n",
|
| 627 |
+
"model.add(Conv2D(64, (3, 3)))\n",
|
| 628 |
+
"model.add(Activation('relu'))\n",
|
| 629 |
+
"model.add(MaxPooling2D(pool_size=(2, 2)))\n",
|
| 630 |
+
"\n",
|
| 631 |
+
"model.add(Flatten())\n",
|
| 632 |
+
"model.add(Dense(64))\n",
|
| 633 |
+
"model.add(Activation('relu'))\n",
|
| 634 |
+
"model.add(Dropout(0.5))\n",
|
| 635 |
+
"model.add(Dense(1))\n",
|
| 636 |
+
"model.add(Activation('sigmoid'))\n",
|
| 637 |
+
"\n",
|
| 638 |
+
"print(model.summary())\n",
|
| 639 |
+
"\n",
|
| 640 |
+
"model.compile(loss='binary_crossentropy',\n",
|
| 641 |
+
" optimizer='rmsprop',\n",
|
| 642 |
+
" metrics=['accuracy'])"
|
| 643 |
+
]
|
| 644 |
+
},
|
| 645 |
+
{
|
| 646 |
+
"cell_type": "code",
|
| 647 |
+
"execution_count": 13,
|
| 648 |
+
"metadata": {},
|
| 649 |
+
"outputs": [
|
| 650 |
+
{
|
| 651 |
+
"name": "stdout",
|
| 652 |
+
"output_type": "stream",
|
| 653 |
+
"text": [
|
| 654 |
+
"WARNING:tensorflow:From <ipython-input-13-03f251166bbb>:6: Model.fit_generator (from tensorflow.python.keras.engine.training) is deprecated and will be removed in a future version.\n",
|
| 655 |
+
"Instructions for updating:\n",
|
| 656 |
+
"Please use Model.fit, which supports generators.\n",
|
| 657 |
+
"WARNING:tensorflow:sample_weight modes were coerced from\n",
|
| 658 |
+
" ...\n",
|
| 659 |
+
" to \n",
|
| 660 |
+
" ['...']\n",
|
| 661 |
+
"WARNING:tensorflow:sample_weight modes were coerced from\n",
|
| 662 |
+
" ...\n",
|
| 663 |
+
" to \n",
|
| 664 |
+
" ['...']\n",
|
| 665 |
+
"Train for 125 steps, validate for 62 steps\n",
|
| 666 |
+
"Epoch 1/5\n",
|
| 667 |
+
"125/125 [==============================] - 64s 511ms/step - loss: 0.7178 - accuracy: 0.5190 - val_loss: 0.6823 - val_accuracy: 0.5887\n",
|
| 668 |
+
"Epoch 2/5\n",
|
| 669 |
+
"125/125 [==============================] - 45s 363ms/step - loss: 0.6883 - accuracy: 0.5555 - val_loss: 0.6647 - val_accuracy: 0.5746\n",
|
| 670 |
+
"Epoch 3/5\n",
|
| 671 |
+
"125/125 [==============================] - 46s 370ms/step - loss: 0.6841 - accuracy: 0.5795 - val_loss: 0.6531 - val_accuracy: 0.6411\n",
|
| 672 |
+
"Epoch 4/5\n",
|
| 673 |
+
"125/125 [==============================] - 47s 373ms/step - loss: 0.6642 - accuracy: 0.6160 - val_loss: 0.7110 - val_accuracy: 0.5151\n",
|
| 674 |
+
"Epoch 5/5\n",
|
| 675 |
+
"125/125 [==============================] - 50s 399ms/step - loss: 0.6643 - accuracy: 0.5815 - val_loss: 0.6231 - val_accuracy: 0.6905\n"
|
| 676 |
+
]
|
| 677 |
+
}
|
| 678 |
+
],
|
| 679 |
+
"source": [
|
| 680 |
+
"history = model.fit_generator(\n",
|
| 681 |
+
" train_generator,\n",
|
| 682 |
+
" steps_per_epoch = nb_train_samples // batch_size,\n",
|
| 683 |
+
" epochs = epochs,\n",
|
| 684 |
+
" validation_data = validation_generator,\n",
|
| 685 |
+
" validation_steps = nb_validation_samples // batch_size)"
|
| 686 |
+
]
|
| 687 |
+
},
|
| 688 |
+
{
|
| 689 |
+
"cell_type": "markdown",
|
| 690 |
+
"metadata": {},
|
| 691 |
+
"source": [
|
| 692 |
+
"## Plotting our Loss and Accuracy Graphs"
|
| 693 |
+
]
|
| 694 |
+
},
|
| 695 |
+
{
|
| 696 |
+
"cell_type": "code",
|
| 697 |
+
"execution_count": 14,
|
| 698 |
+
"metadata": {},
|
| 699 |
+
"outputs": [
|
| 700 |
+
{
|
| 701 |
+
"data": {
|
| 702 |
+
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAYgAAAEGCAYAAAB/+QKOAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nOzdd1gU1/7H8fdZekdBioDYCygWsMWKiTUxlptorNckxtSbXkw33ZteruYXU68lmuQmllhixS52LGBBpaOAoPTO+f0xK0EFBWQZynk9zz6yu7MzH1fc787Md84RUkoURVEU5VoGvQMoiqIodZMqEIqiKEq5VIFQFEVRyqUKhKIoilIuVSAURVGUcpnrHaCmuLq6ypYtW1b79dnZ2djZ2dVcoBqiclWNylU1KlfVNMRcBw8evCilbFbuk1LKBnELDAyUtyIkJOSWXm8qKlfVqFxVo3JVTUPMBRyQFXyuqkNMiqIoSrlMWiCEECOEEKeEEGeEELPLef4zIUSY8XZaCHHZ+Hg3IcQeIUS4EOKoEGKiKXMqiqIo1zPZOQghhBkwDxgKxAP7hRCrpJQRV5aRUj5TZvl/Ad2Nd3OA6VLKSCFEc+CgEGK9lPKyqfIqiqIoVzPlHkQv4IyU8pyUsgBYBoy5wfKTgKUAUsrTUspI48+JQDJQ/kkURVEUxSSENNFYTEKIe4ARUsqZxvvTgN5SyifKWdYXCAW8pZTF1zzXC/gv4C+lLLnmuVnALAB3d/fAZcuWVTmnT+zvJLkPIa3QAnt7+yq/3tSysrJUripQuapG5aqahpgrODj4oJQyqNwnKzp7fas34F7guzL3pwFfVbDsS+U9B3gCp4A+N9tetbuY1s2Wct3LDbI7wZRUrqpRuapG5aqa+tjFFA/4lLnvDSRWsOx9GA8vXSGEcATWAK9JKUNNkhCg31Nw5Gcs8y+ZbBOKoij1kSkLxH6gnRCilRDCEq0IrLp2ISFEB6AJsKfMY5bAcmChlPI3E2aEkiIoLqTDyS9MuhlFUZT6xmRdTFLKIiHEE8B6wAz4QUoZLoR4G22X5kqxmAQsM+7qXDEBGAi4CCFmGB+bIaUMq/Gg4cuhIAuXgsMwx+nq5wbNhuCXa3yTiqIo9YFJh9qQUq4F1l7z2BvX3J9TzusWA4tNma1U0AOw+ytKslIwUAKtBsG9P4Ft01rZvKIoSl2lrqRe8zy0vYOw7u+DnRtEbYMFgyEpXO9kiqIoumrcBeLwEkg8BKM+IsOpE8zaCs27w+UY+G4oRKzUO6GiKIpuGneByE7RDidZGkdBdPKC+9dBwEQozIZfp8OW96Ck5IarURRFaYgad4Ho/zS4dbr6MQsbGPcNDHsPhAG2fwi/TIG8DH0yKoqi6KRxF4iKCAG3PQFTfwdrZzi1Fr67A1LP6p1MURSl1qgCcSNthsCsEGjWCS6egm+DIXKT3qkURVFqhSoQN9O0NczcCB3vgrx0+Ple2PUFmGgMK0VRlLpCFYjKsHKACYtg8MsgS2DjG/D7TCjI0TuZoiiKyagCUVkGAwyeDROXgKU9HP8f/DAcLsfpnUxRGpzlkQV6R1BQBaLqOt0FMzdBk1Zw4ah2UV3Mbr1TKUqDsvJsod4RFFSBqB63TvDQFmgdDDkX4b+jYf/3eqdSFEWpUapAVJdtU5jyP+j7hDYi7Jpn4c+noEjtGivKrdgRmaJ3BMVIFYhbYWYOw9+DcQvAzAoO/qTtTWQl651MUeqlfVFpTPt+HwAZeeowk95MOppro9F1Iri2g1+mQlyodl5i4mLw6qF3MkWp8z7beJovNkde93jAnA1X3X/q9nY8M7R9bcVSUAWi5nj1gIdCtPGb4kLhx5Ew+kuteCiKUqFnhrbnmaHt+XJzJJ9uPI2tpRk5BcW0aWbHpmcHIYTQO2KjpQ4x1SQHd/jnnxA4A4ryYPksWP8qFBfpnUxR6rSzKVn8Z8sZAL6ZFmh8LJs951L1jNXoqQJR08wtYfQXcOenYDCHPf+BJfdATpreyRSlTpJS8uryYxQUl3BvoDcD2jUrfW7RnhgdkymqQJhKzwdh+iqwdYVzIfDtEEg+oXcqRalzfjsYT+i5NJraWfLKqL9HVzY3CDZEJHEhPU/HdI2bKhCm1LKfNgmRZ1e4FKWNCHtitd6pFKXOuJiVz3trtC9Ob9zlRxM7SwDGtLFguL8HxSWSpfti9YzYqKkCYWrOPnD/X9D5HijI0uaW2DpXTUKkKMC7qyNIzy1kQDtXxnRrXvr4uHaWTO3jC8DSfbEUFqv/L3pQBaI2WNrCP76DO94CBGz9AH6dBvmZeidTFN1sP53CirBErMwNvDu283XdSn1aN6Wdmz3JmflsCE/SKWXjpgpEbRFCm8Fuym9g5QQnV2vzXqed0zuZotS63IJiXl1xDICn72iPr4vddcsIIZjWV9uLWBQaXZvxFCNVIGpbu6HaOE6u7SHlBCwIhrNb9E6lKLXqi82RxKXl0tHDgZkDWlW43LjuXthamhF6Lo3IJLXHXdtUgdCDa1uYuRnaj4S8y7D4H7D7P2oSIqVROHE+g293nEMI+GB8FyzMKv4YcrC2YFx3LwAWh6qW19qmCoRerB3hvp9h4AvaJEQbXoXlD0Nhrt7JFMVkiksks/84RnGJZHofX7q3aHLT11w5Wf37oQSy89VFp7VJFQg9GQww5DW4979gYQtHf9GG6EhP0DuZopjE4tAYjsRdxsPRmueHd6jUazp5OtKzZROy8otYflj936hNqkDUBf5j4cGN4NwCEg9rg/3FhuqdSlFq1Pn0XD5afwqAOXf742BtUenXTuvbEtAKjFSHYmuNKhB1hUdneGgrtBoI2cnw013a8OGK0kC8uTKcrPwihvm5M6KzR5VeO8LfA1d7S05eyORAzCUTJVSupQpEXWLnAlOXQ+9HoaQQ/nyKdqf/T01CpNR768MvsCEiCXsrc94a41/l11uaG7ivZwtAjc9Um1SBqGvMzGHkXBgzH8ws8UpcB4vGQpaaZUupnzLzCnlzZTgAzw9rj6eTTbXWM6l3CwwC1h0/T0pmfk1GVCqgCkRd1X0K3L+OfMsmELNLOy+RGKZ3KkWpso/Xn+JCRh5dfZxLzyVUh5ezDXd0cqewWPLLfjU+U21QBaIu8w7iYOAn4N0TMuLhhxFw7H96p1KUSjsce4mFoTGYGQRzx3fBzHBrk/9cubL6572xFKnxmUzOpAVCCDFCCHFKCHFGCDG7nOc/E0KEGW+nhRCXyzz3TyFEpPH2T1PmrMsKrFxgxhroPhWKcuH3B2HjG1BSrHc0RbmhwuISXv7jGFLCzAGt6OTpeMvr7NfGlVaudiSm57HlpJr73dRMViCEEGbAPGAk4AdMEkL4lV1GSvmMlLKblLIb8BXwh/G1TYE3gd5AL+BNIcTNr6hpqMyt4O7/wMiPQJjBri/g5wmQq7o5lLrrux1RnLyQiU9TG56+vWbmkjYYBFN6G09WqyurTc6UexC9gDNSynNSygJgGTDmBstPApYafx4ObJRSpkkpLwEbg
REmzFr3CQG9Z8H0FWDTFM5sgm9vh5RTeidTlOvEpubwxebTALw3tgs2lmY1tu57A32wtjCwI/Ii51Kyamy9yvWEqS46EULcA4yQUs403p8G9JZSPlHOsr5AKOAtpSwWQjwPWEsp3zU+/zqQK6X8+JrXzQJmAbi7uwcuW7as2nmzsrKwt7ev9utNpbxc1rlJdD7+PvbZ0RSZ2XCi07OkuvbSPVddoHJVjSlySSn55EA+x1OL6eNpxiNdrWs81w/H89keX8RwX3MmdbK6lbg1mksvt5IrODj4oJQyqNwnpZQmuQH3At+VuT8N+KqCZV8q+xzwAvBamfuvA8/daHuBgYHyVoSEhNzS602lwlz5WVL+Ml3KNx2lfNNJyq0fSllSon8unalcVWOKXMsPxUvfl1bLgDnrZUpmXrXWcbNcx+IvS9+XVssub/4lc/KLqrWN6miI/47AAVnB56opDzHFAz5l7nsDiRUsex9/H16q6msbJ0s7uPcnuP0N7X7Iu/DrdMhXu9yKfi7nFPDO6ggAXh3VCVd703y77+zlRDcfZzLyivjziPpoMBVTFoj9QDshRCshhCVaEVh17UJCiA5AE2BPmYfXA8OEEE2MJ6eHGR9TyhICBjwHk38BK0c4sQq+HwaXovVOpjRS7689QWp2Ab1bNeXeIG+TbmuacZTXhaHRanwmEzFZgZBSFgFPoH2wnwB+lVKGCyHeFkLcXWbRScAyWeZfWEqZBryDVmT2A28bH1PK0364Nr+ES1tIDtcuqju3Ve9USiOz52wqvx6Ix9LMwPvju1w3hWhNuzPAkya2FhxPyCAs7vLNX6BUmUmvg5BSrpVStpdStpFSvmd87A0p5aoyy8yRUl53jYSU8gcpZVvj7UdT5mwQmrXXikS7YVr766LxEPq1moRIqRV5hcW8ulybQvTx4La0aWb6E7nWFmZM6KkdiVYtr6ahrqRuSGycYdIy6P8syGL4azaseAwK8/ROpjRw87ee5dzFbNo0s+ORwa1rbbtTevkiBKw+ep60bDWoZU1TBaKhMZjBHW/CPT+AuQ0c+Rl+GgUZ5/VOpjRQkUmZfL31DAAfjA/Ayrzmrnm4mRYutgxu34yCohJ+OxBXa9ttLFSBaKg6/wMe3ABOLSDhICwYBHH79E6lNDAlJZJXlh+jsFgyqZcPvVo1rfUMV8ZnWrw3huISdUi1JqkC0ZB5BsCsEPDtD1lJ8NOdcGiR3qmUBuSXA3Hsj76Eq70Vs0d00iXDoPZu+DS1IS4tl+2n1bD4NUkViIbOzlUbnqPXLCgugFVPwNoXoLhQ72RKPZecmcf7a08A8OZoP5xsKz+FaE0yMwim9Nb2ItTJ6pqlCkRjYGYBoz6Cu78CgwXsWwCLxkF2qt7JlHrs7T8jyMwrYnCHZtwV4KlrlglBPliaGwg5lUxcWo6uWRoSVSAakx7T4f61YO8O0Tu06yXOH9U7lVIPhZxMZvXR89hYmPHOmM4mv+bhZpraWXJXF0+khCV71WRCNUUViMbGpxfM2grNe0B6LPwwHI7/oXcqpR7JKSjitRXHAXh2aHt8mtrqnEgz1Xiy+pf9seQVqvlSaoIqEI2RY3O4fx10nQyFOfC/+2HTW2oSIqVSPtt4moTLufg3d+T+fi31jlOqu48znb0cuZRTyNpjqq27JqgC0VhZWMPY+TD8A20Sop2fwtJJkJeudzKlDjuekM73O6MwCPhgfBfMzerOR4gQonR8JnWyumbUnX9dpfYJAX0fg2l/gE0TiFyvTUJ0MVLvZEodVGScQrREwozbWhHg7ax3pOvc3dULB2tzDsde5niC+rJzq1SBUKD1YHgoBNz8IDUSvh0CpzfonUqpY/67J4ZjCek0d7LmuWE1M4VoTbOxNOPeQOP4THvUXsStUgVC0TRtBQ9uhE53Q36GNuf1jk/UYH8KAAmXc/lkgza97dtjOmNnZa5zoopN7aPNWb3ySALpOep6n1uhCoTyNyt7uPe/EPwqIGHz2/C/B6AgW+9kio6klLyx4jg5BcWM6uLBHX7ueke6odbN7BnQzpW8whL+dyhe7zj1mioQytUMBhj0Ity3FCwdIPwP+H44XFK7643VuuMX2HwyGQcrc94c7a93nEqZajxZvTg0hhI1PlO1qQKhlK/jKJi5CZq2hqRj8G0wRO2AnZ9BZpLe6ZRakp5byJxV4QC8OLIj7o7WOieqnNs7uuHpZE3UxWx2n1UjBlSXKhBKxdw6wkNboM3tkJMKC8fAmc2w63O9kym15MO/TpKcmU+gbxOm9Gqhd5xKMzczMNmYd+GeaF2z1GeqQCg3ZtMEpvwGtz2pTUIUvQP2f4d1jroQqaE7GJPGkr2xmBsE74/rgsGg73AaVTWxlw8WZoJNJ5JIvJyrd5x6qe62Iih1h8EMhr0DHl1g1b+gKI8++x6B46+Coxc4eWu3a3928AQz9StWHxUUadc8ADw8qDUdPBx0TlR1bg7WjOjsyZ9HElm6L5bnhnXQO1K9o/73KpUXMAFc22mdTWnntMNOOalwoYIB/4RBKxLlFY8rP9u6aBfsKXXKgu1nOZ2URUsXW/41pJ3ecaptWh9fY4GI419D2mFprg6aVIUqEErVuLYHgwUnOvyLTnf9C9LjISNe+zM9AdLjICNB+znrgvZzRkLF6zO3rqB4eIGTj3bfyr72/n4KURez+XKLNoXoe+O6YG1Re1OI1rSeLZvQwd2BU0mZ/BV+gbu7Ntc7Ur2iCoRSNWueB+8gkpzvoJODOzi4A4HlL1tUAJmJfxePqwqJsbDkpUPaWe1WEWunv4vFtcXDyQscmoO5pUn+uo2NlJJXlx+joKiE8T286NfWVe9It0QIwdS+vry+4jiL98SoAlFFqkAolXd4CSQe0jqbdu+/+fLmltCkpXarSH5mxcXjyv28dO2WdLyClQhtjgsnb/wKLCF/49/Fw8kbHL3Brpl2jYdyQ78fSmD32VSa2Frw2p1+esepEeO6e/HvdSfZF53GyQsZdPRw1DtSvaEKhFJ52Slw709gaVdz67Ry0Npp3TqW/7yU2nmO9LhriseVnxMg87x2OCvrAm4AKbuvX4+ZpTbMuaN3mb0Q76vvWzvV3N+rHkrLLuC9NREAvHanH03tGsZemb2VOeN7eLFwTwyLQ2N4d2wXvSPVG6pAKJXX/+na36YQ2rzadq7QvHv5yxQXaUUiPZ6I0I34eTn+XTyuFJbcNLgUrd0qYulQTvHw+vvciKOXNkx6Vez8TJt3ox54d00El3IK6dfWhfE9vPSOU6Om9vFl4Z4Ylh9K4KURHXGw1mf+7PpGFQil/jMzB2cfcPYhOSofv/6Dr1+mIMdYMOKvLx5XHi/IhJQT2q0ids0q6Mry0YqJvbvWFnxFVjLs+gKsh9X4X7sm7Yy8yB+HErA0N/Du2C66TyFa09q7O9C7VVP2RqWx/HAC0/u21DtSvaAKhNHyyAIGD9Y7hWIylrZai65rBS2bUkLupTLFI/6an42FJDtFuyUeLn89BnPtpPmVPQ9rZwhbgqO/FzDYVH+7W5JXWMyrK7RrHp66vR2tXGvwEGId
Mr1vS/ZGpbFoTwzT+vg2uCJoCqpAGK08W8gXeodQ9CME2DbVbp4B5S9TUgxZSeUUjzL3s1O0ub7TY696afewV8BwEga/Ao6etfAXqryvtkQSk5pDe3d7HhrQWu84JjPM351mDlZEJmexNyqNPq1d9I5U56kCoSiVZTAznuhuDj69yl+mMO/vaz+u7HlEbUNE74BDC7XbFYNmQ/DLtZO9AnGZJXyz5xygTSHakC8kszAzMKlXC77cHMmi0BhVICqh4f42KIoeLKzBpQ20GgjdJmtTumYlc7bVdG0ypitsXbWryIsKdItaUiL5b3g+RSWSqX1aEOjbVLcstWVSLx/MDIL1xy+QnJGnd5w6r9EXiLTsAsbM2wVoc+4qSo0yXlgY5/sPmLhIm7XPpw/kXIR1L8D83hC+XJeZ+5bsi+XM5RLcHKx4cUQFbcYNjKeTDUM7uVNUIlm6L07vOHVeozzE9NnG03yxOfK6x9u+uu6q+0/d3o5nhtbNuXeVeqC8Cwt9esEDf8GptbDxTW0O8N9mgFcQDH0bWvarlWhJGXl8uO4kAHPu9sexEbV9Tu/ry1/hF/h5XwyPBbfBwqzRf0+uUKMsEM8MbV/6wV9QVMIfh+KZbRy5EsDL2YZHBrXm3iAfvSIqDUFFFxYKAR3vhHbD4fBCCPkAEg7AT6Ogwyi4Yw40M+3Io3NWhZOZX0S3ZmaM7Oxh0m3VNX3buNC6mR3nUrLZfCKJEZ3rVtNAXWLS0imEGCGEOCWEOCOEmF3BMhOEEBFCiHAhxM9lHv/Q+NgJIcSXwkQ9aZbmBu4zTizy2cSutHWzJ+FyLq+vDGfghyF8t+McOQVFpti00tD1fxrcOlX8vJk5BD0ATx7Wupss7LQ9i/l9YNWTkGGaOTc2RiSx7vgFbC3NmOZn2ejaPYUQTDNOSbooVE2leyMmKxBCCDNgHjAS8AMmCSH8rlmmHfAy0E9K6Q88bXz8NqAfEAB0BnoCg0yV9Ypx3b3Z8PRAvp7SAz9PR5Iz83l3zQn6/zuEeSFnyMgrNHUEpTGysofBL8FTYRD0ICDg0H/hqx6w5V3Iy6ixTWXlF/HGSm1Mq+eGdcDFpnEeXhnfwxsbCzN2nUnlTHKW3nHqLFP+dvQCzkgpz0kpC4BlwJhrlnkImCelvAQgpUw2Pi4Ba8ASsAIsgFqZCNlgEIzs4smaJ/vzw4wguvk4k5ZdwEfrT9Fv7hY+3XCKS9n6dZ4oDZi9G9z1KTy+FzqNhsIc2P4RfNkd9n0Lxbf+BeWTDac4n55HgLcTM25reeuZ6yknGwvGdteGE1ms9iIqJKSJuieEEPcAI6SUM433pwG9pZRPlFlmBXAabW/BDJgjpfzL+NzHwExAAP+RUr5azjZmAbMA3N3dA5ctW1btvL+EZzHR//p5B6SUnEgrYdXZAk6maV1OVmYwpIUFw1ua42xl2m9gWVlZ2NvXvfkQVK6qqU4ux/QTtDn7E04Z2snkHBtPolpNI6XZbdWaZOlcejHv7MlDCHizrzW+jmYN6v2qqpiMYt7cnYeNOXw+2BYr85u/pw3x/QoODj4opQwq90kppUluwL3Ad2XuTwO+umaZ1cBytD2EVkA84Ay0BdYA9sbbHmDgjbYXGBgob0VISMhNl9kXlSqnf79X+r60Wvq+tFq2f3WtfHPlcZlwKeeWtn2rufSgclVNtXOVlEgZ8aeUXwZK+aajdlswRMroXVVaTWFRsRz5+Xbp+9Jq+e7q8FvPZWK1lWv8/F3S96XV8ue9MZVaviG+X8ABWcHnqim//sYDZduAvIHEcpZZKaUslFJGAaeAdsA4IFRKmSWlzALWAX1MmLVSerZsyn8f6MWqJ/oxzM+d/KISftodzaCPQnj5j6PEpGbrHVFpaISATnfBY6Fw12dg56Z1PP04EpZOgpRTlVrND7uiiDifgZezjWrdLqP0ZPWemCtfWpUyTFkg9gPthBCthBCWwH3AqmuWWQEEAwghXIH2wDkgFhgkhDAXQlignaC+wRCbtSvA25kF04P46+kBjO7anGLjRTfBH2/lmV/COJOcqXdEpaG5quPp5as7nv58CjIvVPjSuLQcPt14GoB3x3XG1rJRdreXa2QXD1zsLIk4n8Gh2Et6x6lzTFYgpJRFwBPAerQP91+llOFCiLeFEFfGHFgPpAohIoAQ4AUpZSrwP+AscAw4AhyRUv5pqqzV1dHDka8mdWfTs4O4N9AbgxAsP5zA0M+289iSg4QnpusdUWlorOxh8GytUFzpeDr4k3Yie8t72gx9ZUgpeW3FcfIKS7grwJPgDm66xK6rrMzNmNhTO9CxaI86WX0tk55hlVKulVK2l1K2kVK+Z3zsDSnlKuPPUkr5rJTST0rZRUq5zPh4sZTyYSllJ+Nzz5oy561q3cyej+7tSsjzg5napwUWBgNrj13gzi938uBP+zmsvpkoNc3B/e+Op453GTuePoQvul3V8fTn0fNsO52Co7U5b4xuGFOI1rTJvVsgBKw9doGLWfl6x6lTGmcTtIn4NLXl3bFd2P5iMA/2b4W1hYHNJ5MZN383U7/bS+i5VHWcU6lZru3gviXwwHrw6a2N8bT2eZjXm+zDv/P2Ku2ah5dHdcLNoYqz4TUS3k1sub2jGwXFJfx6QI3PVJYqECbg4WTN63f5sfOlITw2uA32VubsPHOR+xaEMuGbPWw7naIKhVKzWvTRisTExeDSFtLOYrfyARYUvsy05glMVMPG3NBU48nqJaGxFJeo/5tXqAJhQq722iiZu14awjN3tMfJxoL90Zf45w/7GDNvFxvCL1CifhmVmiKEdoHdY6FE9X6HFOlID8MZ3kl7AcMvUyrd8dQYDWzXDF8XWxIu5xJyMvnmL2gkVIGoBU62Fjx1Rzt2zR7C7JEdcbW35Gh8OrMWHWTkFztYdSRRfWtRaky+NPBgRACD8z9jj89Dxo6nNZXqeGqsDAbB1N5qfKZrqQJRi+ytzHlkUBt2vDiEN0f74eFozamkTJ5cepihn27jtwNxFKo5KZRb9PXWs5xLycbd1ZXu0/9t7Hh6gKs6nkLev67jqbG7J9AbK3MD206nqGuajFSB0IGNpRn392vFthcH8/64Lvg0teHcxWxe+N9Rgj/eyuLQGPKLivWOqdRDZ5KzmB9yFoD3x3fB2sLM2PH02dUdT9v+DV92p3nC2hoZ46khaGJnyeiuzQFYsjf2Jks3DpUqEEKINkIIK+PPg4UQTwohnE0breGzMjdjcu8WhDw3mE8ndKV1MzviL+Xy2orjDPwwhO93RpFfrA49KZVTUiJ5ZfkxCopLmBDkff2cy2U7nrx7QXYK7SO/gXm9IWKlLrPa1TVXrqz+9UAceYXqS1pl9yB+B4qFEG2B79HGTfr5xi9RKsvczMD4Ht5sfGYQ8yb3oKOHA0kZ+byzOoLnt+Uwf+sZMtVQ48pN/HYwjn1RabjYWfLKqBvMQ9GiDzy4ASYsIsemOaSdhV+nw/fDIGZP7QWug7r6ONPV24nLOYX8eeTakYEan8oWiBLjldHjgM+llM8AahqmGmZmENwZ4Mm6pwbw3fQguvo
4k1kAH/6lDTX+2cbTXM5RQ40r10vJzOf9tdqor2+M9sPZ1vLGLxAC/O5mf8+v4M5PwK4ZxO+DH0fA0smQcroWUtdNV1pe1TDglS8QhUKIScA/0UZgBW0EVsUEhBDc4efOisdu4/kga3q1akpGXhFfbI6k39wtzF13Ul3xqVzlndURpOcWMqCdK3cbj6NXhjSYQ8+Z2onsQbPBwrZMx9PTjbLjaXTX5jjZWHAkPp0jcZf1jqOryhaI+4G+wHtSyighRCtgseliKaAVis6uZvz6cF9+mdWHAe1cyS4o5v+2naX/v7fw1p/hnE/P1TumorOtp5JZdSQRawsD743tUr0pRK0cIPhlrVAE3q89dvDHRtnxZG1hxoQgb0C1vFaqQEgpI6SUT0oplwohmgAOUsq5Js6mlNG7tQuLHuzNisf7cUcnd/IKS/hxVzSDPtzKy38cIzY1R++Iig5yCop4bWwo+m4AACAASURBVIU2nMbTd7SnhYvtra3QwQNGf64NL35NxxP7v2s0HU9TjNdE/HkksVHPIFnZLqatQghHIURTtNFVfxRCfGraaEp5uvk4890/g1j31ADuCvCksKSEpftiCf5kK8/+Gqbm121kvtgUSfylXDp6OPBg/1Y1t+Jm7bWOp/v/Au+ekJ0Ca57TDj1FrGrwHU8tXe0Y1L4Z+UUl/O9gvN5xdFPZQ0xOUsoMYDzwo5QyELjDdLGUm+nk6ch/Jvdg4zOD+EcPbXf4j0MJDP1sG4//fIgT52tuonulbgpPTOe7nVEIAXP/EYCFmQkua/LtCw9uhAkLoWkbSD0Dv07TOp5iQ2t+e3XIlZbXxXtjGu2QOJX9jTIXQngCE/j7JLVSB7R1s+eTCV3Z+vxgJvdugblBsOboeUZ+sYOZ/z1AWCM/ydZQFZdIXvnjGMUlkn/2bUk3HxNeliQE+I3RLrQb9fHfHU8/DIdlUxpsx1NwRze8nG2ISc1hx5mLesfRRWULxNtok/uclVLuF0K0BiJNF0upKp+mtrw/Thtq/P5+LbEyN7DpRBJj5+1i2vd72ReVpndEpQYt3BPNkfh0PByteW5YLU0hamYBvR4ydjy9pHU8nVxdpuMpqXZy1BIzg2By7xYALNoTrWsWvVT2JPVvUsoAKeWjxvvnpJT/MG00pTo8nWx4c7Q/O18awiOD2mBnacaOyItM+GYPE77Zw45INdR4fZd4OZeP12sjs741xh8H61ruOLdygOBXKuh4+qBBdTxN7OmDpZk2r0tcWuNrBKnsSWpvIcRyIUSyECJJCPG7EMLb1OGU6mvmYMXskR3ZNXsIT93eDkdrc/ZFpTHt+32Mnb+bjRFJqlDUQ1JK3lgZTnZBMcP93Rnu76FfmNKOpz3Q4U4ozIZtcxtUx5OrvRWjunggJSzd1/jGZ6rsIaYfgVVAc8AL+NP4mFLHOdta8szQ9uyaPYQXR3TAxc6SI3GXeWjhAUZ+sYM1R8+rocbrkfXhF9h0Igl7K3Peuruz3nE0zTrApJ/L73g68We973ia1lc7Wf3L/jgKG9n/lcoWiGZSyh+llEXG209AMxPmUmqYg7UFjw1uy86XhvD6XX64O1px8kImj/98iGGfbeOPQ/EUqaHG67SMvELeXBUOwAvDO+DhVMemEL2q46m11vH0y1TtZHY97njq0aIJnTwdSc0uYP+FxjWAX2ULxEUhxFQhhJnxNhVINWUwxTRsLM14sH8rtr0QzLtjO+PdxIazKdk8++sRgj/Zys97Y9VQ43XUx+tPkZSRTzcf59Lxguqc0o6nfVrHk60rxO39u+PpYv3rbRFClLa8bomt/4fNqqKyBeIBtBbXC8B54B604TeUesrawoypfXwJeX4wH9/bldaudsSl5fLK8mMM+nArP+6KIreg4kKxPLLxXl2qh4Mxl1gUGoO5QfDB+C6YGaoxnEZtKtvxNPDFvzue5vWG1c/Uu46nsd2b42BlzpnLJYQnpusdp9ZUtospVkp5t5SymZTSTUo5Fu2iOaWeszAzcE+gNxufHcRXk7rTwd2BCxl5vPVnBAM+3ML/bTtLVn7Rda9bebZxfZPSU2FxCa/8cQwpYeaA1nTydNQ7UuVZO8KQV+Ffh6DHPwEJB34o0/FUP678t7U05x+BWl/O4tDGc7L6Vi69fLbGUii6MzMIRndtzrqnBrBgWiAB3k5czCpg7rqT9Ju7hS82RZKeo4qCHr7dcY5TSZm0aGrLU7e30ztO9Th6wt1fwqN7oMOoazqevv+742nnZ3V27+LKYb0VhxPIqCvzs5j4/bqVAlHH93GV6jAYBMP8PVj5eD/++0AvgnybkJ5byGebTtPv31v48K+TpKqhxmtNTGo2X2zSjtu/N64zNpZmOie6RW4dYdJSuH8deAVBdjKsefbvjqfMJNj1hd4py9XWzZ5OTQ3kFhbze10Znykr2aTvl/ktvLZx9Xs1MkIIBrVvxsB2ruyNSuM/W86w88xF5m89yw+7ogDYGJHEgHau2rzHSo2TUvLq8uPkF5UwtltzBrRrQI2DvrfBzE3aVKeb3/q746l5d0g9g2WPnnonLNeQFhacSMtnUWgMM25r+ffQ6lIa23lv9ieVW6ayy3aZAIvGYO/XGuQgrUmgBt2wQAghMim/EAjApkaTKHWSEII9Z1PZWWYsmrxCrR32oYUHrlr2kUGtmT3yBlNdKlWyIiyBnWcu4mxrwWt3+ekdp+YJAf5joeOdcPAn2DoXEg8D0GvfY3DqHcr/YITSj6VKfShXtCxVWFb787mSEp61BpEhEW/Vne/IQYeeg5FTwaJmW59vWCCklA41ujWlXnpmaHueGfr3eD9nkrO449NtBHg7cTT+746O73dGEZ6YwXB/D4b5uePmWMf69OuRtOwC3ll9AoBXRnXC1d5K50QmdKXjKWAi7P4Sdn2JeXGONld2HVN6TL7CL+rC+C3+Bn/CDZahCssa/yzOh5xUeM/96iiDZmuTQN2CWznEpDRSbd3sAVj1RH8SL+eyIfwC68OT2BuVyo7Ii+yIvMjrK4/T3ceZ4f4eDPf3oKWrnc6p65f3154gLbuAPq2bcm9gIxnVxtoR+j8D4cs569iXNnc+xd8fhNzkg5IbPFfesjdbX/nr3bZ9Ox179GHAh1spAXa8OARP51ucpOlWFGTDgmBOtJhKp/vervHVqwKh3JLmzjbM6NeKGf1akZZdwKYTSWwIv8D2yIscir3ModjLfLDuJB3cHRju784wfw/8mztWb1rMRmL3mYv872A8luYG3h9XzSlE66s1z4NPb+Kc76GNa93r2JIGc9yd7Rnq35w1x86zdH88zw6tpdF0y7PmefAOIsl5CKY4uKsKhFJjmtpZMiHIhwlBPmTnF7HtdArrwy+w5WQyp5IyOZWUyZdbzuDdxIZhfh4M93cnqGXTun/RVy3KKyzmVeMUok8Et6V1M3udE9Wiw0sg8RA8tAV279c7zQ1N6+urFYh9sfxrSFvTTNZ0M7XwfqkCoVTLmDY3HmLazsqcUV08GdXFk4KiEkLPpbI+/AIbIpKIv5TLD7ui+GFXFC52ltzRyZ3hnd25rY
3qiJoXcoaoi9m0dbPnkUFt9I5Tu7JT4N6fwLLuH47s3aop7dzsiUzOYkN4EncGeNZ+iFp4v1SBUKplXDvLSi9raW5gYPtmDGzfjHfGdOZw3GXjeYsLRKfm8MuBOH45EIedpRmDO7ox3N+D4A7Nan+eA52dTsrk/7ZpJ2Y/GN8FS3MdvpXqqf/TeieoNCEE0/r68sbKcBaFRutTIGrh/TJpgRBCjAC+AMyA76SUc8tZZgIwB62/7IiUcrLx8RbAd4CP8blRUspoU+ZVTM9gEAT6NiHQtwmzR3bkdFIW643FIjwxgzVHz7Pm6HkszQzc1taF4f4e3NHJnWYODbiLBygpkbz8xzEKiyWTerWgZ8umekdSbmJcdy/mrjtJ6Lk0Tidl0t694TV9mqxACCHMgHnAUCAe2C+EWCWljCizTDvgZaCflPKSEMKtzCoWAu9JKTcKIewBNRZ1AyOEoIOHAx08HHjy9nbEpeWwISKJ9eEXOBCdxtZTKWw9lcIr4hiBLZqUdkS1cNGxa8RElu6P5WDMJVztrZg9oqPecZRKcLC2YFx3L5bsjWVxaAxvj6kj83PUIFPuQfQCzkgpzwEIIZYBY4CIMss8BMyTUl4CkFImG5f1A8yllBuNj9ePEb2UW+LT1JYH+7fiwf6tuJiVz+YTSawPT2Jn5EUOxFziQMwl3lt7go4eDqXFopOnQ73v8knOyGPuupMAzLnbDyfbxnVorT6b1teXJXtj+eNQAi+O6Ii9VcM6ai9MNe2kEOIeYISUcqbx/jSgt5TyiTLLrABOA/3QDkPNkVL+JYQYC8wECoBWwCZgtpSy+JptzAJmAbi7uwcuW7as2nmzsrKwt697HSMqF+QWSY6lFHMwqYgjKcXklfktaGYj6OFuRqC7OW2dDeRkZ9e792teWB77LxQT0MyMZ3pY1WrBU79fVVNervf35nL6UgnT/SwZ0kKf4n4r71dwcPBBKWVQec+ZstyV91t+bTUyB9oBgwFvYIcQorPx8QFAdyAW+AWYAXx/1cqkXAAsAAgKCpKDBw+udtitW7dyK683FZVLM9L4Z35RMbvPprIh/AIbI5JIySpgfXQR66OLcLW3xN/ZghlD/bitjQtW5nWnI6qi92vLyST2XziAjYUZ8+4fiE/T2j18pn6/qqa8XBlNEnly6WH2pVnx1rQBuuzRmur9MmWBiEc7wXyFN5BYzjKhUspCIEoIcQqtYMQDh8scnloB9OGaAqE0PlbmZgR3cCO4gxvvjpUcir3E+uMXWB9xgbi0XLZlwbYf9+NgZW7siHJncAe3Ornrn51fxOsrtClEnxvWvtaLg1IzRvh74GqvTeG7P/oSvVo1nAYDU/6v2Q+0E0K0AhKA+4DJ1yyzApgE/CSEcAXaA+eAy0ATIUQzKWUKMAQ4gKKUYWYQ9GzZlJ4tm/LqnZ04cT6Tb9aEcirbmpMXMvnzSCJ/HknE0txA/7auDPd3545O7rjUkXGNPt14moTLuXT2cmTGbS31jqNUk6W5gUm9fPhqyxkWhcaoAlEZUsoiIcQTwHq08ws/SCnDhRBvAweklKuMzw0TQkQAxcALUspUACHE88Bmoe2vHQS+NVVWpf4TQuDX3JFx7SwZPHggsak5pe2zB2MvseVkMltOJmMQxwhq2dR4ktsd7yb6fGs/Fp/Oj7uiMAj4YFwA5npciavUmEm9WjAv5Ax/HT9PcmYn3BwaxkCVJt3vllKuBdZe89gbZX6WaDPTXTc7nbGDKcCU+ZSGq4WLLQ8NbM1DA1uTnJnHpohk1odfYPfZi+yLSmNfVBrvrI7Av7ljaUdUe3f7Wjl+XFRcwsvLj1Ii4cH+reji7WTybSqm1dzZhjs6ubMhIolf98fxxJC6N45UddS9A7OKUsPcHKyZ3LsFk3u3ICOvkJCTyWwITyLkVDLhiRmEJ2bw6cbTtHSx1YYq9/egu48zBhONEfXT7miOJ2Tg5Wyj70BvSo2a1teXDRFJLNkbyyOD2jSIvUJVIJRGxdHagjHdvBjTzYu8wmJ2nbnI+vALbDqRTHRqDt9sP8c328/h5mDFUD93hvt70Ke1S40NexF/KYdPNpwG4O0x/tjVwZPnSvX0a+NKK1c7oi5ms/lkMsP9PfSOdMvUb6fSaFlbmHF7J3du7+ROUXEJB2IuaQMKhieRcDmXJXtjWbI3Fgdrc243jhE1sH2zan+oSyl5Y2U4uYXF3NnFk9s7ud/8RUq9YTAIpvbx5Z3VESwOjVEFQlEaCnMzA31au9CntQtv3OVHeGJG6Unu00lZrAhLZEVYIlbmBga0c2WYcYyopnaVG7RweWQBWU3Ps+VkMg7W5rw5ugFOIapwTw9vPlp/kh2RFzmXklXvh2tXBUJRriGEoLOXE529nHhuWAeiLmaXFovDsZfZdCKZTSeSjW22TUrPW3g5VzxN+8qzhexO1kaZeWlERzUdawPlZGvBmK5e/HIgjiV7Y3m9ns8lrgqEotxEK1c7HhnUhkcGtSEpI48NEdqseXvOphJ6Lo3Qc2m89WcEXbycGO6vnbdo63Z9R1RKZj6Bvk2Y3KuFTn8TpTZM6+vLLwfi+O1AHM8P64CNZd25or+qVIFQlCpwd7RmWh9fpvXxJT1X64haH36BradSOJaQzrGEdD7ecJrWrnYMM15rUVisjTBjYSb4YHwXk3VHKXVDZy8nuvk4ExZ3mVVHEpjYs/5+IVAFQlGqycnGgrHdvRjbXeuI2hF5pSMqiXMXs/m/bWf5v21nubIj8fDANg1yzgDletP7+hIWd5mFe2KYEORTb0ccrv+NuopSB1hbmDHUz52P7+3KtD6+Vz13ZcDk/4ScoeXsNaW3zzae1iGpUhtGdfGkia0F4YkZhMVd1jtOtak9CEWpYc8N68BzwzoAWmvr6aQshn++nei5d+qcTKkt1hZmTOjpwzfbzrFoTwzdWzTRO1K1qD0IRTGhK7PmKY3P1N6+CAGrj54nLbtA7zjVogqEoiiKCfg0tSW4gxsFxSX8eiBO7zjVogqEoiiKiVw5H7VkbwzFJaaZvdOUVIFQlFowpo2aZ7oxGti+GT5NbYhLy2X76RS941SZKhCKUgvGtavckBxKw2JmEEzpre1FLNwTrWuW6lAFQlEUxYQmBPlgaW5g6+kUYlNz9I5TJapAKIqimFBTO0vuCvBESliyL0bvOFWiCoSiKIqJXTlZ/ev+OPIKi3VOU3mqQCiKophYNx9nOns5cimnkDVHz+sdp9JUgVAURTExIUTpXsSi0PpzmEkVCEVRlFpwd1cvHK3NCYu7zLH4dL3jVIoqEIqiKLXAxtKMe4N8AFhcT/YiVIFQFEWpJVN6a3NDrDySQHpOoc5pbk4VCEVRlFrSupk9A9q5kldYwm8H6/74TKpAKIqi1KKppeMzxVJSx8dnUgVCURSlFt3e0Y3mTtZEXcxm19mLese5IVUgFEVRapG5mYHJxnMRi/bU7ZPVqkAoiqLUsgk9fbAwE2w6kUTC5Vy941RIFQhFUZRa5uZgzYjOnpRIWLo3Vu84FVIFQlEURQfT+2onq5ftj6WgqETnNOVTBUJRF
EUHQb5N6OjhwMWsAv4Kv6B3nHKpAqEoiqIDIURpy+viOnqy2qQFQggxQghxSghxRggxu4JlJgghIoQQ4UKIn695zlEIkSCE+I8pcyqKouhhbHcv7K3M2RedxskLGXrHuY7JCoQQwgyYB4wE/IBJQgi/a5ZpB7wM9JNS+gNPX7Oad4BtpsqoKIqiJ3src8b38ALqZsurKfcgegFnpJTnpJQFwDJgzDXLPATMk1JeApBSJl95QggRCLgDG0yYUVEURVdXhgFffjiBzLy6NT6TKQuEF1B2sJF442NltQfaCyF2CSFChRAjAIQQBuAT4AUT5lMURdFdO3cH+rRuSk5BMcsPJ+gd5ypCStOMBSKEuBcYLqWcabw/DeglpfxXmWVWA4XABMAb2AF0BqYCtlLKD4UQM4AgKeUT5WxjFjALwN3dPXDZsmXVzpuVlYW9vX21X28qKlfVqFxVo3JVjaly7btQxPywfJrbC97rZ4MQotZyBQcHH5RSBpX7pJTSJDegL7C+zP2XgZevWeb/gBll7m8GegJLgFggGrgIZABzb7S9wMBAeStCQkJu6fWmonJVjcpVNSpX1ZgqV0FRsQx6d6P0fWm13H3mYpVffyu5gAOygs9VUx5i2g+0E0K0EkJYAvcBq65ZZgUQDCCEcEU75HROSjlFStlCStkSeB5YKKUstwtKURSlvrMwMzCplzY+U12aTMhkBUJKWQQ8AawHTgC/SinDhRBvCyHuNi62HkgVQkQAIcALUspUU2VSFEWpqyb3aoGZQbA+/AJJGXl6xwFMfB2ElHKtlLK9lLKNlPI942NvSClXGX+WUspnpZR+UsouUsrrTiJIKX+S5Zx/UBRFaUg8nKwZ5udOUYlk2b66MZmQupJaURSljrjS8vrzvhgKi/Ufn0kVCEVRlDqibxsX2jSzIykjn00RSXrHwVzvAKZUWFhIfHw8eXk3P57n5OTEiRMnaiFV1ahcVVOVXNbW1nh7e2NhYWHiVIpSOUIIpvXxZc6fESwKjWFkF09d8zToAhEfH4+DgwMtW7a8aV9xZmYmDg4OtZSs8lSuqqlsLiklqampxMfH06pVq1pIpiiVMz7Qm3//dYrdZ1M5k5xJWzf9/p816ENMeXl5uLi4VPmiE6XhE0Lg4uJSqb1LRalNjtYWjO2uDTqxOFTfyYQadIEAqlUcPtt42gRJlLpGfXFQ6qqpfbRrIn4/GE92fpFuORp8gaiOLzZH6h1BUZRGzL+5E4G+TcjML2JlWKJuOVSBMKHBgwezfv36qx77/PPPeeyxx274uitjqiQmJjJt2rQK133gwIEbrufzzz8nJyen9P6oUaO4fPlyZaKXa8+ePbRq1Ypu3brRr18/7O3t6dChA926dWP69OlVWldJSQlz586t8Hlvb+9byqoo9d2VKUkX7om+MhRRrVMFwoQmTZrEtQMILlu2jEmTJlXq9c2bN2fRokXV3v61BWLt2rU4OztXe31//fUXH3/8MWFhYezatYugoCCWLFlCWFgYCxcurNK6blYgFKWxG9HZAxc7S05eyORQ7CVdMjToLqayWs5eY5Llo+feWeFz99xzD6+99hr5+flYWVkRHR1NYmIi/fv3JysrizFjxnDp0iUKCwt59913GTPm6ukyoqOjGTVqFBEREeTm5nL//fcTERFBp06dyM3NLV3u0UcfZf/+/eTm5nLPPffw1ltv8eWXX5KYmEhwcDCurq6EhITQsmVLDhw4gKurK59++ik//PADADNnzuTpp58mOjqakSNH0r9/f3bv3o2XlxcrV67ExsYGgM2bN/Pss89W+PctKirixRdfZOfOneTl5fHkk08yc+ZMEhISmDhxIllZWRQVFbFgwQL++OMPMjMz6datGwEBAZUqMBcvXuSBBx4gOjoae3t7FixYQOfOndmyZQvPPPNM6TmFXbt2cfny5eu2edttt910G4pSV1iZmzGxpw/zt55l4Z4YAn2b1noGtQdhQi4uLvTq1Yu//voL0PYeJk6ciBACa2trli9fzqFDhwgJCeG555674W7k119/ja2tLUePHuXVV1/l4MGDpc+99957HDhwgKNHj7Jt2zaOHj3Kk08+SfPmzQkJCSEkJOSqdR08eJAff/yRvXv3Ehoayrfffsvhw4cBiIyM5PHHHyc8PBxnZ2d+//13QPtwtrCwwMnJqcKMCxYswM3NjX379rF//37mzZtHbGwsixcvZvTo0YSFhXHkyBECAgKYO3cuDg4OVdr7eP311+nduzdHjx5lzpw5zJgxA4CPPvqIBQsWEBYWxrp167C2ti53m4pS30zu3QKDgLXHznMxK7/Wt99o9iBu9E0fru6fbzl7zU2Xr6wrh5nGjBnDsmXLSr+1Syl55ZVX2L59OwaDgYSEBJKSkvDw8Ch3Pdu3b+fJJ58EICAg4KoPvF9//ZUFCxZQVFTE+fPniYiIuOEH4s6dOxk3bhx2dnYAjB8/nh07dnD33XeXnmMACAwMJDo6GoANGzYwbNiwG/5dN2zYwIkTJ0oPq6WnpxMZGUnPnj15+OGHycvLY+zYsXTt2pWioqp3ZuzcuZM1a7Q9u2HDhjFjxgyys7Pp168fTz/9NJMnT2b48OF4enqWu01FqW+8m9gypKM7m04k8cv+OB4Pblur21d7ECY2duxYNm/ezKFDh8jNzaVHjx4ALFmyhJSUFA4ePEhYWBju7u437ckvry0zKiqKjz/+mM2bN3P06FHuvPPOm67nRnsqVlZWpT+bmZmVfpCvW7eOESNG3HS98+fPJywsjLCwMKKiorj99tsZMmQIW7duxdPTkylTprBkyZIbrqeyua/cf+211/jmm2/Iyspi8ODBREZG1tg2FUVv04wnq3/eG0txSe2erFYFwsTs7e0ZPHgwDzzwwFUnp9PT03Fzc8PCwoKQkBBiYm48BvzAgQNLP+SOHz/O0aNHAcjIyMDOzg4nJyeSkpJYt25d6WscHBzIzMwsd10rVqwgJyeH7Oxsli9fzoABAyrctpSSo0ePlu5ZVGT48OHMnz+/tKicOnWK3NxcYmJi8PDwYNasWcyYMYPDhw9jbq7tvFZlT6Lse7Bp0ya8vb2xs7Pj7NmzBAQE8PLLLxMQEMCpU6fK3aai1EcD2rri62JLwuVcQk4m1+q2G80hJj1NmjSJ8ePHX9XRNGXKFEaPHk1QUBDdunWjY8eON1zHo48+yv33309AQADdunWjV69eAHTt2pXu3bvj7+9P69at6devX+lrZs2axciRI/H09LzqPESPHj2YMWNG6TpmzpxJ9+7dSw8nXevgwYN07979pheWPfzww8TGxpYWEjc3N1auXMnmzZv59NNPsbCwwN7ensWLFwPw4IMPEhAQQFBQULnnIfz9/Uu3OXnyZN5+++3S98De3p4ff/wRgI8//pgdO3ZgMBjo1KkTw4YNY/HixeVuU1HqG4NBMLW3L++tPcHC0Bju8HOvvY1XNNVcfbuVN+VoRETEDSbau1pGRkbpz59uOFXp15la2Vx6eeedd+TSpUuveqwu5CpPVXNV5XfkVjS2KTRvlcp1tUvZ+bL9
q2ul70urZVRK1nXP18cpR+utZ4a21ztCnfLaa69x33336R1DURotZ1tL7u7aHIAle2tvSlJVIBRFUeqBKyerfz0QT15hca1sUxUIRVGUeiDA25mu3k6k5xay6kjtjM+kCoSiKEo9MdU4Jeni0No5zKQKhKIoSj0xumtznG0tOBqfzpE40w9mqQpEWTs/g0z954FVFEUpj7WFGROCfABYVAt7EapAlJWVDLu+qLHVpaam0q1bN7p164aHhwdeXl6l9wsKCiq1jkcffZRTp07dcJl58+bV2JXC/fv3JywsrEbWpShKzZvSW5tM6M8jiVzKrtznSHWpC+XK6vcUzO+j/elw6xejuLi4lH7YzpkzB3t7e55//vmrlintNzaUX6u//vrrm86x/Pjjj99yVkVR6gdfFzsGtW/GttMp/HYwjlkD25hsW42nQMypeBRSgKs+gj+pwnUQc9KrHOXMmTOMHTuW/v37s3fvXlavXs1bb71VOl7TxIkTeeONNwBtULqvv/6azp074+rqyiOPPMK6deuwtbVl5cqVuLm58dprr+Hq6srTTz9N//796d+/P1u2bCE9PZ0ff/yR2267jezsbKZPn86ZM2fw8/MjMjKS77777qbDZwDk5ubyyCOPcOjQISwsLPj888/p3r07x44d44EHHqCwsJCSkhJWrFhBs2bNmDBhAomJiRQXFzNnzhzuueeeKr9HiqJUbFofX7adTmFxaCwz+7c22XbU6DXyGwAAC+lJREFUISadRERE8OCDD3L48GG8vLyYO3cuBw4c4MiRI2zcuJGIiIjrXpOens6gQYM4cuQIffv2LR0Z9lpSSvbt28dHH33E22+/DcBXX32Fh4cHR44cYfbs2VUam+jLL7/E0tKSY8eOsWjRIqZNm0ZBQQHz58/n+eefJywsjP3799O8eXPWrl1Ly5YtOXLkCMePH2fo0KHVe4MURalQcEc3vJxtiE3LYXtkCssjTXOoqRHtQdz4m35mZiYOVgZYEAz9n4Zuk00ap02bNvTs2bP0/tKlS/n+++8pKioiMTGRiIgI/Pz8rnqNjY0NI0eOBLShuHfs2FHuusePH1+6zJXxlXbu3MlLL70EaOM3+fv7Vzrrzp07eeGFFwBtfKTmzZtz7tw5brvtNt59911iYmIYP348bdu2JSAggNmzZzN79mxGjx591dhQivL/7d17jFTlGcfx749ll7UuSAIWoYuiBROu1YVQLGqoFKTQyB/eaBRK09ZLG6UlpcWm6cWg6SWt9UKCl9JKFZDaYikqiFxKWhRdKIK6VZBiim7DpSrd4G3p0z/OuzCMZ5iZ3TlzJuzzSTZ75px35jz77My857xn5nldaVR1EdeMOZOfrXqFh559nadf+5DSXT09xs8gMj3+bagflXjnABydiwGiSXruvPNO1q1bx/bt25k0aVJsye6ampqjy5mluLO1lezObGMdmNM2132nT5/O8uXL6datGxMmTGDjxo0MHjyYxsZGhg4dypw5c7j99tvbvV/nXG5XjepPTVUX1iZY4dU7iKDri8vgza0w+edl3/ehQ4fo3r07PXr0oLm5mdWrV5d8HxdeeCHLli0DYMeOHbFDWLlkltluamqiubmZc845h927dzNw4EBmzZrFlClT2L59O2+88QZ1dXVMnz6d2bNns3Xr1pL/Lc456F3XjcnDz6ADx355dZ4hpjy6HD4AV/4Wak7N27bUGhoaGDJkCMOGDftIye5Suemmm5gxYwYjRoygoaGBYcOG5Zw+9NJLL6W6uhqAiy66iIULF3L99dczfPhwqqurWbRoETU1NSxevJglS5ZQXV1Nv379mDdvHps2bWLu3Ll06dKFmpoaFixYUPK/xTkXmX7BWTy2LSq78X7rEbp1rSrp46sjQw+VZNSoUdbY2HjcuqamJgYPHlzQ/TOnHK0kpYqrtbWV1tZWamtr2blzJxMnTmTnzp1HJ+5JK65SKzauYp4jHbFhwwbGjRuX+H6K5XEVpxLiumPNq9y5dmfedrPGDyqoMrWkLWY2Km6bn0F0Ei0tLYwfP57W1lbMjHvvvbfdnYNzLj3fmnDucW/8bx/+gPNuXcOen0wp+b78HaKT6NmzJ1u2bEk7DOdcifX8WE3+Ru2U6EVqSZMkvSJpl6S5OdpcJellSS9JWhzWnSfpmbBuu6Sr2xvDyTKE5krPnxvOnVhiZxCSqoD5wARgL/C8pBVm9nJGm0HALcBYM3tL0sfDpsPADDPbKakfsEXSajMrqnxhbW0tBw8epFevXnnnU3adi5lx8OBBamtr0w7FuYqV5BDTaGCXme0GkLQUmApkfr7ya8B8M3sLwMz2hd+vtjUwszcl7QNOB4rqIOrr69m7dy/79+/P2/a9996ryDcLj6s4xcRVW1tLfX19whE5l7ypn6xO5HGT7CA+Afwr4/Ze4NNZbc4FkPQ3oAr4kZmtymwgaTRQA7yWvQNJ1wHXAfTp04cNGza0O9iWlhbq6uraff+keFzFKTau118vz8QrLS0tHXp+JsXjKk6lxjWh7wfJxNVWTbTUP8CVwAMZt6cDd2e1WQksB6qBs4k6kZ4Z2/sCrwBj8u1v5MiR1hHr16/v0P2T4nEVx+MqjsdVnJMxLqDRcryvJnmRei/QP+N2PZA9kepe4E9m9qGZ/TN0BoMAJPUAHge+b2bPJhinc865GEl2EM8DgySdLakGmAasyGrzGPBZAEm9iYacdof2y4FFZvb7BGN0zjmXQ6LfpJY0GfgV0fWFhWZ2m6RbiU5pVij6aNEvgEnAEeA2M1sq6VrgN8BLGQ8308xyTnUmaT/QkQHl3sCBDtw/KR5XcTyu4nhcxTkZ4zrLzE6P23DSlNroKEmNluPr5mnyuIrjcRXH4ypOZ4vLq7k655yL5R2Ec865WN5BHHNf2gHk4HEVx+MqjsdVnE4Vl1+DcM45F8vPIJxzzsXyDsI551ysTtVBSFooaZ+kF3Nsl6S7Qnny7ZIaKiSucZLekbQt/PygTHH1l7ReUlMovT4rpk3Zc1ZgXGXPmaRaSc9JeiHE9eOYNt0kPRLytVnSgAqJa6ak/Rn5+mrScWXsu0rS3yWtjNlW9nwVEFOaudojaUfYb2PM9tK+HnPV4DgZf4CLgQbgxRzbJwNPAgLGAJsrJK5xwMoU8tUXaAjL3YFXgSFp56zAuMqes5CDurBcDWwmq44Y8HVgQVieBjxSIXHNBO4p93Ms7Hs2sDju/5VGvgqIKc1c7QF6n2B7SV+PneoMwsw2Av85QZOpROU9zKL6Tz0l9a2AuFJhZs1mtjUs/xdoIqrSm6nsOSswrrILOWgJN6vDT/anQKYCD4blR4HxSniykgLjSoWkemAK8ECOJmXPVwExVbKSvh47VQdRgLgS5am/8QQXhCGCJyUNLffOw6n9+URHn5lSzdkJ4oIUchaGJrYB+4A1ZpYzX2bWCrwD9KqAuAAuD8MSj0rqH7M9Cb8CvgP8L8f2NPKVLyZIJ1cQdexPSdqiaLqDbCV9PXoHcby4I5NKONLaSlQv5VPA3URFDstGUh3wB+CbZnYoe3PMXcqSszx
xpZIzMztiZucRVS8eLWlYVpNU8lVAXH8GBpjZCOBpjh21J0bSF4B9ZnaiydLLmq8CYyp7rjKMNbMG4PPANyRdnLW9pPnyDuJ4hZQoLzszO9Q2RGBmTwDViqrfJk5SNdGb8MNm9seYJqnkLF9caeYs7PNtYANRIcpMR/MlqStwGmUcXswVl5kdNLP3w837gZFlCGcscJmkPcBS4BJJD2W1KXe+8saUUq7a9v1m+L2PqOL16KwmJX09egdxvBXAjPBJgDHAO2bWnHZQks5oG3dVNMNeF+BgGfYr4NdAk5n9MkezsueskLjSyJmk0yX1DMunAJ8D/pHVbAXwpbB8BbDOwtXFNOPKGqe+jOi6TqLM7BYzqzezAUQXoNeZ2bVZzcqar0JiSiNXYb+nSuretgxMBLI/+VjS12OSU45WHElLiD7d0lvSXuCHRBfsMLMFwBNEnwLYBRwGvlwhcV0B3CipFXgXmJb0m0owlmgmwB1h/Brge8CZGbGlkbNC4kojZ32BByVVEXVIy8xspTJK3BN1bL+TtIvoSHhawjEVGtfNki4DWkNcM8sQV6wKyFe+mNLKVR9geTju6QosNrNVkm6AZF6PXmrDOedcLB9ics45F8s7COecc7G8g3DOORfLOwjnnHOxvINwzjkXyzsI5/KQdCSjcuc2SXNL+NgDlKOKr3Np61Tfg3Cund4NZSqc61T8DMK5dgq1+X+qaK6F5yQNDOvPkrQ2FHNbK+nMsL6PpOWhgOALkj4THqpK0v2K5mp4KnzbGUk3S3o5PM7SlP5M14l5B+FcfqdkDTFdnbHtkJmNBu4hqgJKWF4Uirk9DNwV1t8F/CUUEGwAXgrrBwHzzWwo8DZweVg/Fzg/PM4NSf1xzuXi36R2Lg9JLWZWF7N+D3CJme0OxQP/bWa9JB0A+prZh2F9s5n1lrQfqM8o9NZWrnyNmQ0Kt78LVJvZPEmrgBaiSrSPZczp4FxZ+BmEcx1jOZZztYnzfsbyEY5dG5wCzCeqFrolVDN1rmy8g3CuY67O+P1MWN7EsaJy1wB/DctrgRvh6AQ+PXI9qKQuQH8zW080eU1P4CNnMc4lyY9InMvvlIyqsQCrzKzto67dJG0mOtj6Ylh3M7BQ0hxgP8cqas4C7pP0FaIzhRuBXKWYq4CHJJ1GNAnMHWEuB+fKxq9BONdO4RrEKDM7kHYsziXBh5icc87F8jMI55xzsfwMwjnnXCzvIJxzzsXyDsI551ws7yCcc87F8g7COedcrP8DAFLYHmF4tIUAAAAASUVORK5CYII=\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "# Plotting our loss charts\n",
    "import matplotlib.pyplot as plt\n",
    "\n",
    "history_dict = history.history\n",
    "\n",
    "loss_values = history_dict['loss']\n",
    "val_loss_values = history_dict['val_loss']\n",
    "epochs = range(1, len(loss_values) + 1)\n",
    "\n",
    "line1 = plt.plot(epochs, val_loss_values, label='Validation/Test Loss')\n",
    "line2 = plt.plot(epochs, loss_values, label='Training Loss')\n",
    "plt.setp(line1, linewidth=2.0, marker='+', markersize=10.0)\n",
    "plt.setp(line2, linewidth=2.0, marker='4', markersize=10.0)\n",
    "plt.xlabel('Epochs')\n",
    "plt.ylabel('Loss')\n",
    "plt.grid(True)\n",
    "plt.legend()\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [
    {
     "data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAY4AAAEGCAYAAABy53LJAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nOydeVxVRf/H38OOsgkqLpi44IoIirjgRuZSPVmWa2qZqS3aZpm2/NqenqdsMSvLSktbFNJ8XMp9IzNzwwUVRREVARVF2Xfu/P44lysiu1zuBeb9ep0XnDkzc77ncrnfO/Od+XyFlBKFQqFQKMqLhakNUCgUCkXNQjkOhUKhUFQI5TgUCoVCUSGU41AoFApFhVCOQ6FQKBQVwsrUBlQHDRs2lJ6enpVqm56eTv369avWoCpA2VUxlF0VQ9lVMWqrXWFhYdeklI1uuyClrPVH9+7dZWXZuXNnpdsaE2VXxVB2VQxlV8WorXYBB2Uxn6lqqkqhUCgUFUI5DoVCoVBUCOU4FAqFQlEh6kRwvDhyc3OJjY0lKyur1HrOzs6cPHmymqwqP8quimFOdtnZ2eHh4YG1tbWpTVHUclafyWHgwKrvt846jtjYWBwdHfH09EQIUWK91NRUHB0dq9Gy8qHsqhjmYpeUksTERGJjY2nVqpWpzVHUctaezeVzI/RbZ6eqsrKycHNzK9VpKBRVjRACNze3Mke6CoU5U2cdB6CchsIkqPedojqQRlQ+r9OOozJ8tvW0qU1QKBSKMll5MBaALScuV3nfynFUkM+3n6mSfgYOHMjmzZtvKZs/fz7PPvtsqe0cHBwAuHTpEiNHjiyx74MHD5baz/z588nIyDCc33fffSQlJZXH9GL5559/aNWqFYGBgfj6+uLg4ED79u3x9fXlscceq1BfOp2ODz/8sNQ6Bw4cQAjB9u3bK22zQlGb+GzraTznrDccr64KB2Daz2G3lFfFl986Gxw3NePGjSMkJIShQ4caykJCQvj444/L1b5p06b89ttvlb7//PnzmTBhAvXq1QNgw4YNle4LYNOmTXzyyScMGTIER0dHBg4cyCeffIK/v3+F+ypwHHPmzCmxTnBwMH379iU4OJhBgwbdiemlkpeXh5WV+jdRmD8vDW7HS4PbARCVkMY98/4E4MS7Q6lvW7XvYTXiMBEjR47kjz/+IDs7G4Dz588THx9P3759SUtLY9CgQXTr1o0uXbqwdu3a29pfuHABb29vADIzMxk7diw+Pj6MGTOGzMxMQ71nnnkGf39/OnfuzNtvvw3AF198QXx8PEFBQQQFBQHg6enJtWvXAJg3bx7e3t54e3szf/58g30dO3Zk6tSpdO7cmSFDhtxyn+3bt3PPPfeU+Lx5eXnMnDmTgIAAfHx8WLx4MQBxcXH07dsXX19fvL292bNnD3PmzCE1NbXE0YpOp2PVqlX8+OOPbNy4kZycHMO1JUuW4OPjQ9euXXniiScAuHz5suH16dq1K/v27SMqKgpfX19Duw8//JD3338fgL59+/LGG2/Qv39/FixYwNq1a+nZsyd+fn4MGTKEhIQEQFup9fjjj9OlSxd8fHxYs2YN3377LbNmzTL0u3DhQl599dUSXxeFwhisPHjR8HtVOw1QIw4APOesN0r98x/eX+I1Nzc3AgIC2LRpEw8++CAhISGMGTMGIQR2dnasXr0aJycnrl27Rq9evRg+fHiJQdWFCxdSr149wsPDCQ8Pp1u3boZr//nPf3B1dSU/P59BgwYRHh7O888/z7x589i5cycNGza8pa+wsDCWLFnCvn37kFLSs2dPBgwYQIMGDThz5gzBwcEsWrSI0aNHs2rVKiZMmMC1a9ewtrbG2dmZ1NTUYm387rvvaNy4Mfv37yc7O5tevXoxZMgQgoODeeCBB5g9ezb5+flkZmYSEBDA4sWLOXLkSLF97dq1iw4dOtC6dWsCAwPZtGkTw4cP5+jRo8ydO5c9e/bg6urK9evXAZg+fTpBQUG88sor5OXlkZGRYfjwL4mUlBR27doFwI0bNwyv/zfffMOnn37K3Llzeeedd2jUqBHHjh1DSklSUhJWVlb4+vrywQcfYGVlxZIlS1i6dGmp91IoqpLcfB2rDsUZ9R5qxGFCCqarQJumGjduHKCthnj99dfx8fHhnnvuIS4ujitXrpTYz65du5gwYQIAPj4++Pj4GK6tWLGCbt264efnx4kTJ4iIiCjVpt27dzNixAjq16+Pg4MDDz/8MH/99RcArVq1MnxL7969O+fPnwdgy5YtDBkypNR+t2zZwpIlS/D19aVnz54kJSVx5swZevToweLFi3n33Xc5fvy4IYZTGsHBwYwdOxaAsWPHEhwcDMCOHTsYM2YMrq6uAIafoaGhTJ48GQArKyucnJzKvEdB/wAxMTEMGTKELl26MG/ePE6cOAHAtm3bmD59OqCtlGrQoAGOjo7079+fjRs3cuLECSwtLenUqVOZ91MoqoqdpxK4lpZNm0bGU+tVIw5KHxkU3TjmOWd9qfUrwkMPPcTMmTM5dOgQmZmZhpHCsmXLuHr1KmFhYVhbW+Pp6Vnmuv/iRiPnzp3jk08+4cCBAzRo0IBJkyaV2U9pS/hsbW0Nv1taWhqmqjZu3MjMmTPL7Pfrr78uNh4RGhrK+vXrGT9+PK+99hpjxowpsZ/c3FxWr17Nhg0bePfdd9HpdCQlJZGeno6UssRRWdFyKysrdDqd4TwrK+uWWEZhKerp06fz+uuvc99997Ft2zZD4L6k+02ZMoV58+bh6elpmC5TKKqLFfrVVGN6tOBE5Fmj3EONOEyIg4MDAwcOZPLkyYbRBkBycjKNGzfG2tqanTt3cuHChVL76d+/P8uWLQPg+PHjhIdrqylSUlKoX78+zs7OXLlyhY0bNxraODo6Fjut1L9/f9asWUNGRgbp6emsXr2afv36lXhvKSXh4eG3xAuKY+jQoXz99dfk5eUBEBkZSWZmJhcuXKBJkyZMmzaNSZMmcfjwYcMHeEHdwmzZsoUePXpw8eJFzp8/T0xMDA888ADr1q3jnnvuISQkxDBFVfAzKCiI77//HoD8/HxSUlJo0qQJ8fHx3Lhxg6ysLNavL3n6MTk5mebNmyOl5McffzSUDxkyhAULFhhehxs3bgAQGBjI2bNnWblyZalOUKGoahJSstgZmYCVhWCEnwcjvGyMch/lOEzMuHHjOHr06C1TI+PHj+fgwYP4+/uzbNkyOnToUGofzzzzDGlpafj4+PDRRx8REBAAQNeuXfHz86Nz585MnjyZwMBAQ5tp06Zx7733GoLjBXTr1o1JkyYREBBAz549mTJlCn5+fiXeOywsDD8/vzI3tT311FN4eXkZguDPPPMMeXl5bN++3WDn2rVree655wB48skn8fHxuS04HhwczIgRI24pe+SRR1i+fDk+Pj68+uqr9O/fH19fX0OQesGCBWzfvp0uXbrg7+/PqVOnsLOz4/XXX6dHjx4MHz681Omkd955hxEjRjBgwADc3d0N5W+//TZXr
lzB29sbX19fw5QeaIsf+vfvj7Ozc6mvi0JRlfzvcBz5OsndHRrTyNG27AaVpbgkHbXtKC6RU0RERLkSmaSkpNxy3nL2H+VqZ2yK2mUq/v3vf8vg4GDDubnYVZTqtmvo0KEyNDS0xOsF77/amgDIWCi7Skan08mgj3fKlrP/kNsiLkspjZfIScU4KsgLg7xMbYJZ8eabb5raBLMiMTGR3r170717dwYMGGBqcxR1iLALN4i+lk5jR1sGtLs922tVohxHBSnYYKNQFIebmxunTytZGkX18+sBbe/GI909sLI0bhRCxTgUCoWihpOWncf6Y5cAGNXdw+j3U45DoVAoajjrw+PJyMknwNOV1o3K3gt1pyjHoVAoFDWcgmmqUf7GH22AchwKhUJRo4lKSOVQTBL1bSy536dptdzTqI5DCDFMCBEphIgSQhQrdSqEGC2EiBBCnBBCLNeXBQkhjhQ6soQQD+mvLRVCnCt0rfSdZ1XF7s8gtWTZj4qSmJiIr68vvr6+NGnShObNmxvOC4v2lcYTTzxBZGRkqXW++uorw+bAquDKlStYWVkZNtQpFArTUrBT/IGuzahnUz3rnYx2FyGEJfAVMBiIBQ4IIdZJKSMK1fECXgMCpZQ3hBCNAaSUOwFffR1XIArYUqj7WVLKymuKV4a0BPj7cxj23yrpzs3NzSDi98477+Dg4MArr7xyS52CNdMWFsX79yVLlpR5nwItpari119/pXfv3gQHB/Pkk09Wad+FUXLmCkXZ5Obr+N8hzXGM7tGi2u5rzBFHABAlpYyWUuYAIcCDRepMBb6SUt4AkFIWJ1k6Etgopcwo5lr1EfgCHF1epaOO4oiKisLb25unn36abt26cenSJaZNm2aQRn/vvfcMdfv27cuRI0fIy8vDxcWFOXPm0LVrV3r37m1Qf33zzTcN0uh9+/Zlzpw5BAQE0L59e/bs2QNAeno6jzzyCF27dmXcuHH4+/uXqEwbHBzM/PnziY6O5vLlm5nF1q9fT7du3ejTp49B8LA42fECWwsICQlhypQpAEyYMIGXX36ZoKAgXn/9dfbu3Uvv3r3x8/MjMDCQM2e0JFp5eXm89NJLeHt74+Pjw9dff83mzZsZNWqUod+NGzcyevToO/57KBTmzI5TCVxLy6FtYwf8WriU3aCKMOZXuubAxULnsUDPInXaAQgh/gYsgXeklJuK1BkLzCtS9h8hxFvAdmCOlDK76M2FENOAaQDu7u6Ehobecr2wBLjjpyUHlByLFnxa/n0cqS/HlqtednY21tbWpKamkpaWRkREBAsWLDAkdXrjjTdwdXUlLy+P+++/n3vvvRcvLy/y8/NJT08nNTWV5ORkevTowRtvvMFrr73GwoULmTlzJtnZ2WRlZZGamkp+fj7Z2dls376dDRs28NZbb7F69WrmzZuHq6sru3fv5tixY/Tr18/Qb2EuXLhAYmIi7dq1Y/jw4fz0008888wzXLlyhaeffppNmzbRvHlzkpOTSU1N5fXXX8fZ2Zk9e/YYZMcL+iz4mZmZSW5uLqmpqeTm5hIZGcmaNWuwsLAgOTmZjRs3YmlpydatW5kzZw5Lly7lm2++ISYmht27d2Npacn169dxcXFhxowZnD9/Hjc3NxYtWsT48eMN98nPzy9R8t0UZGVlERoaSlpa2m3vTXNA2VUxTGXXwjBNtLR7gxz+/PPP264byy5jOo7ixIuKSq9aAV7AQMAD+EsI4S2lTAIQQjQFugCFc6y+BlwGbIDvgNnAexRBSvmd/jr+/v5y4MCBt1w/efLkLaq3xqC8/dva2mJra4ujoyMODg60adOGwvb+9NNPfP/99+Tl5REfH8+FCxfo0KEDlpaW1K9fH0dHR+zt7XnkkUcA6N27N3/99ReOjo7Y2tpiZ2eHo6MjlpaWjB07FkdHR/r27cvbb7+No6MjBw4cYPbs2Tg6OtKnTx86d+5s6Lcwv//+O+PGjcPR0ZHHHnuM6dOn8+qrr7Jt2zbuvvtuOnfuTGpqKi1btgQ0ufc1a9YY+nFycjIIFxaU2dvbY21tjaOjI9bW1owbN86g73T9+nUef/xxzp69qfDp6OjI7t27efHFFw0jl4K+JkyYwO+//8748eM5evQoK1euxNLSErhd5djU2NnZ4efnR2hoKEXfm+aAsqtimMKuhJQsjm3ZgZWFYNao/jR0uF2bylh2GdNxxAKFJ908gPhi6uyVUuYC54QQkWiO5ID++mhgtf46AFLKS/pfs4UQS4BbAwOV4Z3kEi8ZPnBy0uG7IOj7Ivg+ese3LI3Ckt5nzpzh888/Z//+/bi4uDBhwoRipdFtbG6qYFpaWharLAs3pdEL15GlSKkXJjg4mMTERINCbHx8POfOnStRXry4cgsLi1vuV/RZCj/7G2+8wdChQ3n22WeJiopi2LBhJfYLMHnyZIPzHDNmjMFpKBS1kVWHNEHDoZ3di3UaxsSYMY4DgJcQopUQwgZtymldkTprgCAAIURDtKmr6ELXxwHBhRvoRyEI7ZPjIeC4UawvyvpXwMPf6E6jKCkpKTg6OuLk5MSlS5fYvHlz2Y0qSN++fVmxYgUAx44dKzbZU0REBPn5+cTFxXH+/HnOnz/PrFmzCAkJITAwkB07dhjk3wvkzIuTHbewsDBkE9TpdKxevbpEuwrkzIFbsugNGTKEhQsXkp+ff8v9WrRoQcOGDfnwww+ZNGnSnb0oCoUZI6U0pIcdU41B8QKM5jiklHnADLRpppPACinlCSHEe0KI4fpqm4FEIUQEsBNttVQigBDCE23EUnTibpkQ4hhwDGgIvG+sZzBweBnEH4L7Pjb6rYrSrVs3OnXqhLe3N1OnTr1FGr2qeO6554iLi8PHx4dPP/0Ub2/v2+TAly9fXqKcubu7OwsXLuTBBx+kT58+jB8/HihZdnzu3LkMGzaMQYMG4eFRcnxp9uzZzJo167Znfuqpp2jSpIkhh3iB0wN49NFHadWqFe3aKU0xRe3lYCFBw/5exhU0LJbiJHNr23HHsup/fSbllfLVry6qUiY8NzdXZmZmSimlPH36tPT09JS5ubkmt6syPPXUU3Lp0qW3lZvarqIoWfXKoezSeHnFEdly9h9y7saTpdZTsuqmpO+LprbAqKSlpTFo0CDy8vKQUvLtt9/WyD0Uvr6+NGjQgC+++MLUpigURiM1K5f14XpBQ//qn6YCJauuAFxcXAgLCzO1GXdMSXtPFIraxPrwS2Tm5hPQypVWDeuX3cAI1GmtKlnO1UQKRVWi3neKO+FXfVB8tIlGG1CHHYednR2JiYnqn1hRrUgpSUxMxM7OztSmKGogZ66kcjgmCQdbK+7r0sRkdtTZqSoPDw9iY2O5evVqqfWysrLM8p9c2VUxzMkuOzu7UleTKRQlsUI/2niga9NqEzQsjjrrOKytrWnVqlWZ9UJDQ/Hz86sGiyqGsqtimKtdCkV50QQN4wDTTlNBHZ6qUigUiprE9pMJJKbn4NXYAd9qFDQsDuU4FAqFogZQeKd4cZI71YlyHAqFQmHmXEnJYmdkAlYWghF+zU1tjnIcCoVC
Ye6sOhSLTsI9Hd1xq2ZBw+JQjkOhUCjMGCklK/XpYU0haFgcynEoFAqFGXPg/A3OXUvH3cmWfl4NTW0OoByHQqFQmDW/HtCC4iO7e2BlaR4f2eZhhUKhUChuIzUrlw3H9IKG3c1jmgqU41AoFAqz5Q+9oGHPVq54mkjQsDiU41AoFAozpWCaytQ7xYuiHIdCoVCYIaevpHLkYoGgYVNTm3MLynEoFAqFGbLiQIGgYTPsbSxNbM2tKMehUCgUZkZOno7VhzVBQ3PZu1EYozoOIcQwIUSkECJKCDGnhDqjhRARQogTQojlhcrzhRBH9Me6QuWthBD7hBBnhBC/CiFsjPkMCoVCUd3sOHWFxPQc2rk70NXD2dTm3IbRHIcQwhL4CrgX6ASME0J0KlLHC3gNCJRSdgYKJ/fOlFL66o/hhcrnAp9JKb2AG8CTxnoGhUKhMAUr9DvFR/ubXtCwOIw54ggAoqSU0VLKHCAEeLBInanAV1LKGwBSyoTSOhTaK3g38Ju+6EfgoSq1WqFQKEzI5eQsQiMTsLY0D0HD4hDGSp0qhBgJDJNSTtGfTwR6SilnFKqzBjgNBAKWwDtSyk36a3nAESAP+FBKuUYI0RDYK6Vsq6/TAtgopfQu5v7TgGkA7u7u3UNCQir1HGlpaTg4OFSqrTFRdlUMZVfFUHZVjKq064+zOfx2Jhd/d0tm+N1Z1so7tSsoKChMSul/2wUppVEOYBSwuND5RODLInX+AFYD1kArIBZw0V9rpv/ZGjgPtAEaoY1iCtq3AI6VZUv37t1lZdm5c2el2xoTZVfFUHZVDGVXxagqu3Q6nRzw0Q7ZcvYfcsepK3fc353aBRyUxXymGnOqKlb/wV6ABxBfTJ21UspcKeU5IBLwApBSxut/RgOhgB9wDXARQliV0qdCoVDUSPafu875xAyaONnR36uRqc0pEWM6jgOAl34VlA0wFlhXpM4aIAhAPw3VDogWQjQQQtgWKg8EIvQecCcwUt/+cWCtEZ9BoVAoqo1fD94UNLS0ML+geAFGcxxSyjxgBrAZOAmskFKeEEK8J4QoWCW1GUgUQkSgOYRZUspEoCNwUAhxVF/+oZQyQt9mNjBTCBEFuAHfG+sZFAqForpIKSxo6O9hYmtKx6rsKpVHSrkB2FCk7K1Cv0tgpv4oXGcP0KWEPqPRVmwpFDWe1WdyGDjQ1FYozIE/jl4iK1dHr9autHQzH0HD4lA7xxUKE7L2bK6pTVCYCQXTVOYmaFgcynEoFAqFiYm8nMrRi0k42lpxr7d5CRoWh3IcCoVCYWJW6EcbD/ian6BhcSjHoVCYgIycPJ5Ysh+A6KtpJrZGYUpuETSsAdNUYOTguEKhuMlnW0/z+fYzt5Xf/emft5y/MMiLlwa3qy6zFCZm+8krXE/Pob27Iz5mKGhYHMpxKBTVxEuD2zG1f2sm/bCfgxdu4O5ky5WUbABWP9sHv7samNhChSkomKYa3cM8BQ2LQ01VKRTVREpWLo99v4+DF27Q1NmOX6f1Nlybu+lUgYyOog5xOTmLP09fNWtBw+JQjkOhqAaSM3OZ+P1+DsUk0dzFnl+n9cazobZW39nemr3R1/nz9FUTW6moblYdikUnYXAnd1zr15zUQspxKBRGJikjhwmL93H0YhIeDewJmdaLu9zqGa5PD2oDwNxNkeh0atRRV9Dp5M1pqhoSFC9AOQ6FwojcSM9h/OJ9HItL5i7Xevz6VG9auN50Gg+2seax3p40dbbj5KUU1h1Vmp11hX3nrnMhMYOmznb0M2NBw+JQjkOhMBLX03N4dPE+TsSn4OlWj5BpvWjuYn9LnRFeNthZW/LSPdoqqk+3RpKTpzOFuYpqZmUNETQsDuU4FAojcC0tm0cX7eXkpRRaN6xPyLTeNCviNArzcLfmeDV24OL1TJbvu1CNlipMQUpWLhuO6wUNu9esaSpQjkOhqHKupmYz7ru9nLqcSptG9QmZ1osmzqVncrOytGDW0PYAfLkjirTsvOowVWEifj8aT1aujt6t3W6Jd9UUlONQKKqQhJQsxn73D2cS0vBq7EDItN40dipf+s/Bndzp3rIBiek5LP4r2siWKkzJigMFezfMWz69JJTjUCiqiMvJWYz9bi9nr6bToYkjwdN60cjRttzthRDMHtYBgEW7ormWlm0sUxUm5NTlFI7GJuNoVzMEDYtDOQ6Fogq4lJzJ2O/+IfpaOh2bOrF8ai8aOpTfaRQQ0MqVuzs0Jj0nnwU7ooxgqcLUrDgQC8Dwrs2wszZ/QcPiUI5DobhD4pIyGfPtXs4nZtC5mRPLp/S8o81crw5rjxCwbN8FYhIzqtBShanRBA01xzGmR80LihegHIdCcQdcvJ7BmG//IeZ6Bj4eziyf0osGd7gDuEMTJ0b4NSc3XzJva2QVWaowB7advMKNjFw6NHGkS/OaIWhYHMpxKBSVJCYxg7Hf7SX2RiZdW7jw85M9ca5nXSV9zxzcDhtLC9YejSciPqVK+lSYnsI7xWuKoGFxGNVxCCGGCSEihRBRQog5JdQZLYSIEEKcEEIs15f5CiH+0ZeFCyHGFKq/VAhxTghxRH/4GvMZFIriOH8tnTHf/UNcUiZ+d7nw85MBONtXjdMA8GhQjwm9WiIlfLT5VJX1qzAdl5Iz2XX6KjaWFjVK0LA4jOY4hBCWwFfAvUAnYJwQolOROl7Aa0CglLIz8KL+UgbwmL5sGDBfCOFSqOksKaWv/jhirGdQKIoj+moaY7/by6XkLPxbNuCnyQE42VWd0yhgxt1tcbC1IjTyKv+cTazy/hXVy6qwm4KGdzqdaWqMOeIIAKKklNFSyhwgBHiwSJ2pwFdSyhsAUsoE/c/TUsoz+t/jgQSgZom5KGolUQma07ickkVAK1d+nByAoxGcBoBrfRum9W8NwIdKdr1GowkaakHx0TU4KF6AMNabUQgxEhgmpZyiP58I9JRSzihUZw1wGggELIF3pJSbivQTAPwIdJZS6oQQS4HeQDawHZgjpbxtwbsQYhowDcDd3b17SEhIpZ4jLS0NBweHSrU1JsquilEVdsWl6Zi7P4uUHElHVwte7GaHrdWdzVOXZVdWnuTVXZmk5Ehm+Nri36R6cq/V5r+jMSjLrpOJ+cw9kIWrneCTAfZYVFN8405fr6CgoDAppf9tF6SURjmAUcDiQucTgS+L1PkDWA1YA62AWMCl0PWmQCTQq0iZAGzRHMpbZdnSvXt3WVl27txZ6bbGRNlVMe7UrlOXUmS397bIlrP/kOMX7ZUZ2XnVZtdPe87JlrP/kEGf7JS5eflVct+yqK1/R2NRll0vhhyWLWf/IT/dfKp6DNJzp68XcFAW85lqzKmqWKDwmMwDKKoZHQuslVLmSinP6Z2EF4AQwglYD7wppdxb0EBKeUn/TNnAErQpMYXCaETEpzBu0V4S03Po59WQxY/7Y29TfRu3xgbcRUu3ekRfTee3sNhqu6+iakjOzGXDMU3QcGQNFDQsDmM6jgOAlxCilRDCBhgLrCtSZw0QBCCEaAi0A6L19VcDP0kpVxZuIIRoqv8pgIeA40Z
8BkUd53hcMo8u3sv19BwGtm/Eosf8q323r7WlBS8P0QQQ5287Q2ZOfrXeX3Fn/H40nuw8HX3a1ExBw+IwmuOQUuYBM4DNwElghZTyhBDiPSHEcH21zUCiECIC2Im2WioRGA30ByYVs+x2mRDiGHAMaAi8b6xnUNRtjsUmM37xPpIycrm7Q2O+ndjdZBIR/+rSlM7NnLicksXSPedNYoOictTULH+lUWakTQgxA1gm9SufKoKUcgOwoUjZW4V+l8BM/VG4zi/ALyX0eXdF7VAoKsqRi0lM/H4fqVl5DO7kzoJH/bC1Mp2ukIWFJoD42A/7WRgaxaMBd1XZZkOF8Th5KYVwvaDhMO8mpjanyijPiKMJcEAIsUK/oa/mbndUKMrBoZgbTFysOY1hnZvw1aPdTOo0Cujn1ZA+bdxIycrj6z/rmADi7s8g9YqpragwBaONB31rrqBhcZTpOKSUb6QMSHwAACAASURBVKIFrL8HJgFnhBD/FUK0MbJtCkW1c/D8dR77fj+p2Xnc36UpXz7qh42VeSjzFJZdX/r3eS4lZ5rYomokLQH+/tzUVlSI7Lx81hyOA2CM/10mtqZqKdd/hH5K6bL+yAMaAL8JIT4yom0KRbWy/9x1HvthP2nZeTzQtRmfj/XF2tI8nEYBXVu4cH+XpmTn6fh82xlTm1N9BL4AR5djk13hGXOTsS0igRsZuXRs6oR3cydTm1OllPlfIYR4XggRBnwE/A10kVI+A3QHHjGyfQpFtfDP2UQe/2E/GTn5POTbjM9Gd8XKzJxGAS8PaYelhWDFwYtEJaSZ2pzqwdYJGrbH7/BsOL0FasAu+ptBcY8aLWhYHOXZhtoQeFhKeaFwodR2cf/LOGYpFNXH31HXePLHA2Tl6ni4W3M+HtkVSwvz/Udv3ciBMT1asHxfDB9vPsW3E2/f2FtryM+Do8th5weQGo89wPJRt9YZMAeCXjOFdSUSn5TJrjOaoOFDvjVb0LA4yvOVagNwveBECOEohOgJIKU8aSzDFIrqYNfpq0xeqjmN0f4eZu80CnhhkBd21hZsPnGFQzE1Z/qm3EgJpzbAN4Gw7jlIjQcrW666BUA9t5v1WgZCq36ms7MEVoXFIiUM7lzzBQ2LozyOYyFQeDycri9TKGo0oZEJTPnpINl5OsYFtODDh31qhNMAcHeyY3JgKwDmbqxlAogx++CHYRAyDq6eApeW0LIPdH6EE13egBfCYdDbYOcCF/6GpffDj8Ph4n5TWw7oBQ3DtGmqMbVo70ZhyuM4hCz0rpRS6ijfFJdCYbbsOHWFaT+FkZOnY0Kvu/jPQ12wqCFOo4CnBrTBpZ41+85dJ/T0VVObc+dcjYSQ8fDDELi4VxtZ3PsR9HsZMq7D/Z9o9WwdoN9MePEYDHxdi3+c+xO+Hwy/jIS4QyZ9jL3RiVy8nkkzZzsC2zY0qS3GojyOI1ofILfWHy8A0cY2TKEwFtsirvDUz2Hk5Ot4vHdL/v2gd41zGgDO9tZMH9gWgI82RaLT1dBRR0q8Nh31dS849QdY14P+r8LzR6DnU5B5A0YtBZv6t7azc4KBs+HFcOj3Ctg4QNRWWBQEwePg8jGTPE5BUHykf4saM4KtKOVxHE8DfYA4NFHCnujlyhWKmsam45d5ZlkYufmSJwI9eWd45xq94mVi75Y0c7bj5KUU1h0tqiFq5mQlw7Z34YtucOgnQID/ZM1h3P2G5hgA+r4IjTuW3I99Axj0f9oUVuALYGUPkRvgm76w4jFIqL5QbHJmLhuPXwZgVHePartvdVOeDYAJUsqxUsrGUkp3KeWjUp9wSaGoSWw4dokZyw+Rmy+Z2q8Vb/2rU412GgB21pa8OLgdAJ9siSQ7rwYIIOZlwz9fweddYfc8yMuETg/C9P3wr8/A0b1y/dZ3g8HvaSOQXs+CpS1ErIWve8OqKXDN+Lvt1+kFDQPbutHCtXYIGhZHefZx2AkhpgshvhZC/FBwVIdxCkVVse9SHs8FHyZPJ3l6QBtev69jjXcaBTzSzQOvxg7E3shk+b4YU5tTMjodHP0VvvSHza9rU1AtA2HKdhj9EzRsWzX3cWgMwz6AF45AjylgYQXHVsJXPWD1M3D9XNXcpxhWHKh9gobFUZ6pqp/R9KqGAn+i5dVINaZRCkVVsvZIHN8czSZfJ5kR1JbZw9rXGqcBYGkhmDVUk11fsCOKtOw8E1tUBCkhaht82x9WT4PkGGjcCR5dAZPWg4eR9qE4NYP7P4XnD0O3x0FYaHtCFvjDuuch6WKV3i4iPoVjcck42VkxtHPtETQsjvI4jrZSyv8D0qWUPwL3A12Ma5ZCUTX871AsL/16BAk8P8iLl4e0q1VOo4DBndzp3rIBiek5LNplRmtX4g7BT8Phl0fgyjFwag4Pfg1P74Z2Q6E6/hYuLWD4FzDjIHR9FKQODv0IX/jB+pe14HwVcFPQsHmtEjQsjvI4jlz9zyQhhDfgDHgazSKFoopYefAiL688ik7CiLbWzBxcO50GaAKIc+7VBBAX/xXN1dRs0xqUeBZWTtJWOJ3bpe25GPxveC4M/MaDhQk+WF1bwYiFWiylyyjQ5cGBxfC5L2x6TRNSrCS5OsmaI3pBwx61e5oKyuc4vhNCNADeRMvgFwHMNapVCsUdErI/hldXhSMlzBrangfb1r7du0Xp4enKoA6NSc/JZ8EOEwkgpl2F9a/AVwFwYrUWoA58QYs3BD4P1vamsaswDb3gkcXw7D9aUD4/G/Z+rQXrt74F6YkV7vLwlXySMnLp1NQJ7+bORjDavCjVcQghLIAUKeUNKeUuKWVr/eqqb6vJPoWiwizbd4E5/zuGlDDn3g5MD6qioGsNYNaw9ggBy/fHEJOYUX03zk6D0A/hC184sEibDvKdAM8f0lY62TeoPlvKS+OOWlD+qb+g/X2Qm6FJt3/uA9v/rQXvy8muOC2uNNq/9i7BLUypjkO/S3xGNdmiUNwxP/1znjdWa2no37y/I08PqFtpYzo0cWKEX3Ny8yWfbo00/g3zc2H/Is1hhH4AOWnQ7l54+m946CtwrgEfpE19YFwwTN0BbQdrz/DXJzC/K4TOhayUUpvHJWVy4lq+JmjoV/sEDYujPFNVW4UQrwghWgghXAsOo1umUFSQJX+f4621JwB461+dmNKvtYktMg0zB7fDxtKCtUfiORGfbJybSKlNRX0VABtegfSr4NEDJm2AR0PAvZNx7mtMmneHCb/B5C3QagBkJ0Pof7URyF+faqOqYlgVFosEhnR2x6Ve7Z8ShfI5jsnAdGAXEKY/Dpanc32q2UghRJQQYk4JdUYLISKEECeEEMsLlT8uhDijPx4vVN5dCHFM3+cXKpWtArSA8Lu/RwDw3oOdmdy3lYktMh0eDeoxsXdLQJMiqXLO7YJFd2vB7+vR4NYWRv8MT24Fz8Cqv191c1dPeHydtlT4rj7alNX297QYyJ4vIefmFKBOJw2rqepCULyA8uwcb1XMUeZXOSGEJfAVcC/QCRgnhOhUpI4X8BoQKKXsDLyoL3cF3kaTNwkA3tYH6EFT5p2Gls7WCx
hWvkdV1Fa+/fMs76/XZCXef8ibx3p7mtYgM2B6UFscba348/RV9py9VjWdXj6uiQj++ADEHwIHd22n97P7oNPw6llaW5149oUnNsDENdpoKuMabHlTm5bb+w3kZvFPdCKxNzJxsxMEtqmdgobFUabKrRDiseLKpZQ/ldE0AIiSUkbr+wkBHkRblVXAVOArKeUNfZ8F6+GGAlullNf1bbcCw4QQoYCTlPIffflPwEPAxrKeQ1E7+WpnFB9vjkQI+GBEF8YG1K7czpXFtb4N0/q35tOtp5m7KZI1z7pVfilyUgzs+A+E/wpIsHGEvi9osh5FhQdrG0JAmyBoPRDObIWd/4FLR2DTbNjzBXH2o7HGj77N7WukUGZlEWXp+Ashvix0agcMAg5JKUeW0W4kMExKOUV/PhHoKaWcUajOGuA0EAhYAu9IKTcJIV4B7KSU7+vr/R+QCYQCH0op79GX9wNmSylvy0QohJiGXozR3d29e0hISKnPWRJpaWk4ODhUqq0xUXbB2qgcVkflIoDJ3jb087A2C7sqgjHtys6TzNqVSUqOZLqvLT2alD8bQlpaGi62Olpe+I3mceuxkHnohBXxze7lQstR5NqYZsmpyf+OUtLw2j48zwfjkH4egFjZkAvNR5DXdhjSwrwyTtzp6xUUFBQmpbxta3+ZTymlfK7wuRDCGU2GpCyKc79FvZQV2nTTQDQpk7/0mwxLaluePrVCKb8DvgPw9/eXAwcOLIfJtxMaGkpl2xqTumyXlJL5286wOuoMQsAnI7vySBlKpHX19Uqof57/W3uCjXFWvDSqf/nyqOdkEB38Kq3j12kBYoAuo7C4+008GnhiynVS5vF3DALdq/y5djHNDs/HyyIOj/hFkLkNBs7RNheaYoNjMRjr9SpPcLwoGWgf9mURCxSOFnkARff2xwJrpZS5UspzQKS+75Laxup/L61PRS1GSsm8raf5fPsZLAR8Ntq3TKdRlxkbcBeebvWIvprOyrDY0ivn52ny5l92p/W5nzWn0eZueGqXtmGugWe12FwjsLDg49iODM2ZS1j3uWTYN4Mb52D1U1pekeOrNFHHWkp51HF/F0Ks0x9/oH24ry1H3wcALyFEKyGEDTAWbed5YdYAQfr7NATaoSWJ2gwMEUI00AfFhwCbpZSXgFQhRC/9aqrHymmLohYgpeSjzZF8uSMKSwvB52P96sy6+cpibWnBy0M0AcT5206TmVOM7Hox+b1THVprQeGJq6Fp12q22vw5EZ/M8bgUHOxs6DxsKgd6LNA0uFxawrXT8Ntk7fWMWKe9vrWM8kzIfVLo9zzggpSyjK8uIKXME0LMQHMClsAPUsoTQoj3gINSynXcdBARQD4wS0qZCCCE+Dea8wF4ryBQDjwDLAXs0YLiKjBeB5BS8sHGU3y3KxorC8EX4/y4r0tTU5tVI7i/S1O+3XWW43EpLNlzjmcHFtpJH7NPk9m4uFc7d2kJg94i7JorA9sEmcbgGsDKg9pH4EN+mqChtLDUNLi6jIIjy2DXJ5AQASsmQhMfCHqj+kQdq4HyOI4Y4JKUMgtACGEvhPCUUp4vq6GUcgOwoUjZW4V+l8BM/VG07Q/AbXk/pJQHAe9y2K2oJUgp+fcfJ/nh73NYWQgWPNqNYd61W7a6KrGwEMwe1oGJ3+9nYehZHg24C5f0c9rehFN/aJXquWnpWv0ng5UNhIaa1GZzJis3n9WHNUHD2/JuWNmA/xPg+yiE/ahtHLwcDsFjoLk/BL2uTf/VcAdSnhjHSqDwZF2+vkyhMDpSSt79PYIf/j6HtaVg4YTuymlUgn5ejQhs60a9rATOL3my+PzevZ7WPvgUpbI14grJmbl0blaKoKGVLfScpok7Dv0v1G8EcQfhl4dhyb3aJsoaTHlGHFZSypyCEylljj5moVAYFZ1O8ta64/yyNwYbSwsWTujGoI6VTCta18lK5jO3dTheXIT91RyksET4T4YBs8FROeKKULBTvFxZ/qztofd06D4J9n+niSjG/KNtovTsB3e/CXf1Mq7BRqA8I46rQojhBSdCiAeBKtqKav6sPpNTdiVFlaPTSd5Yo3caVhZ8+1h35TQqQ6H83o2PfoW9yGFDfgCftP1Jn99bOY2KEHsjg91R17CxsuBB32blb2hTH/q+BC+EQ9CbYOcM5/+CH4bCzw9DbJjxjDYC5XEcTwOvCyFihBAxwGzgKeOaZT6sPZtbdiVFlaLTSeb8L5zg/THYWlmw+DF/gto3NrVZNYsS8nvHj/yd5/NfYuFxQVSCygBdUX4Li0VKGNq5SeUEDe2cYMAszYH0f1XbhX92Oyy+G5aPgUtHq95oI1AeraqzUspeaHpTnaWUfaSUUcY3TVEXyddJZv0WzoqDsdhZW/DDpB70b9fI1GbVHMrI793Muz9jerRAJ40kgFiL0emkYTXVmPJMU5WGvQvc/Qa8GK6NRKzrwelN2t/t1wlwJaLsPkxIefZx/FcI4SKlTJNSpur3VrxfHcaZmoSULFObUKfI10leWXmUVYdisbe2ZMmkAALb1h3huDumnPm9Xxjkhb21JVsirhB2ofzJiuo6e84mEpeUSXMXe/q0cauaTuu5wj3vaCOQ3jPAyg5O/g4L+2h7Qa6erpr7VDHlmaq6V0qZVHCiFyS8z3gmmQ8jvt4DwPjFe5m76RSbjl/mUnImZel7KSpOXr6Ol349wurDcdSzsWTpEz3oXVX/nLWdCub3buxkx+S+ngDM3XRKvZ/LSUFQfJS/R9ULGjo0gqH/0Va3BUwDS2tt9/nXPeF/T2l/YzOiPKuqLIUQtlLKbND2cQC2xjXLNHyml7Ioyt9RifwddTMPcT0bS/q0aYhvC2d8PFzo6uGCc72SBfYUpZObr+PFkCOsP3aJ+jaW/Dg5AH9PlSusTNKuwp9zIWwJ6PK0/N69ntamPspI1frUgDYs2xfD/nPXCY28SlAHFUMqjeSMXDaduIwQMNKYEjdOTeG+j7U87bs+gcM/Q3gIHFup7Q0Z8Cq4mF4BujyO4xdguxBiif78CeBH45lkOl4a3I6XBre7pcxzznq+m9ido7FJhMcmc/RiEilZeWw7eYVtJ6/crOdWj64tXPDxcMG3hTOdmzljZ20eQmfmTE6ejueDD7PpxGUcba1YOjmA7i3NMD+1OZGdBv8s0CcVSgNhoeX3Dnqt3KlaneysmRHUlvfXn2TuplMMaNeoTsmCV5S1R+PIydPRz6shHg3qGf+Gzh7wwHzo+yL8+TEcDdacyNEQ6DYR+r0CzqaT2ymPOu5HQohw4B40ddpNQEtjG2ZODOnchCGdtWWLUkrOJ2Zw9GISR2OTOHoxiRPxKZxPzOB8YgZrj2iai5YWgvbujnRt4UxXD82htHN3KJ86aR0hJ0/H9OWH2BpxBUc7K35+sie+LVxMbZb5kp8LYUu1UUb6Va2s3TAY9HalUrVO6NWSH3af49TlVNYejWOEnxKLLIlfDxRMU1Vzlr8Gnlru9n4ztb97+Ao4+AMcXqbtUO87Exyrf5l6ecXjL6PtHh8NnANWGc0iM0cIQauG9WnVsL5BY
C83X0fk5VTDiORobBKnr6QScSmFiEspBO/X3nR21hZ4N3PWj0yc8W3hwl2u9SqfYKcGk52Xz7O/HGL7qQSc7a355cmedPEwTY4Hs0dKiFijSYRcj9bKPHrAPe/eUapWO2tLXhrcjlm/hfPJ5tPc16UptlZqlFyU43HJnIhPwdnemiGdTLSXyK0NPPwd9HsZQj/Q8r3v+0aTNQmYAoEvQv3qW0hSouMQQrRDU7QdByQCv6IlflLKZ0WwtrTAu7kz3s2debSnNv+YkZPHifgUvSPRHErM9QwOXrjBwUIrWVzqWevjJPqRSQtnGjvamepRqoWs3Hye+SWMnZFXcamnOY0SpRvqOud2wda3tVStoOX3HvQ2dHygSvSOHu7mwaK/ojl9JY1le2PqdK72klipD4o/5NvM9NPPjdrDqKXaVFXoB5pszJ4v4cAP0PMp6POctlJr92fQ9VGjmVHaiOMU8BfwQMG+DSHES0azxEx5sE3lgt71bKzo4elKj0JB3uvpOYQXipUcjU3iWloOu05fZdfpq4Z6zZztDPGSri2c6dLcGUe72hF8z8rNZ9rPYew6fZUG9axZNqUXnZo5mdos8+Pycdj2DkRt1c4d3LUkQX4TtRU3VYSlhWDW0A5M/ekgC3ZGMcrfo9a816qCrNx81uinn0f3qOZpqtJo4g1jl0H8Ydj5AZzZDLvnwYHFWkrf9KuavIndEKPcvjTH8QjaiGOnEGITEELxGfhqNSO8qk6Wy7W+DQPbN2agfhe0lJL45CzCLyZxJDaJ8IvJHItLJj45i/jky2w8fhnQpz1u5GCY3vLxcCFXV/OWUGbm5DP1p4PsjrqGW30blk3tSYcmddRplPSN0AT5ve/p2Bj/lg04eOEGi/46x8wiC0TqMlv0gobezZ3o3MwMR8XN/GD8Crh4QMuHHr0T/vwQbB0hPw+7bsYREi/RcUgpVwOrhRD1gYeAlwB3IcRCYLWUcotRLKpDCCFo7mJPcxd77tXnltDpJNHX0jhyMZlwffA94lIKUQlpRCWk8b9DmpyzpYDOEbv1gXfNobRu5IClma6MycjJ48mlB/knOpGGDrYET+2Jl7ujqc0yHWkJt34jzLiuSXDv/w7yc8DCGnpMgf6vGH3uWgjB7Hs7MOqbf1j8VzQTe9WptS+lsrIigoampEUPeGwNXNijffG4sBuAHgdfgOYW0HVsld6uPKuq0oFlwDIhhCswCpgDKMdhBCwsBG0bO9K2saNhvXh2Xj6nLqXqV3ElczQ2ibMJaYTHJhMem2xoW9/Gki76WElBAL65i73Jg+/p2Xk8sfQA+89dp7GjLcun9qJtYweT2mRyAl+Ar3th69MV/poHu+ffkt+boDfAtfriDT08XbmnY2O2nUzgyx1nuNsMv1xXN7cIGnatIZkmW/aBSX9osbGNr2J59ZSWznZ1IXnBAXO0pdt3QHlXVQGgz8L3rf5QVBO2VpZ0baE5A3prZRu37cS5VRdDvCQ8Npm4pEz2Rl9nb/R1Q9uGDjaGTYo++qXBrvWrTxU/LTuPJ5bs58D5G7g72RI8tRetG9VxpwHaSqlGHelx4HnQ6aVtWgfB4HdNlqp11tAObD+VwPJ9MXj3rd0LNMrDyoOaoOGwzk1q1gZfIcDDH6TkQouHafnE92BRtdsAKuQ4FOaDvZWgT5uG9Glzcxrjamq2YXrraGyyIfi+41QCO04lGOq1cLXXRiX6kYl3cyfq2VT9WyE1K5fHf9jPoZgkmjrbETy1F54NjTNPXyPIStZ0iMJX6BP5yFv/AaN3wrc7q+QbYWVo38SRh/08WHUolv+dyWF0tVtgPuh0kt/C9IKG5hQULy/rXwEPf865jKZlFTsNUI6jVtHI0ZZBHd0NeSuklFy8nqkPvGuruI7HpXDxeiYXr2fyR/glACwEtHN3xMdD22PS1cOF9k0csa7EZsXVZ3IYOBCSMzWnceRiEs1d7Ame2ou73Kphx625kZcNZ7bCsRUQuQnys7VyC2uwsiW20QA8nvylyr8RVpaXBnvx+9F49l7K53hccp1dJv332WvEJWXi0cCe3q1rmGba4WXa8u2pO2DPAaPcwqiOQwgxDPgcsAQWSyk/LHJ9EvAxEKcvWiClXCyECAI+K1S1AzBWSrlGCLEUGAAUTO5PklIeMd5T1FyEENzlVo+73OoxvKuWdCYvX8eZhDTCY5MMAfhTl1MNxwq9bLStlQWdmznpJVS0eImnW/0yZSnWns3lvYxcJv6wj/DYZDwaaE6jhWsdcho6HcTs0UYWEWu0kQYAQsv61mUUnPsTrOyIchmNh5k4DQCPBvWY2Lsl3+8+x0ebI/lpcoCpTTIJBf8Ho7q3qHlSLOlXtb0eRlqFB0Z0HEIIS+ArYDAQCxwQQqyTUhYVmv9VSjmjcIGUcifgq+/HFYji1mD8LCnlb8ayvTZjZWlBx6ZOdGzqxJgeWllWbr5hs2J4rDbNde5aOodikjgUYxBGxsnOyrC3pMChuDvdPhc+/vu9HI9L4S7Xeiyf2rN6tH3MgcvHtZHFsVWQEnuz3L0L+IwC70c0DaLDy+DKCaN+I7wTpge1Zfnec+w6fZU9UdfoU8ek7ZMycthcIGjoXwNlWPq+aPRbGHPEEQBESSmjAYQQIcCDQEUzlIwENkopM6rYPoUeO2tLurdscIu4YHJGLuFxWtD9yEUtbpKQms3uqGvsjrqZOdjdydYQK+nUVNuTcTwuhZZu9Qie2otmLvbV/jzVSlIMHPtNUy9NKPTWdr4LuowEn9HQuOOtbarhG+Gd4FrfhntbWfO/M7nM3XSKNdMDTb4yrzpZeyTeIGjYvLa/fyuJMJYWvxBiJDBMSjlFfz4R6Fl4dKGfqvoAuAqcBl6SUl4s0s8OYJ6U8g/9+VK0tUXZwHZgToHke5F204BpAO7u7t1DQkIq9RxpaWk4OJjfKiBT2HUjS0d0so5zyTrOJecTnawjM+/2eu71BHMC7GhgZz5TMFX5elnlptLo6t+4X/kTl+SbziLXypGExoEkNB5AsnMHTbW2Gu2qShKT03jvkAXJ2ZLpvrb0aGIe4dDqeL3e+juTmFQdz3S1pWfT8j23uf4d79SuoKCgMCmlf9FyYzqOUcDQIo4jQEr5XKE6bkCalDJbCPE0MFpKeXeh602BcKCZlDK3UNllwAb4DjgrpXyvNFv8/f3lwYMHK/UcoaGhDBw4sFJtjYk52DVvSyRf7Cg7i/ALg7xuk6uvbu749crNhMiN2sjizFbQ6XPRW9lB+/u0kUWbQWBVsaXO5vB3LI7Q0FAu2rXi/9Ycp3XD+mx5qb9ZKDsb+/U6HpfMv77cjUs9a/a9Pqjcoo/m/He8E7uEEMU6DmN+jYgFCq9j8wDiC1eQUiYWOl0EzC3Sx2i0Xeq5hdpc0v+arc8R8kqVWayoEDOHtGfmkPa3lHnOWc/5D+83kUVVjC5fC2KHr9SW0eakauXCAtrcDV1GQ8d/afIOtZCxPVrw/V/RRF9LZ8XBWIOAZ21mhUHQsLlSCi4FYzqOA4CXEKIV2qqpscAt4jxCiKaFHMFw4GSRPsYBrxXXRmiT
rg8Bx41hvKKOIqUmHHdspZa6M+1msi6addNGFp0fNkkOhOrG2tKCV4a2Z8byw8zfdpoRfs2xt6m9H6ZZufmsOawt8DR7iRETYzTHIaXME0LMADajLcf9QUp5QgjxHnBQSrkOeF4IMRzIA64DkwraCyE80UYsfxbpepkQohGa4OIR4GljPYOiDnE9WhtZHFsBiYWm31xbayOLLqOgYVvT2Wci7vNuSpfm0RyLS2bJnnM8O7D2vgabT1wmJSuPLs2dlWJzGRg14iWl3ABsKFL2VqHfX6PIiKLQtfPAbQIxhWMgCsUdkXYVTvxP228RVygGVr+RtnS2y2ho3q1K8l7UVCwsBLOHdWDC9/tYGHqWRwPuwqVe9UnWVCcr9Xs3RtfEJbjVjHkslVDUGiqbv6TayE6DU+u1kcXZnSDztXLr+lpyJJ9R0GogWKp/jQL6ejWkb9uG7I66xtehZ3n9vo5lN6phXLyuCRraWlkw3LeGCBqaEPXfoahSqjJ/SZWRn4tr4kH47WeI3AC5+i1BFlbQdqgWt2h/r9nuqzAHZg/rwO4Fu1m65zyT+njWuv05K/W6VMO8m+Bsb+ZffswA5TgUtRMp4eJ+bWRxYjU+GYUW8LXopY0sOo2A+jVMh8hEdPFw5n6fpqwPv8T8baf5aKRpFHyNQb5O8pt+NdUYFRQvF8pxKGoXVyO1mMWxlZB0wVCcXq8F9XtN0nZzN/A0mXk1mVeGtGfz8cv8FhbLPxRAQAAAF+FJREFU1H6ta00irr+jrhGfnEULV3t61TRBQxOhHIei5pMSry2dDV8Bl8Nvljs2gy5akPvAqUQG9g8ynY21gFYN6zM2oAW/7I3ho82RLHrstn1hNZKCvRs1UtDQRCjHoaiZZCVDxDptKurcX4BeAcHWGToN1+IWLQPBQr/vIDLUVJbWKp4f5MWqsDi2Rlwh7MJ1urd0NbVJd8SN9By2nLiCEPBId7Waqrwox6GoOeRlw5kt2sji9OabuS0sbcBrCPiM0X5aq+x1xqKxox1P9m3Fgp1RzN0Yya9P9arRAohrj8SRk6+jf7tGStCwAijHoTBvdDq48Lc2sohYW3xui07Dwb5Bqd0oqo5pA1qzbN8F9p+/zs7IBO7uUDN30Usp+VXt3agUynEozA8p4cpxbWRxfBWkxN28ZshtMRKc1Xp7U+BkZ830oLa8v/4kH22KZEC7xljWwNjAifgUTl5KoUE9awZ3qpnOz1Qox6EwH5JitNVQ4SvhaiHZstJyWyhMwoReLVny93lOXU5l7ZE4Hu5W876x/3pAL2jopwQNK4pyHArTknEdTqzWHEbMPzfL7V2h8whtKqpFT7PJya3QsLO25KXB7Xhl5VE+3XKa+32a1qgP36zcfNYeUYKGlUU5DkX1k5MBpzdqI4uobYVyW9hDh/s0jag2d1c4t4Wiehnh15xFu6KJvJLKL3tjeLJvK1ObVG4KBA19PJzp2FQJGlYU5TgUVcPuz6DroyVfz8/TclscK8htkaaV15HcFrURSwvBrKHtmfLTQb7aGcVofw8c7WqGXIdh74YabVQK5TgUVUNaAvz9OdgNuVkmJcQf0kYWx1dBesLNa3Ust0VtZVDHxvi3bMDBCzdYtCv6tsRe5sjF6xn8HZWoCRp2bWZqc2okynEoqobAF+DrXtj49oD/b+/Ow6OqzwWOf99AwhoWCQRki8oiCggEIwqlLOIFSsGWvW5w5XKxWtC2CvT6aEvtbbG3RUF8qFAriBbECqWIIEuoiICyyb4JEdnDFgjEhCTv/eOcwBgmkAlz5oTk/TzPPJw553fmvPMbJu+c7f2d/MrZs9gyJ/jYFi0HQI3b/IvVhI2IMKbH7fSbspppn+7nkXsTqBlbzu+wrmqOu7fRwwoaFpklDhMeFeOgTisS1z0Nq89cnm9jW5R4bRNu4v5m8SzdcYxJy/cwrk9zv0MqUE6u8r5bCXfA3XaYqqjsUhVzfVRh50KY0h72JVPu4pnvLj+fCuWrQb1ESxol2HPdmxIl8O7aA6ScOO93OAX6NLCg4S1W0LCobI/DFN2BtbD0xcuX0UZFczi+EzcPnQExFf2NzURUk/hYftymHu+vP8ifluxm0uDWfocUVN5J8QFW0PC62B6HCV3qbpj1ELz5gJM0KtaAmxOheV92N33KkkYp9Uy3JsSUjeJfXx5m66G0a68QYafPZ7HEChqGhaeJQ0S6i8guEdkrImOCLB8iIqkissl9DAtYlhMwf37A/FtEZK2I7BGR2SJiF/tHytkjMH8kvH4P7FwA0RWh47PQ6Vdw8Tz0+rPfERof1a1WgUfbNQRg/KKdPkdzpXl5BQ0b1yxxIxhGmmeJQ0TKAJOBHsAdwGARuSNI09mq2sp9TAuYnxEwv3fA/PHABFVtDJwGHvfqPRjXt2mwbBxMbA0bpgMCiUNh5Ebo8rxzT0b/t2zoVcOTnRsRW64sK/ecYNXeE36Hc4mqXioxYneKXz8v9ziSgL2quk9Vs4BZQJ/reUFx6jd3Ad53Z00HHryuKE3BsjNh9evwaitY+SfIzoBmveHJtfDDVyC2ttOuw9NWQ8oAUL1SDCM6OZdaj1+0E1X1OSLH1kNn2Xn0HNUrRnP/HbX8DueG52XiqAt8E/D8oDsvv74isllE3heRwJ8C5UVknYisEZG85FADOKOq2dd4TXM9cnPhy9kwqS0sHgsZp6DBffD4Uhj4NsQ19jtCU4wNbe/cy7H5YBoLtxz1OxwAZq87AMCPWte7oWpqFVfi1S8CEekP/IeqDnOfPwIkqerPAtrUANJVNVNERgADVLWLu+xmVT0sIrcCy4GuwFlgtao2ctvUBxaqaosg2x8ODAeIj49PnDVrVpHeR3p6OpUrVy7Sul7yJC5Vqp/eyG1fzaDy+f0AnK/YgH23PsrJGm0LdTltqeqvMCipcS0/cJEZ27OIryj8rkMFyobpCqaixJWVo4xKvkBGNvy2fQXqx4b/93JJ/Rw7d+68XlWvHCNYVT15APcCiwOejwXGXqV9GSCtgGVvAf0AAU4AZYNto6BHYmKiFlVycnKR1/VS2OM6uF71rV6qL1ZxHn9qprphpmpOtr9xhYnFFZrrjSsrO0c7/TFZG45eoDPXpIQnKC1aXHM3HNSGoxdo70krwxZHfiX1cwTWaZC/qV4eqvoCaOxeBRUDDALmBzYQkToBT3sDO9z51UWknDsdB7QHtrtvJNlNIgCPAf/08D2UfKf2wZyhMLUz7P8EyleFbuPgZ+uh9UOXx+w2JgTRZaL4xQNNAHh16R4ysnJ8i8UKGoafZzcAqmq2iDwFLMbZm3hTVbeJyDicLDYfGCkivYFs4BQwxF29GfAXEcnFOQ/zB1Xd7i4bDcwSkZeAjcBfvXoPJVp6KnzyMqx7E3KzoUw5uOe/ocMzUPEmv6MzJUDP5nVoUXcfWw6l8eaq/TzZuVHEYzhw8gKffeUWNGxlBQ3DxdM7x1V1IbAw37wXAqbH4hzCyr/eZ8AV5y3cZftwrtgyRZGZDqsnw2cT3dLmAq0ehs5joardFGXCJyrKKYD40LS1TFnxFT9JakD1SpG
97WrOemdvo2eLOlS5QUq+3wjszvHSIucifDHNuRdjxf86SaNJd3jiM3hwsiUN44n2jeL4XuM4zmVm8/qKvddeIYy+U9DQDlOFlSWOkk7VGZp1chJ8+AtnTIy6bWHIh/CT2RAf7J5MY8JndPfbAZi++msOncmI2HZX7knlSNq3NLipIvfcYodfw8kSR0m2fyVM6wpzhjgnwWs0ggEzYNhSSOjgd3SmlGhetyq9WtYhKzuXV5bsjth256zL29uoZwUNw8wSR0l0dCvM7AfTe8Gh9VA5HnpNgJ+ugTv6WHlzE3G/fKApZaOEf2w4yJ5j5zzf3qnzWXy8/ShRVtDQE5Y4SpIz38DcETClA+xdAjGx0Pl5p6ZU2/+EMnZy0PgjIa4Sg5Lqk6vw8uJdnm9v3sZDXMxROjapSZ2qVtAw3Gw8jpLgwimnltTnUyEnE6Ki4e7Hncq1leL8js4YAEZ2bcw/1h9iyfZjrEs5RdsEb847qOqlezcG2klxT9gex43sYgZ8OsEpQrj6NSdptOgPT30BPcZb0jDFSq3Y8gz73i2AtwUQtxxKY+fRc9xUKYauzeI92UZpZ4njRpSTTe0jS2FiG1j6a8hMg1s7wfB/Q99pcNMtPgdoTHDDO95K9YrRfJFymuU7j3uyjbzy6T9qXZeYsvYnzgvWqzeSgPG9b981Cc4dhtot4ZG58Og/4eZWfkdozFXFlo++dAf5y4t2kZMb3r2OjKwc5m86DNi9G16yxHGjOLAW/tYDZg2G1J1klK8FP57m7GXc1sXv6IwptIfbNaRutQrsOnaOeRsPhfW1F207wrnMbO6qX42mtWPD+trmMkscxV2w8b27j+fzpNehZX+Iso/Q3FjKR5fh592cAoh/XrKbby+GrwDi5VH+7BJcL9lfneKqoPG9R26CdiPQKLu01ty4Hmxdl6bxsRw6k8HMNV+H5TW/PnmeNftOUT46ih/eZQUNvWSJo7i51vje5av4HaEx161MlPBc96YATE7ey9lvL173a+bdKd6zuRU09JoljuKisON7G1NCdLm9FncnVOf0hYtM/WTfdb3Wdwoa3m0nxb1micNvubmw+T14zcb3NqWLiFN2HWDayv0cP/dtkV/rkz2pHD37LQ1rWEHDSLDE4RdV2LsU/tIRPvgvOHMAajaDwbNh6EKof7ffERrjucSGN3F/s3gyLuYwaVnRy67PWZd3Urw+YrXYPGeJww+HN8KMPjCzLxzbAlXqQp/J8MQqaNrdihCaUuW57k2JEvj75wdIOXE+5PVPpmeyZPsxp6BhG7uaKhIscURS3vjeb3SC/f/ON773wza+tymVmsTH0rdNPbJzlf/7OPQCiPM2HeZijvL9JjWpXbW8BxGa/CxxREJ6Kix8Fl67G7Z94Izvfd9I59La9qMg2qp3mtLtmW5NiCkbxYLNR9hyMK3Q66kq77n3bgy0k+IRY4nDS5npsGI8TGwFn78BuTnQ6iEYuQEe+C1UtJN4xgDcXK0Cj93bEICXF+8s9HqbD6ax69g5alSKocvtVtAwUjxNHCLSXUR2icheERkTZPkQEUkVkU3uY5g7v5WIrBaRbSKyWUQGBqzzlojsD1in+BVoKnB871Xw4Os2vrcxQfy0UyNiy5Vl5Z4TrNp7olDrzF5nBQ394FlPi0gZYDLQA7gDGCwiwQa4nq2qrdzHNHfeBeBRVb0T6A68IiLVAtZ5NmCdTV69h5CpwrZ5MPmeAsb3vtPvCI0ptqpXimFEp9sA+MNHO8m9RgHEjKwc/pVX0NAOU0WUlyk6CdirqvtUNQuYBfQpzIqqultV97jTh4HjQE3PIg2HS+N7PwanvrLxvY0pgqHtE6gVW44th9JYuPXIVdt+tNUpaNiqfjWaxFtBw0gSrwZTEZF+QHdVzTv89Ahwj6o+FdBmCPB7IBXYDTyjqt/ke50kYDpwp6rmishbwL1AJrAMGKOqmUG2PxwYDhAfH584a9asIr2P9PR0KleuXODySukp3LpvBjVOrQcgM6Y6KQmDOFr7fjTKuwEWrxWXXyyu0FhcV0o+cJHp27OIryj8rkMFykZdvjw9MK7fr81g1+lchtwZQ6f6/pYYKamfY+fOnderatsrFqiqJw+gPzAt4PkjwKR8bWoA5dzpEcDyfMvrALuAdvnmCVAOJ6G8cK1YEhMTtaiSk5ODLzh9QPWDEaovVlV9sYrq7+qqrnhZNTO9yNsKS1w+s7hCY3FdKSs7Rzv9MVkbjl6gb69O+c6yvLj2p6Zrw9ELtOnzC/VsRpYPUX5XSf0cgXUa5G+ql4eqDgKBBx7rAYcDG6jqSb28tzAVSMxbJiJVgA+B51V1TcA6R9z3lAn8DeeQWORcOAWL/wcmJcKX70JUWbhnBIzaBN9/FmIqRTQcY0qa6DJR/PIBpwDiq8v2cCEr+4o2c9Y7ByZ6tqhDrBU0jDgvE8cXQGMRuUVEYoBBwPzABiJSJ+Bpb2CHOz8GmAvMUNU5wdYRp67Ag8BWT6L/dAKcO3b5ebDxvZv3s/G9jfFAzxa1aVmvKqnnMvnbqpTvLAssaDjQRvnzhWeJQ1WzgaeAxTgJ4T1V3SYi40Skt9tspHvJ7ZfASGCIO38A0BEYEuSy23dEZAuwBYgDXvLkDaQfh1WvgubAhreDj+/d7682vrcxHhARRnd3CiBOWfEVp89nXVr2ye5Ujp3NJKFGRZKsoKEvvDt7C6jqQmBhvnkvBEyPBcYGWW8mMLOA14zMOKn3jYTX2pJUZi5kuEfYareEbr+xoVqNiYD2jeL4XuM4Vu45weTkvTzfy7ma/z333o3+VtDQN3bHTEFysyHrPBUzDkO1Bja+tzE+yNvrmLH6aw6dyWDWzkyW7rCChn6zxFGQavWhxQBn+swB+GAYjKsOv64Kyb/3NzZjSonmdavyw7tuJisnlwlLdrMoJZuLOUqnprWsoKGPPD1UdUPLOg9HNrHj9lE0GzTO72iMKbV+0a0JH205wgcbDl6aN8BOivvK9jgK8uEvoV5bjtW2Q1PG+CkhrhKDkxqQV4HEKWhYy9+gSjlLHMFsfAcOb4Cef/Q7EmMM8LOujagQ7YxX8+M2VtDQb3aoKpjzqdD/LbuZzxgfTViym1eX7bli/tSV+5m6cv+l56O6NuaZbk0iGVqpZ4kjmA5P+x2BMaXeM92aXJEQEsZ8SMoffuBTRCaP7e8ZY4wJiSUOY4wxIbHEYYwxJiSWOIwxN4w+t1kl3OLAEocx5obxo8YxfodgsMRhjDEmRJY4jDHGhMQShzHGmJCIM6xsySYiqcDXRVw9DjgRxnDCxeIKjcUVGosrNCU1roaqWjP/zFKROK6HiKxT1bZ+x5GfxRUaiys0FldoSltcdqjKGGNMSCxxGGOMCYkljmt7w+8ACmBxhcbiCo3FFZpSFZed4zDGGBMS2+MwxhgTEkscxhhjQmKJAxCRN0XkuIhsLWC5iMhEEdkrIptFpE0xiauTiKSJyCb38UKE4qovIskiskNEtonIqCBtIt5nhYwr4n0mIuVF5HMR+dKN6zdB2pQTkdluf60VkYRiEtcQEUkN6K9hXscVsO
0yIrJRRBYEWRbx/ipkXL70l4ikiMgWd5vrgiwP7/dRVUv9A+gItAG2FrC8J/ARIEA7YG0xiasTsMCH/qoDtHGnY4HdwB1+91kh44p4n7l9UNmdjgbWAu3ytfkpMMWdHgTMLiZxDQFei/T/MXfbPwfeDfZ5+dFfhYzLl/4CUoC4qywP6/fR9jgAVf0EOHWVJn2AGepYA1QTkTrFIC5fqOoRVd3gTp8DdgB18zWLeJ8VMq6Ic/sg3X0a7T7yX5XSB5juTr8PdBURKQZx+UJE6gE/AKYV0CTi/VXIuIqrsH4fLXEUTl3gm4DnBykGf5Bc97qHGj4SkTsjvXH3EEFrnF+rgXzts6vEBT70mXt4YxNwHFiiqgX2l6pmA2lAjWIQF0Bf9/DG+yJS3+uYXK8AzwG5BSz3pb8KERf4018KfCwi60VkeJDlYf0+WuIonGC/ZIrDL7MNOLVk7gImAfMiuXERqQz8A3haVc/mXxxklYj02TXi8qXPVDVHVVsB9YAkEWmer4kv/VWIuP4FJKhqS2Apl3/le0ZEegHHVXX91ZoFmedpfxUyroj3l6u9qrYBegBPikjHfMvD2l+WOArnIBD4y6EecNinWC5R1bN5hxpUdSEQLSJxkdi2iETj/HF+R1U/CNLElz67Vlx+9pm7zTPACqB7vkWX+ktEygJVieBhyoLiUtWTqprpPp0KJEYgnPZAbxFJAWYBXURkZr42fvTXNePyqb9Q1cPuv8eBuUBSviZh/T5a4iic+cCj7pUJ7YA0VT3id1AiUjvvuK6IJOF8nicjsF0B/grsUNU/F9As4n1WmLj86DMRqSki1dzpCsD9wM58zeYDj7nT/YDl6p7V9DOufMfBe+OcN/KUqo5V1XqqmoBz4nu5qj6cr1nE+6swcfnRXyJSSURi86aBB4D8V2KG9ftYtsjRliAi8necq23iROQg8CLOiUJUdQqwEOeqhL3ABWBoMYmrH/CEiGQDGcAgr788rvbAI8AW9/g4wK+ABgGx+dFnhYnLjz6rA0wXkTI4ieo9VV0gIuOAdao6HyfhvS0ie3F+OQ/yOKbCxjVSRHoD2W5cQyIQV1DFoL8KE5cf/RUPzHV/D5UF3lXVRSIyArz5PlrJEWOMMSGxQ1XGGGNCYonDGGNMSCxxGGOMCYklDmOMMSGxxGGMMSYkljiMKSIRyQmogrpJRMaE8bUTpICqyMb4ze7jMKboMtxyHcaUKrbHYUyYuWMjjBdnrIvPRaSRO7+hiCxzC+AtE5EG7vx4EZnrFl78UkTuc1+qjIhMFWesjI/du7sRkZEist19nVk+vU1TilniMKboKuQ7VDUwYNlZVU0CXsOpqIo7PcMtgPcOMNGdPxH4t1t4sQ2wzZ3fGJisqncCZ4C+7vwxQGv3dUZ49eaMKYjdOW5MEYlIuqpWDjI/BeiiqvvcootHVbWGiJwA6qjqRXf+EVWNE5FUoF5Acby8svBLVLWx+3w0EK2qL4nIIiAdp7LvvIAxNYyJCNvjMMYbWsB0QW2CyQyYzuHyOckfAJNxKq+ud6vDGhMxljiM8cbAgH9Xu9OfcbkY30PAp+70MuAJuDSwUpWCXlREooD6qpqMM6BQNeCKvR5jvGS/VIwpugoBVXgBFqlq3iW55URkLc6Ps8HuvJHAmyLyLJDK5Qqlo4A3RORxnD2LJ4CCSl6XAWaKSFWcwXkmuGNpGBMxdo7DmDBzz3G0VdUTfsdijBfsUJUxxpiQ2B6HMcaYkNgehzHGmJBY4jDGGBMSSxzGGGNCYonDGGNMSCxxGGOMCcn/Awoi08YnlQkrAAAAAElFTkSuQmCC\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "# Plotting our accuracy charts\n",
    "import matplotlib.pyplot as plt\n",
    "\n",
    "history_dict = history.history\n",
    "\n",
    "acc_values = history_dict['accuracy']\n",
    "val_acc_values = history_dict['val_accuracy']\n",
    "epochs = range(1, len(acc_values) + 1)\n",
    "\n",
    "line1 = plt.plot(epochs, val_acc_values, label='Validation/Test Accuracy')\n",
    "line2 = plt.plot(epochs, acc_values, label='Training Accuracy')\n",
    "plt.setp(line1, linewidth=2.0, marker='+', markersize=10.0)\n",
    "plt.setp(line2, linewidth=2.0, marker='4', markersize=10.0)\n",
    "plt.xlabel('Epochs')\n",
    "plt.ylabel('Accuracy')\n",
    "plt.grid(True)\n",
    "plt.legend()\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.4"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
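Note: the two plotting cells above read their metrics from a history object produced earlier in this notebook by model.fit. As a minimal sketch of where it comes from (the names model, x_train, y_train, x_test, y_test and the batch size/epoch count here are assumptions for illustration, not values from the course; the 'accuracy'/'val_accuracy' keys exist when the model is compiled with metrics=['accuracy']):

    # fit() returns a History object; history.history is a dict of per-epoch
    # metrics ('loss', 'val_loss', 'accuracy', 'val_accuracy') that the
    # plotting cells read.
    history = model.fit(x_train, y_train,
                        batch_size=32,
                        epochs=25,
                        validation_data=(x_test, y_test))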
10. Data Augmentation/10.4 - Data Augmentation Demos.ipynb
ADDED
@@ -0,0 +1,420 @@
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Data Augmentation\n",
"\n",
"### Let's look at our untouched dataset"
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"<Figure size 640x480 with 9 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"# Plot images\n",
"from tensorflow.keras.datasets import mnist\n",
"from matplotlib import pyplot\n",
"\n",
"# load data\n",
"(x_train, y_train), (x_test, y_test) = mnist.load_data()\n",
"\n",
"# create a grid of 3x3 images\n",
"for i in range(0, 9):\n",
"    pyplot.subplot(330 + 1 + i)\n",
"    pyplot.imshow(x_train[i], cmap=pyplot.get_cmap('gray'))\n",
"\n",
"# show the plot\n",
"pyplot.show()"
]
},
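Before augmenting anything it helps to confirm what mnist.load_data() hands back; a quick check (the commented outputs are the standard MNIST split sizes):

    from tensorflow.keras.datasets import mnist

    (x_train, y_train), (x_test, y_test) = mnist.load_data()
    print(x_train.shape, x_train.dtype)  # (60000, 28, 28) uint8
    print(x_test.shape)                  # (10000, 28, 28)
    print(y_train[:9])                   # labels of the nine digits plotted above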
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Random Rotations\n",
"- Keras rotates each image by a random angle drawn from the specified rotation range"
]
},
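The randomness can be inspected directly: ImageDataGenerator can sample the transform parameters it would apply to one image. A small sketch; 'theta' is the rotation angle in degrees, drawn from [-60, 60] for rotation_range=60:

    from tensorflow.keras.preprocessing.image import ImageDataGenerator

    datagen = ImageDataGenerator(rotation_range=60)
    for _ in range(3):
        params = datagen.get_random_transform(img_shape=(28, 28, 1))
        print(params['theta'])  # a different random angle on every draw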
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [
{
"data": {
"image/png": "<base64 PNG data omitted (3x3 grid of randomly rotated MNIST digits)>\n",
"text/plain": [
"<Figure size 432x288 with 9 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
}
],
"source": [
"from tensorflow.keras.datasets import mnist\n",
"from tensorflow.keras.preprocessing.image import ImageDataGenerator\n",
"from matplotlib import pyplot\n",
"from tensorflow.keras import backend as K\n",
"\n",
"# Load data\n",
"(x_train, y_train), (x_test, y_test) = mnist.load_data()\n",
"\n",
"# Reshape our data to the format [samples, width, height, color_depth]\n",
"x_train = x_train.reshape(x_train.shape[0], 28, 28, 1)\n",
"x_test = x_test.reshape(x_test.shape[0], 28, 28, 1)\n",
"\n",
"# Change datatype to float32\n",
"x_train = x_train.astype('float32')\n",
"x_test = x_test.astype('float32')\n",
"\n",
"# Create our image generator\n",
"# Define random rotation parameter to be 60 degrees\n",
"train_datagen = ImageDataGenerator(rotation_range=60)\n",
"\n",
"# fit parameters from data (only required when featurewise statistics are used)\n",
"train_datagen.fit(x_train)\n",
"\n",
"# configure batch size and retrieve one batch of images\n",
"for x_batch, y_batch in train_datagen.flow(x_train, y_train, batch_size=9):\n",
"    # create a grid of 3x3 images\n",
"    for i in range(0, 9):\n",
"        pyplot.subplot(330 + 1 + i)\n",
"        pyplot.imshow(x_batch[i].reshape(28, 28), cmap=pyplot.get_cmap('gray'))\n",
"    # show the plot\n",
"    pyplot.show()\n",
"    break"
]
},
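The cell above only visualizes one augmented batch and then breaks out of the loop; in training, the same flow() iterator is passed straight to fit. A minimal sketch, assuming a compiled Keras model named model (the model is not part of this notebook):

    # Stream augmented batches into training instead of plotting them
    batch_size = 32
    model.fit(train_datagen.flow(x_train, y_train, batch_size=batch_size),
              steps_per_epoch=len(x_train) // batch_size,
              epochs=5)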
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Random Shearing and Zooming"
]
},
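With zoom_range=0.5 the zoom factor is drawn from [0.5, 1.5], and shear_range is a shear angle in degrees. The same transforms can also be applied with explicit parameters instead of random ones; a small sketch using apply_transform:

    import numpy as np
    from tensorflow.keras.preprocessing.image import ImageDataGenerator

    datagen = ImageDataGenerator()
    img = np.random.rand(28, 28, 1)  # stand-in for a single MNIST digit
    # zx/zy are the zoom factors per axis; shear is in degrees
    warped = datagen.apply_transform(img, {'shear': 20, 'zx': 0.8, 'zy': 0.8})
    print(warped.shape)  # (28, 28, 1), same shape as the input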
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [
{
"data": {
"image/png": "<base64 PNG data omitted (3x3 grid of randomly sheared and zoomed MNIST digits)>\n",
"text/plain": [
"<Figure size 432x288 with 9 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
}
],
"source": [
"from tensorflow.keras.datasets import mnist\n",
"from tensorflow.keras.preprocessing.image import ImageDataGenerator\n",
"from matplotlib import pyplot\n",
"from tensorflow.keras import backend as K\n",
"\n",
"# Load data\n",
"(x_train, y_train), (x_test, y_test) = mnist.load_data()\n",
"\n",
"# Reshape our data to the format [samples, width, height, color_depth]\n",
"x_train = x_train.reshape(x_train.shape[0], 28, 28, 1)\n",
"x_test = x_test.reshape(x_test.shape[0], 28, 28, 1)\n",
"\n",
"# Change datatype to float32\n",
"x_train = x_train.astype('float32')\n",
"x_test = x_test.astype('float32')\n",
"\n",
"# Create our image generator\n",
"# Define shearing and zooming parameters to be 0.5 each\n",
"train_datagen = ImageDataGenerator(shear_range=0.5,\n",
"                                   zoom_range=0.5)\n",
"\n",
"# fit parameters from data (only required when featurewise statistics are used)\n",
"train_datagen.fit(x_train)\n",
"\n",
"# configure batch size and retrieve one batch of images\n",
"for x_batch, y_batch in train_datagen.flow(x_train, y_train, batch_size=9):\n",
"    # create a grid of 3x3 images\n",
"    for i in range(0, 9):\n",
"        pyplot.subplot(330 + 1 + i)\n",
"        pyplot.imshow(x_batch[i].reshape(28, 28), cmap=pyplot.get_cmap('gray'))\n",
"    # show the plot\n",
"    pyplot.show()\n",
"    break"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Horizontal and Vertical Flips"
]
},
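With horizontal_flip=True or vertical_flip=True, each generated image is flipped along that axis with probability 0.5, independently. A flip is just an axis reversal, as a NumPy sketch shows; note that flips are usually a poor fit for digit data, since a mirrored digit may no longer belong to its original class:

    import numpy as np

    img = x_train[0].reshape(28, 28)  # one digit from the arrays above
    h_flipped = np.fliplr(img)        # horizontal (left-right) flip
    v_flipped = np.flipud(img)        # vertical (up-down) flip
    print(np.array_equal(np.fliplr(h_flipped), img))  # True: flipping twice restores the image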
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [
{
"data": {
"image/png": "<base64 PNG data omitted (3x3 grid of randomly flipped MNIST digits)>\n",
"text/plain": [
"<Figure size 432x288 with 9 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
}
],
"source": [
"from tensorflow.keras.datasets import mnist\n",
"from tensorflow.keras.preprocessing.image import ImageDataGenerator\n",
"from matplotlib import pyplot\n",
"from tensorflow.keras import backend as K\n",
"\n",
"# Load data\n",
"(x_train, y_train), (x_test, y_test) = mnist.load_data()\n",
"\n",
"# Reshape our data to the format [samples, width, height, color_depth]\n",
"x_train = x_train.reshape(x_train.shape[0], 28, 28, 1)\n",
"x_test = x_test.reshape(x_test.shape[0], 28, 28, 1)\n",
"\n",
"# Change datatype to float32\n",
"x_train = x_train.astype('float32')\n",
"x_test = x_test.astype('float32')\n",
"\n",
"# define data preparation\n",
"train_datagen = ImageDataGenerator(vertical_flip=True,\n",
"                                   horizontal_flip=True)\n",
"\n",
"# fit parameters from data (only required when featurewise statistics are used)\n",
"train_datagen.fit(x_train)\n",
"\n",
"# configure batch size and retrieve one batch of images\n",
"for x_batch, y_batch in train_datagen.flow(x_train, y_train, batch_size=9):\n",
"    # create a grid of 3x3 images\n",
"    for i in range(0, 9):\n",
"        pyplot.subplot(330 + 1 + i)\n",
"        pyplot.imshow(x_batch[i].reshape(28, 28), cmap=pyplot.get_cmap('gray'))\n",
"    # show the plot\n",
"    pyplot.show()\n",
"    break"
]
},
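Each demo in this notebook isolates a single transform; in a real pipeline they are usually combined in one generator, and every batch drawn from flow() then gets an independent random mix of all of them. A minimal sketch with illustrative parameter values (not taken from this notebook):

    from tensorflow.keras.preprocessing.image import ImageDataGenerator

    train_datagen = ImageDataGenerator(rotation_range=30,
                                       shear_range=0.2,
                                       zoom_range=0.2,
                                       width_shift_range=0.1,
                                       height_shift_range=0.1,
                                       horizontal_flip=True)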
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Random Shifts"
]
},
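Shifts translate the image along an axis; width_shift_range and height_shift_range are fractions of the image width/height when given as floats (or pixel counts when given as integers). A minimal sketch of a shift-only generator, assuming the same visualization loop as the cells above:

    from tensorflow.keras.preprocessing.image import ImageDataGenerator

    # shift each image by up to 20% of its width and height
    train_datagen = ImageDataGenerator(width_shift_range=0.2,
                                       height_shift_range=0.2)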
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [
{
"data": {
"image/png": "<base64 PNG data omitted (3x3 grid of randomly shifted MNIST digits)>\n",
Wtgjx49gPLv1DPPPFPLblWFTGucpVrNI4HZqNxowyBdGxPpWjsqHjjNrBcwFbgyhPBxOvtQW+RdHtizTf/3f/93cm7ZsmVAzJ6TznzkmcK3xBexIf46PvjggwDcddddybVKcg7WM0XRVWSjXnV1R3cPIvFQ59mzZydtihCsUJE7kpn1pFmEO0IIXq9A5UYLjnRtTKRr7WnX4rTmn6rbgcUhhJtSl+qylKy7A3nZUYgOte6I6+4SALvttlvZ61tygXCH3ueffx6A1157LblWzzkD26JouorKqEdd07k6Pe+muwH6d9Nnc0Whkqn6ccB3gflm5kWtr6NZgLtLpUffBc5t5fWiPpGujYl0zYFKdtWfBVpbIFG50YIiXRsT6ZoPDRc51BLuFuHZVuo564oQjUbatc/Labsbkm+uepayoqBYdSGEyEi3sDiFEPnjm0JeJQFijLoXR/QKDJ5TtSjI4hRCiIzI4hRC1AQPUT7vvPOSc77e6Vne01mRioQsTiGEyIgsTiFEVfG1zSOPPBIor9G1dOlSIOavLWqNLlmcQgiREQ2cQgiREU3VhRBVpXfv3kDMf+ulZyDme5g3b97WLywQsjiFECIjlmdBJDNbBWwA6j/h3tbsTuf7vW8IYY9qdKaekK7StQ6pqa65DpwAZjYnhHBkrjetAkXtd14U9fMpar/zoqifT637ram6EEJkRAOnEEJkpCsGzgldcM9qUNR+50VRP5+i9jsvivr51LTfua9xCiFE0dFUXQghMqKBUwghMpLbwGlm48xsiZktM7Nr87pvVsxskJk9aWaLzWyhmV1ROt/XzB4zszdKj326uq/1QhG0la7Zka5t3DePNU4z6wEsBcYAy4GXgAtCCItqfvOMlGpO9w8hzDWz3sDLwNnAxcCaEMIvSv9EfUII13RhV+uComgrXbMhXdsmL4vzaGBZCOGtEMIXwJ3AWTndOxMhhKYQwtzS8XpgMTCA5v5OLjWbTLM4oiDaStfMSNc26NTAmcGUHwC8l3q+vHSurjGzwcBIYDbQL4TQBM1iAXt2Xc9qS8YpWuG07a66QmN/Z/PUtcMDZ8mU/x1wGjAcuMDMhrfWvIVzde0HZWa9gKnAlSGEj7u6P3mRUVcomLbdVVdo7O9s3rp2eI3TzEYD/x5C+LvS838BCCHc2FpbYGyHe5oRr9sMsPPOOwOwxx7N8fq9evVKrnl1vc8//7zs+XbbbZe0+cpXmrPvrV27FoAVK1Yk1zZu3JilW6vrPRlEFl1T7Z/Lr4d1Sd3rCh36zkrXVnTtTD7Olkz5Y7ZsZGaXApcCh3biXpnZZZddkuOxY5vH6x/84AcAjB49Orn20UcfATGl/7p164CYSxBgzz2brfy77roLgJtuuim5tmzZsizdeidL4y4iq66iGLpCBdpK1zJa1bUzA2dFpnwIYQIwwcxOB6Z34n6ZOP7445Pjq666CoCvfvWrALz++uvJtWnTpgHw1FNPAbB69eqtXn/ZZZcBcMwxzf9j+++/f3It48BZBDLpCmBmdTuFE2W0q610rYzObA4tBwalng8E3m+tcQjhoU7cS+RHJl1FoZC2VaIzA+dLwFAzG2Jm2wLnA9Oq0y3RhUjXxkXaVokOT9VDCJvM7IfAI0APYGIIYWHW99lhhx2AuCnzxRdfdLRLZQwfHjcLvQbKjBkzgLhWCTBz5kwgTtGdTz75JDk++eSTATjwwAOBuFnUiFRLV1F/SNvq0akRoDT91hS8wZCujYu0rQ5dbjpts03zakG1Qz9feOGF5HjNmjUAvPjiiwAsWhSjxlqzcHv27Jkcu4X55ZdfAtE6FkJ0T5QdSQghMtLlFueGDRtq8r6zZs1q8bhSBgyI0WW77747EK1TtzyFEPWHzxZ9Ntu3b9/kmvt3+yxyp512AqKvNsRAl2effbbVe8jiFEKIjHS5xVlv+K/Ufvvtl5xzi3PevHkArFq1Kv+OCdFNSVuDe++9N1AeUr0lgwcPBqI3zUEHHZRc84hAt0r79+8PwGGHHZa0efPNNwE45JBDWr2HLE4hhMiIBk4hhMiIpupbMGzYMACOO+645JxvCrmzvJvyQoja881vfjM5PvPMM4G4qdOSG+OIESMA6NOnuVqGL79Vio8BbSGLUwghMiKLs4QvOl944YUAjBkzJrn2xBNPANE9oVYuVEKIrTn44IOT42OPPRYoTxvZGh42nQ5Y8U2lHXfcEWjZGn3ttdfafW9ZnEIIkZFub3H6L8/pp58OwDnnnAPAxx/H7PsPPvgg0JC5N4WoeyZOnJgcu4tRv3792n3d8uXLgXKL89RTTwVgyJAhQLQ4PaE5wC9/+ct231sWpxBCZEQDpxBCZKTdqbqZTQTGAytDCCNK5/oCdwGDgbeB80IIa2vXzdZxU9tNd49LbakgW0uZkDw64Dvf+Q4QI4ZeeeWVpM2+++4LxByfS5YsSa599tlnVfgr8qfedRUdp9G0ffXVV5PjH//4x0DbkUMeFeQuS4cffnhybfz48WWv9yW5e+65J2nz6KOPttunSizOScC4Lc5dC8wMIQwFZpaei2IxCenaqExC2taUdi3OEMLTpULvac4CTiwdTwZmAddUsV8tYtZcaypdLM3dE/7mb/4GgL322gsod1fYtGkTEEsAp/Gs7h7P6m3SlqS/p8fMvvHGG539U7qcetLVLYR0Zn3X2rXzjFTVztvaiNSTttXGXYx8RukzTd80gjg+uCO8b/hC/C77/9fixYsBuOGGG5I2nr+3LTq6q94vhNAEEEJoMrM9W2uocqOFQro2LhVpK10ro+buSNUoN+rrmEcddRQAF198cXLtxBNPBOL6ZVNTU9lrAHbbbTcgrl/6c4APPvgAiOGUngEpnUHeM8b7e3/66acd+TMaio7q6r/0EGcF1113HQDf+973kmuu0YQJE4DoIvLWW2+1+p5pi9X137x5c4uPpb+h0m53G+qxPPB2222XHHs45OjRowE44ogjgJgRCeKaZiUhl/7/lA5qqeT/oqO76ivMrD9A6XFlB99H1BfStXGRtlWkoxbnNOAi4Belx/ur1qMWcEvx0kubZxBnnHFGcu2ZZ54B4N577wXi+mN6V93b+3qIV9YEeOCBBwC45ZZbAHj99deB8jXObmSZ1FxX91AA+NWvfgXAWWedBcC22267VXtfp/K8iWmL0y1NX5/+9re/nVzz/IoePrdwYXMxx+eeey5p8+6773bmTykauX5nq4k7qwNcf/31QAyJTlujleBr5QsWLADi+JHVO6Zdi9PMpgDPAweZ2XIzu4TmD3+Mmb0BjCk9FwVCujYu0rb2VLKrfkErl06pcl9EjkjXxkXa1p5CxKqfckqz3ieffDJQ7g502223ATGDkZe58E0jgJEjRwLR7WX69OnJtUmTJgFxUyi9eSCqhy/Qp13JBg0aBLQ8RXcd3nvvPQDWrVu3VZtDDz0UiNO3ceOi66K/59e//nUgxi37kgzA5MmTAfjLX/4CdKslmUKx6667Jse+oehO8b4p7JtEUL4UB+XFFR955BEAfvOb3wDw/PPPA9HNqVIUcimEEBkp
hMXpTtAeOukuRBA3gU477TQAjjnmmLLnEDMgeVjVH//4x+Sa/3LJ0qwt7j5y7bUxYMWtBLf00hp4WKtr5pmpPG8qwE9/+lMAxo4dC5S7nWzcuBGIoXVe7vlf//VfkzZuydx8881AtEpFffHyyy8nx76huHr1aiC6oE2ZMiVpM3DgwLLX//nPf06O/X/G/786OsuQxSmEEBkphMU5a9YsIObS8/BKiCFUblX62tbSpUuTNo8//jgAM2bMAODtt9+uaX/F1nitGA9igGgt+EwinVzh3/7t3wCYM2dO2fucdNJJybFbrO687K4lAA8//DAAo0aNKru/J4GB6KbmwQ++1gla76wn0muUnhvX1zHPO+88ALbffvutXudW6R133JGc8yCWzuori1MIITKigVMIITJSiKm6T6F8qu2bRRBN9hUrVgBbRwRAnLa3lI9TdB2+GeRTZZ+ew9ZTdMc3eSBu/Pz85z8H4Pe//31yzUshTJ06FYibRenoogMOOACIrmvpIl3vv/9+1j9H5Igv11199dVAucuSa+3RhL/73e+Sa+kSGZ1BFqcQQmSkEBan58j0uHIv0wvRBcVz6LmjtNyL6p8PP/wQiE7prVmZadIbBe687PHnLVkTvkHgmwJujUDM4ejx82mrRRZnfeJZ3c8//3wg5rFIZ8Z6+umnAbj11luB6lmZaWRxCiFERgphcTpr164texTdDw+Vg5Yd57fE10FbqlHjr/c1c81S6h93IfNHd0NMZ23/j//4DyCuWaf3RKqFLE4hhMhIoSxOIdxZvlI8yYdbKGkHeK9w6IkfPKxT1C+eu9Xzs/oeR7piw5YJQGpBJfk4B5nZk2a22MwWmtkVpfN9zewxM3uj9NinZr0UVUe6NibSNR8qmapvAn4SQhgGjAIuN7PhqNxo0ZGujYl0zYFKEhk3AV4db72ZLQYG0CDlRrsrjahruhCcl0nxqZ0X+Uq38Sn6K6+8AtRmEyFvGlHXNB4Ms+WGXrokStbcmh0h0xpnqVbzSGA2KjfaMEjXxkS61o6KB04z6wVMBa4MIXyc/uVui3osNyoieenqmzotufy4q1A6n2Zr2WvSbbZ8XbpErIfi+eaQZ8/xYAqIeRpbyi5fdBr1+/q3f/u3QAy1du3SFmfWwmsdoSJ3JDPrSbMId4QQ7i2dVrnRgiNdGxPpWnvatTit+afqdmBxCOGm1KXClhsV+es6e/ZsoDwzv687epKNdD5MD5XckhNOOCE59qzyXocmnetzn332AaKl6zk7PaM8xHpT6fsWne72fV25snn8T7uSpcNya0UlU/XjgO8C881sXuncdTQLcHep9Oi7wLm16aKoEdK1MZGuOVDJrvqzQGsLJCo3WlCka2MiXfNBkUOipvimhE+p1q9fn1zzDZsf/vCHZY+V4htILW08uUuKZ9TyshxeRhpi6WFRHFwzLw29ePFioDzrVR4oVl0IITIii1PUFM91eeWVVwJw9NFHZ3q9W5MtZTByt5OHHnoIiCVfIRb18gw5ynzUGEycOBGIOTY963/eGdNkcQohREZkcYqa4E7pY8aMAWIp33QZ1y2twLSTtq9fejbvu+66C4iZbwDeeecdAFatWgU0RsikaBuvO+aPXYUsTiGEyIgsTlETPDTu4osvBmD//fffqo07LfuOaDpk8oMPPgBixvfp06cD+Tg3C9EesjiFECIjGjiFECIjmqqLmvDpp5+WPbpbUbqM66JFi4DoFN+nT0xKPnToUCC6L3kp4BUrVtSy20JUhCxOIYTIiLWW97AmNzNbBWwAWk59U9/sTuf7vW8IYY9qdKaekK7StQ6pqa65DpwAZjYnhHBkrjetAkXtd14U9fMpar/zoqifT637ram6EEJkRAOnEEJkpCsGzgldcM9qUNR+50VRP5+i9jsvivr51LTfua9xCiFE0dFUXQghMqKBUwghMpLbwGlm48xsiZktM7Nr87pvVsxskJk9aWaLzWyhmV1ROt/XzB4zszdKj33ae6/uQhG0la7Zka5t3DePNU4z6wEsBcYAy4GXgAtCCItqfvOMlGpO9w8hzDWz3sDLwNnAxcCaEMIvSv9EfUII13RhV+uComgrXbMhXdsmL4vzaGBZCOGtEMIXwJ3AWTndOxMhhKYQwtzS8XpgMTCA5v5OLjWbTLM4oiDaStfMSNc26NTAmcGUHwCkSwouL52ra8xsMDASmA30CyE0QbNYwJ5d17PaknGKVjhtu6uu0Njf2Tx17fDAWTLlfwecBgwHLjCz4a01b+FcXftBmVkvYCpwZQjh467uT15k1BUKpm131RUa+zubt64dXuM0s9HAv4cQ/q70/F8AQgg3ttYWGNvhnjYGq+s9GUQWXVPtn8uvh62z8847J8d77bUXADvuuCMQ6xGlM8h7jaMddthhq/fy9v798PR46bR2GzZs8MO61xU69J2tC127kFZ17Uw+zpZM+WO2bGRmlwKXAod24l6Nwjtd3YEKyKprl+M5PkeNGpWcu/ba5lnoyJEjgVjQrampKWnjheOGDRsGlBeLW7duHRAH2jlz5gBw0003JW1eeOEFPyyCrlCBtvWkax3Qqq6dGTgrMuVDCBOACWZ2OjC9E/cT+ZBJVwAz69IpXP/+/QE488wzk3MjRowA4L777gPgt7/9LRCTJwOcccYZAPzsZz8D4mAJ8Mc//hGAlStXAtHSXLBgQfX/gPxoV9t60rWe6czm0HJgUOr5QOD91hqHEB7qxL1EfmTSVRQKaVslOjNwvgQMNbMhZrYtcD4wrTrdEl2IdG1cpG2V6PBUPYSwycx+CDwC9AAmhhAWVq1noksooq577NG8ft+3b9/k3BNPPAHEKfq8efMA2HPP6JUyevRoIG4g3Xbbbcm1e++9F4ibQo1AEbWtVzpVrK00/dYUvMGQro2LtK0OqnIpCs/mzZsB2LhxY3Lu7bffBmDJkiVAtDS/+c1vJm1OOOEEAJ599lkAZsyYkVxrJEtTVB9lRxJCiIzI4hSFxy1OX6sE2G+//YDoo3nUUUcBcNlll231+ldeeQUoc2gXok1kcQohREZkcYrC88knnwAxOgjga1/7GgDXX389APvvvz8AAwbEPBW33347APfffz8Aa9eurX1nRUMgi1MIITKigVMIITKiqbooPH/5y18AePHFF5NzY8aMAWDcuHEAfPbZZwA8+eSTSRuPY/d4dCEqRRanEEJkRBanKCzbbNP8u7/TTjsB0KtXr+Sap5rzfJqeFu7Xv/510uapp54qayNEpcjiFEKIjMjiFIXCrUyAQYOaM6Sdf/75AFxyySXJNXc/8jDMl19+uewRouO8EFmRxSmEEBnRwCmEEBlpd6puZhOB8cDKEMKI0rm+wF3AYOBt4LwQgsIuCkTRdPV6QP369UvOXXDBBQBcfvnlQMzLCdH9qDtu/OShrRe4O/DAA4Hyz/nPf/4zEDfo0rp4zlR//T777APAtttuu9U9Vq9eDcD06bHizhdffNHRLleVSizOScC4Lc5dC8wMIQwFZpaei2IxCenaqExC2taUdi3OEMLTpULvac4CTiwdTwZmAdd
UsV+ixhRFV7c0PZ/m2WefnVz7zne+A0QrZvbs2cm1Dz/8EIhVLj1zUjqD0po1a2rV7S6lltq6Hr4xd9VVVwHRJQxg7ty5AGy33XYA7Lvvvsk1tzC9/WGHHQbEiqNpfCNv1qxZybl6sTg7uqveL4TQBBBCaDKzPVtrqHKjhUK6Ni4VaStdK6Pm7kgqN9qY1FpXt2x8fewb3/gGAD/60Y+SNnvvvTcAjzzyCAA333xzcu2tt94C4LrrrgOiZXPIIYckbZYvX17tbhee9nTt3bs3ACeddBIA48ePB2C33XZL2nzrW98CYNOmTUC5Ze8zAc9E5bWh0rqkLdRSnzr899SKju6qrzCz/gClRwX7NgbStXGRtlWkoxbnNOAi4Belx/ur1qOc8d28Pn36lD2meeedd4C4U9vA1I2uu+++OxB3zv/xH/+x7DzAPffcA8Qwytdffz255hamr436Dm/agfcDBRcAAAj7SURBVL6bURVtfT351FNPBaIFms5lunTpUiBWFvXvD8RaUB988EHZ+/7qV79Kjt1zwl9XL+uaadr9LzKzKcDzwEFmttzMLqH5wx9jZm8AY0rPRYGQro2LtK09leyqX9DKpVOq3BeRI9K1cZG2tadbxqq7mwREdxUvG+slYz/66KOkjbtcLFiwIK8udkvSTtCuw8UXXwxE95Vbb701aeNTdS+dceihhybX3FXpxBNPBGDRokUAfPnllzXoeffBP2t3EVq/fj0A7733XtLGyy17m7/+9a/JNc8PsOuuuwJw0UUXAeWbQ8uWLQPgf/7nf4D6LNXcbRd8hBCio3Qri9NdXLxkLMDPfvYzAM455xwgLnLffffdSRv/lRW1wTdshg8fnpzzGYCH9HnY3dNPP520OfnkkwEYNWoUEN2TIFowbuH4xpFvToiO4WGQEyZMAOLmUKXBBL5JN2LECAAuvbTZZdRdlwBeeOEFAB5++OEq9Lg2yOIUQoiMdAuLc8uwPbcutzwGePfddwGYMWNGck5WSm3xtU13qgYYO3YsENejjzzySACOOOKIpM1+++0HxHVLz70JUTOvMXTnnXcCcf1MdA7/zCuxNP37B9C/f38gzig8b+qbb76ZtHnggQeA+nb/k8UphBAZ0cAphBAZadipenp64JEIF154IQDf+973kmtbbvx41hefGgLcf39hA6MKgccip12FfLOgR48eQNz4+fjjj5M2Pu328sDp6Z5vLPimUj26tHQXevbsmRz7ppBHhLku6Y2gdP7NekUWpxBCZKThLE63NPfaa6/knDtD//jHPwbKLZvHH38ciBaNW5xyQcoPj0VOb9y4S4rr4ZmM3Lk6fbxkyRKg3BqVo3vX425mnoMT4Otf/zoQcw54PPv//u//Jm3SDvP1iixOIYTISMNYnO5YO3DgQCCWjIWYWcfXzW6//fbkmls27gjv6zELFy6scY+F42uc6XWuenZ+FpXRq1cvAE45JYbI++zPZwcelumWZ1GQxSmEEBmppMrlIOAPwF7AZmBCCOHmeqmI6A7SHkbpSSE8eQBEx2hPEPHb3/42ueavcydst1zTiUDawl/nj0VZG613XUXHqAddfdZ28MEHA+VBJl7z6aWXXgLg//7v/4BirGumqcTi3AT8JIQwDBgFXG5mw1HVvKIjXRsT6ZoD7Q6cIYSmEMLc0vF6YDEwgOaqeZNLzSYDZ7f8DqIeka6NiXTNh0ybQ6WSoyOB2WSoiFgt3NUoXYr0qKOOAuCyyy4D4LTTTitrCzBz5kwA7rvvPqDcVcVjZd1lwmNvfcoO0Wk3fc7xvIK+8fTcc88BMStPEehqXUVtyFPX9PfNXQE94MRzogKsXNlc6mjq1KkAvPrqq9XsRm5UPHCaWS9gKnBlCOHj9AfVzutUbrSOka6NiXStLRUNnGbWk2YR7ggh3Fs6vcLM+pd+vVqtmlfNMrK+sOx5GAGuvvpqAEaPHg3AunXrgPIi9pMmTQKiC8QZZ5yRXPNNJM+04xmUfv7znydtPv/8cyCG/6U3gHyRe+LEiUDhLM260FVUl67Qdfvtt0+OPVz5vPPOA8qzHHn48pQpU7a6ViQqKdZmwO3A4hDCTalLXjUPCl7psjsiXRsT6ZoPlVicxwHfBeabmXupXkdzlby7SxX03gXOrU0X45qmZ/r+h3/4h+Sa16bxZAG+xuhuDhDdIq644oqy10D8pXRL0V2XvJYKxNyO7iw/f/785Nqf/vQnoDzcryB0ua6iJnSJrgMGDEiO//7v/x6AnXfeGYDHHnssuXbzzTcDMTFLUamkyuWzQGsLJKqaV1Cka2MiXfNBkUNCCJGRuo1VT+8CusvRjTfeWPY8jW8cnX12s3uaZ2GBOP32TR13iYAYT+uvf/DBBwH453/+56SNF6gSQpTj35tjjz02OTd+/HggLnF52RKIRfOKjixOIYTISN1anJ7LD2LuPl+AdvcgiNakb874JlG6iJT/8r322mtALAEMcPrppwOxUNiqVasAWZlCtIXnZjj88MOBmPUI4sbqo48+CsQN1EZCFqcQQmSkbi3OdLaUV155BYBbbrkFgMMOOyy51tTUBMCLL74IwPvvvw+U15/Zsryv/0pCtDiFEJXjocYeTJIOSlmwYAEAv//974FYcruRkMUphBAZqVuLM41bj9dff30X90SI7o0Hoxx33HEAjBkzBoANGzYkbTwMOV1DqtGQxSmEEBnRwCmEEBkpxFQ9D9zFqSilL4ToCjzX5ne/+10gZkJKFzecPLk5X7JnKmtEZHEKIURGzEuz5nIzs1XABqCI3uW70/l+7xtC2KManaknpKt0rUNqqmuuAyeAmc0JIRyZ602rQFH7nRdF/XyK2u+8KOrnU+t+a6ouhBAZ0cAphBAZ6YqBc0IX3LMaFLXfeVHUz6eo/c6Lon4+Ne137mucQghRdDRVF0KIjOQ2cJrZODNbYmbLzOzavO6bFTMbZGZPmtliM1toZleUzvc1s8fM7I3SY5+u7mu9UARtpWt2pGsb981jqm5mPYClwBhgOfAScEEIYVHNb56RUs3p/iGEuWbWG3gZOBu4GFgTQvhF6Z+oTwjhmi7sal1QFG2lazaka9vkZXEeDSwLIbwVQvgCuBM4K6d7ZyKE0BRCmFs6Xg8sBgbQ3N/JpWaTaRZHFERb6ZoZ6doGeQ2cA4D3Us+Xl87VNWY2GBgJzAb6hRCaoFksYM+u61ldUThtpWtFSNc2yGvgbKnOc11v55tZL2AqcGUI4eOu7k8dUyhtpWvFSNc2yGvgXA4MSj0fCLyf070zY2Y9aRbhjhDCvaXTK0rrKb6usrK113czCqOtdM2EdG2DvAbOl4ChZjbEzLYFzgem5XTvTFhzQffbgcUhhJtSl6YBF5WOLwLuz7tvdUohtJWumZGubd03Lwd4Mzsd+DXQA5gYQrghlxtnxMyOB54B5gObS6evo3nd5G5gH+Bd4NwQwpoW36SbUQRtpWt2pGsb91XkkBBCZEORQ0IIkRENnEIIkRENnEIIkRENnEIIkRENnEIIkRENnEIIkRENnEIIkRENnEIIkZH/B6cpYMGSE3akAA
AAAElFTkSuQmCC\n",
"text/plain": [
"<Figure size 432x288 with 9 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
}
],
"source": [
"from tensorflow.keras.datasets import mnist\n",
"from tensorflow.keras.preprocessing.image import ImageDataGenerator\n",
"from matplotlib import pyplot\n",
"\n",
"# Load data\n",
"(x_train, y_train), (x_test, y_test) = mnist.load_data()\n",
"\n",
"# Reshape our data to be in the format [samples, width, height, color_depth]\n",
"x_train = x_train.reshape(x_train.shape[0], 28, 28, 1)\n",
"x_test = x_test.reshape(x_test.shape[0], 28, 28, 1)\n",
"\n",
"# Change datatype to float32\n",
"x_train = x_train.astype('float32')\n",
"x_test = x_test.astype('float32')\n",
"\n",
"# Define data preparation: random horizontal and vertical shifts of up to 30%\n",
"train_datagen = ImageDataGenerator(width_shift_range=0.3,\n",
"                                   height_shift_range=0.3)\n",
"\n",
"# Fit parameters from data\n",
"train_datagen.fit(x_train)\n",
"\n",
"# Configure batch size and retrieve one batch of images\n",
"for x_batch, y_batch in train_datagen.flow(x_train, y_train, batch_size=9):\n",
"    # Create a grid of 3x3 images\n",
"    for i in range(0, 9):\n",
"        pyplot.subplot(330 + 1 + i)\n",
"        pyplot.imshow(x_batch[i].reshape(28, 28), cmap=pyplot.get_cmap('gray'))\n",
"    # Show the plot, then stop after the first augmented batch\n",
"    pyplot.show()\n",
"    break"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Applying all at once"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAU4AAAD7CAYAAAAFI30bAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO2de7hd453HP68QoXFJRCKSEJckEokRdQ0qLiFKJ7SYxHRKS7VKyzB1mw5tH1TrGdMq7UwMEqoIQTMlgjTuRCQUEZK4hJCLuMZd5J0/9v6u993nutc+e699Ob/P85xn77PW2nu9Z/3OXvt3/znvPYZhGEbxrFPtBRiGYdQbduM0DMNIid04DcMwUmI3TsMwjJTYjdMwDCMlduM0DMNISYdunM65sc65F51zi51z55RrUUZ1Mbk2Libb8uBKzeN0znUBFgJjgKXAHGCC9/758i3PyBqTa+Nisi0f63bgtbsDi733LwM4524CxgGtCsE519mz7Vd57zev9iLaoablus46wUjq0qULAF27dgVg3XXDv/MXX3wBwMcff5zFsupBrpBStvZ5bV2uHblx9gNej35fCuzRgffrDCyp9gKKoKbl+pWvfCV53r17dwC22WYbADbbbLNk35tvvgnA3Llzs1hWPcgValy2NUircu3IjdO1sK3ZN5Rz7iTgpA6cx8iWmpSrborf//73k23HHHNMwTGbbrpp8rxv374AzJkzB4CZM2cC8OqrrybHPPTQQwC8/PLL5V9wbdKubEuVa48ePZo9/+CDDwBYtWpV2rereTpy41wKDIh+7w+82fQg7/1EYCKY6l8nmFwbl3Zla3Itjo5E1ecAg5xz2zjnugLjgWnlWZZRRUyujYvJtkyUrHF679c4504FZgBdgGu89/PLtrIWUGBAQQEIQQCjPFRDri0hGW+//fYAnHjiiQB897vfTY6Rae5cSxZojlGjRgEwePBgoPD/5a677gJgwYIFALzxxhvJvnnz5gHwyiuvALB27dpS/5SaoRKyHTZsGAAnn3xysm3nnXcG4MUXXwRg+vTpyb6pU6d25HQ1Q0dMdbz3dwF3lWktRo1gcm1cTLbloUM3zqzYcMMNAdh3330BmDBhQrJv0KBBAJx22mkAPPvsswB89tlnWS7RKANKKwIYOnQoAD/60Y8AOPbYY4HCqHoxOciyUjbfvHlWibRYvc/ixYuTfX/9618BuOiiiwB49913i/wrOhdDhgwBYIcddki29e/fHwja/o477pjs22uvvQC47bbbgPB5/fzzz5Nj6uGzayWXhmEYKSm5cqikk6WI0sV+qz59+gBw1VVXAXDYYYc1O/7hhx8G4Pzzzwfg8ccfT/Z9+umnJay2Isz13u9a7UWUm3JFX8eMGZM8P/PMMwHYY49cmqF8jF9++WVyTJy3CfD222/HawKgZ8+eRZ8//iwsXLgQgH//938H4Pbbb2/rpZ1Wruuvvz4A6623XrJN1t9JJ+WymgYMCIH8d955B4DVq1cDIT3spptuSo5RGpN8o++9915pf0DHaVWupnEahmGkpGZ9nBtssEHyXD6SsWPHNjtOmoX8oAMHDgTg/vvvL9tadA75y+IIq81s6jiSnbQ7CJqmrv0NN9wAFPohP/zwQwCWL18OFCbAq5pIGqqiv/vvv39yzCabbFKwjtjK0euPPPJIAJ566imgMIHeCP7I2Ed5+eWXA/D887lKzl/84hfJvhEjRgDBWtDndfTo0ckxqvaSpvm3v/2t2fmqjWmchmEYKbEbp2EYRkpq1lSX0xmCiR53v2nKk08+CQTzoKPEZpuc20qlWLRoUbLvmWeeAWDNmjVlOW9nQjLee++9gcKUFpnfkyZNAlo21VtynTRFclTazOmnn57sUzK9Ahux20WpUfvssw8QGorE/xfmpgnE10KBn1mzZjXb96//+q9ASFVq6TMtWX3rW98CQhECwAsvvFDOZZeMaZyGYRgpqVmN86OPPkqeK3lZqQxxisnSpUuBoAUqjaSjxM5qJWGPGzcOgNmzZyf7fv7znwMhGBWnyxhtowDMP/7jPwKFjv+rr74agD/84Q9AkH1MMWWQ0nb0f/LYY48l+5Qu01LJpl63xRZbAHDKKacAhaWFRtsouHPHHXck25q2AoxTlZoeo8/bgw8+mOxTGWe1tX3TOA3DMFJSsxpn7O/aeOONgcJyu6YofalcybKffPJJ8rxph/Hhw4e3us80zuKRJn/EEUcAsNFGGyX75CeL01w6gv6Hdtlll2SbtJa2tJdu3boBIT1KPk8IRRdG8dx6661A8FVeeOGFABxyyCHNjlW62NFHH51s0zWvdg9V0zgNwzBSYjdOwzCMlLRrqjvnrgEOB1Z674fnt/UEbgYGAq8Cx3jvy9o+JjbR1JlGZnFsWskEe+mll7TeZsekQSkucdqLAgQKRsQug8suuwyA3/zmNwBMmTIFKAxu1SLVkmvM+++/D4SKH1UQQXCHxMPZOoLee6eddkr1Ov0f6f9L1Uq1TC3ItjXUN+K5554Dgqneu3fv5JiRI0cCIU0s7k2hfp5KUapWkKiY/8pJQNNax3OAmd77QcDM/O9GfTEJk2ujMgmTbUVpV+P03j/onBvYZPM4YHT++WTgfuDsMq6rINFVI15XrFgBBA0QggYgR7JSleJOOWmQVhmnqFx33XVAqHdWugSEINb3vvc9IDit4xSKaqdOtES15Bpf1wceeAAIqT7xdZKWoTQvJcCXihLq48IKna+tDvLqGL/nnnsCVe3UUzTVkm0apHk++uijAEybFiZ4yNqQxhkXnOj6V/szVWpUvY/3fhmA936Zc653awfalMu6wuTauBQlW5NrcVQ8HanUqXlx2sjuu+8OhLGj8beN0oAOOOAAICQ6q7M0hNQiJVjHqUatpbvEmoVGy06cOBGAU089Ndmnb0VpuDpHtb8RK02pco2vi5KfpdXFmp+KHpQGpI7hsezSIF+pZAlBi9Sa4rJZzWWXr3vLLbcEGr87UtZTLnVd42KGuLcnFPZCffrppyu9pKIo1fO+wjnXFyD/uLJ8SzKqiMm1cTHZlpFSNc5pwHHAJfnHv5RtRXlU6A9BW5B/KtZa5NM8/PDDgVC+F/uypIXOmTMHKCwBay2hNp6GKH/r3XffDYSoH8B+++0HwMEHHwzAn//8Z6Cw2Yg6WtcBFZdrjK75E088AQSrAULiua61Gr3EGmOa6yo/eTzJUq9XZkZsZdx8881A8L316tWr6HPVKJnKtjX0WVaGjLr+jx8/PjlGVsWdd94JFMq8VjT+djVO59yNwGPAEOfcUufcCeQu/hjn3CJgTP53o44wuTYuJtvKU0xUfUIruw4s81qMDDG5Ni4m28pTc7XqUuVjU1d1yzKf4x5+MttlZilIEw/y6tevHxDSmHbeeedkn0w3pUXcd999QOjCEr/366+/XrDG+LlSJtTFR6ah0TpynfzpT38CQr9TCL0HJKuf/OQnzV4vM7qY/gAK/MQBKMlKNfLxCGCZh/q/MEondpt95zvfAUIP1
gMPzN3L33rrreQYDVxUQFCutlrCSi4NwzBSUnMap9ISVq4MQb/58+cDIUUpHrKlEiwlSEs7Vb8/CAO39t13XyAMiIqP0zfgiSeeCBRqGk210LjkUom82qbk3dihbbSMrp2uqzR6CBaDrqtkF/dvVPHDdtttV/A+EAoQpI3KMoiDjrJERBxs0sAwIz2yFiSXOOh3/PHHAyHdrEuXLkAoIIFgvdVy2bJpnIZhGCmpOY1TxBqjShyVNhKnI0mz0MwhldapnA/C3BqVTH77299O9u22224F59Nj3B9QidIi9p9Ko5EfZ9WqVc2O0TajEFkX0vRiv3bTZhzyTW677bbJtunTpwPBTx0nRyvdRe85YUIuXhKPmJavXJpNXObbUsd5o3Xicd4qWJk8eTIQUssgzOjS5ASlF6npBxTX2b/amMZpGIaRErtxGoZhpKTmTHU58ePqnBEjRhQcE5tUUvWLMa1ktqmHJkD//v0B6Nu3LxC6HalGGmDQoEFAMMvjqqIlS5YAoZ72nnvuAeqji06toLrwODikwFFs5kGhm0bmuwJGkiXAN77xjYLXKdVI/Q7i91Jf0Hnz5nXgr+jc9OnTJ3l+9tmFTZc0eA/C50SutXrFNE7DMIyU1JzGqWBNHMCR9ikNI05TUHeVNEO94mCNniuwoICD0iQgaJzqVC7tFEJncAUoSu0D2pmRdnnVVVcl2xTc0aAuBQjjZGppoy11omq6LdY0hfq7ygK55ZZbSvsDOjG6rhrlC3DooYcC4bNxxRVXJPt0zdMQFy3UStcx0zgNwzBSUnMap7odxSVY+saRZqKUI4CnnnqqIuuIy/hin6pROaShAJx11llAKLuTz1mFCgAHHXQQ0PbYaCFNRQUSAL/85S+B0OG/1F6fnRF9JmUB/PCHP2x2jEpZL7/88mTbpZdeCoSOWMUwdOjQ5LmsPZXrVqvzmGmchmEYKakZjVNTCOVbjH2M0gSkacbJshYJbUzUlEP9UvV47bXXJseoF6pK+lTiB0Ez0fREabPx7CI9t4Ys6ZEGryKCuNxVmSm69nHRgT67ymTQxIQ4U0Wl1aNGjQIKm/IozjB69Ogy/SWlUUw/zgHOuVnOuQXOufnOudPy23s65+51zi3KPzb3vhs1i8m1MTG5ZkMxpvoa4Ezv/VBgT+AU59wwbNxovWNybUxMrhlQTCPjZYCm4612zi0A+lHmcaMyl9SVKFbdlUyugJFa6hulk5Vcy01c6KBkagUK4m5HCu7JRFegIk4Xq+XuO6WStVyVXqQxzhDGyKh+PR6nrRp1pS+pC1pc665gkNLN4uCsgksy8atFKh9nflbzSGA2Nm60YTC5NiYm18rhik0odc51Bx4ALvLe3+ace897v2m0/13vfZt+kyzGjdY4c733u1Z7ETEm17LQ6eUal8aecMIJAJxzTs4bEJfCtkY8hE1aqIYqaiw3ZF5g0qpci0pHcs6tB0wFbvDe35bfbONG6xyTa2Nicq087ZrqLpfpejWwwHt/WbSrJsaNGqVhcm1MqiVXFadAmH6gFEOlJ0FovqI5Qmq+cv311yfHqPxZqUtKTaslivFx7g38C/Csc06dYs8jJ4Ap+dGjrwFHV2aJRoUwuTYmJtcMKCaq/jDgWtlt40brFJNrY2JyzYaig0NlOZkFEWouiFAOTK4m17aI08TUxUw9KWSqxz0ElGpUAyM0OhYcMgzDMAI1U6tuGEZjol61MXH3s3rENE7DMIyU2I3TMAwjJXbjNAzDSIndOA3DMFJiN07DMIyU2I3TMAwjJXbjNAzDSIndOA3DMFKSdQL8KuCj/GO90YuOr3vrciykBjG5NiYm11bItFYdwDn3ZD3W9dbrurOiXq9Pva47K+r1+lR63WaqG4ZhpMRunIZhGCmpxo1zYvuH1CT1uu6sqNfrU6/rzop6vT4VXXfmPk7DMIx6x0x1wzCMlNiN0zAMIyWZ3Tidc2Odcy865xY7587J6rxpcc4NcM7Ncs4tcM7Nd86dlt/e0zl3r3NuUf6xzZnUnYl6kK3JNT0m1zbOm4WP0znXBVgIjAGWAnOACd775yt+8pTkZ0739d7Pc85tBMwFjgCOB97x3l+S/yfq4b0/u4pLrQnqRbYm13SYXNsmK41zd2Cx9/5l7/3nwE3AuIzOnQrv/TLv/bz889XAAqAfufVOzh82mZxwjDqRrck1NSbXNujQjTOFKt8PeD36fWl+W03jnBsIjARmA32898sgJyygd/VWVllSmmh1J9vOKldo7M9slnIt+caZV+WvBA4FhgETnHPDWju8hW01nQflnOsOTAVO995/UO31ZEVKuUKdybazyhUa+zObuVy99yX9AHsBM6LfzwXObetYche+pn+6devmu3Xr5tdff/1mP2V4/7dKvd5Z/aSRa3R81eVW5Z+al2uJn9lqX9dq/7Qq1450R2pJld+j6UHOuZOAk4ARHThXZmy//fYAfP7558m2tWvXArB48eKOvv2Sjr5BBqSVq1EfcoUiZGtyLaBVuXbkxlmUKu+9nwhMdM59HbizA+erCF/5ylcAOPjggwEYO3YsAIMHD06OWblyJQAXXHABAC+88EKWS8yaVHIFcM4122/UJO3K1uRaHB0JDi0FBkS/9wfebO1g7/1dHTiXkR2p5GrUFSbbMtGRG+ccYJBzbhvnXFdgPDCtPMsyqojJtXEx2ZaJkk117/0a59yp5II+XYBrvPfzy7ayCtK3b9/k+SGHHAIEE12Pm2yySXLMkiU5V4fM94ULFyb75P9sFOpZrkbbmGzLR4dGZ+TNbzPBGwyTa+Nisi0PWc8cqio77LADEAJB0FzTdC7nP1+9enVyzKxZs4CgaeoYwzA6J9YdyTAMIyWdQuP82te+BsABBxwAwJgxY5J9o0aNKjj2zTdzQcb77rsv2TZtWs5//vrruRS4L7/8snKLNQyj5jGN0zAMIyUNq3Gec07oX/Ctb30LgO7duwPB1xmjpHZpmnfdFfzn06dPr9g6DcOoP0zjNAzDSIndOA3DMFLSMKb65ptvDsDPfvYzAI466qhk32abbQbA+uuv3+x1TzzxBBBM9DvvzJXTP/roo5VbrFFWevXqBYS+AxACeY1WoGDUBqZxGoZhpKSuNM511ml+nx8+fDgQNM399tsPCFomQJcuXQpeM3PmzOT5PffcAwRNc/58q0CrBVRk4IuYiXXssccWPALsuOOOADz33HMA3HHHHQD87//+b3LM22+/XZ7FGp0O0zgNwzBSUhcap7TKAQNyHbFiX+UZZ5wBBA2jZ8+ezV6v8kn5MWfMmJHsk6a5dOnSci/bSMl6662XPJfPettttwXgrbfeAmDFihXJMSNHjgTgkksuAQoti65duwKwxx65Pr2yVu6+++7kGNM4jVIxjdMwDCMlduM0DMNISbumunPuGuBwYKX3fnh+W0/gZmAg8CpwjPf+3XIuTN2KAC699FIANt10U6Cwc5FSUZqa6LHprWCQzDSZ503fqzNRLbm2hMzo3XffPdl28cUXA0G+s2fPBuCZZ55JjjnhhBOAYL4/9thjyT5t+8EPfgDAdttt
B3SOzla1JNtGpRiNcxIwtsm2c4CZ3vtBwMz870Z9MQmTa6MyCZNtRWlX4/TeP5gf9B4zDhidfz4ZuB84uxwLGjgwd6o4tWTQoEFACArFSc1NU5SWL18OFHY3mjJlClBczbmSqD/99NNkm1Kb1BVJNe8A77//PgAbbrghEAa7xaxZs6bd82ZN1nJtCV3rr371q0ChzPfee28gaIh9+vQBYLfddkuOkawvuugioLAz/6GHHgrABx/kRmz37t0bgGXLlpX5r6g9akG2lUbB4K222goIhSyQTdCv1Kh6H+/9MgDv/TLnXO/WDrRxo3WFybVxKUq2JtfiqHg6Utpxo6tWrQIKU1Oalkq2lAj/xRdfFLz+9ttvT/bJx7nRRhsB0KNHj2TfuuvmLsE222wDBI1XjxA03o8++ggo1Hi7desGhBK/l19+GSjUeNXjM57VXu+UOkZW1xtCn1T5M+WHhCDjq6++GgjX+cMPP0yOke9b1zz2X8oSaFpy2Vl92sVS6+OBZZ385Cc/AWD//fcHgrwhpKlV0tIrNaq+wjnXFyD/2Nw+NeoRk2vjYrItI6VqnNOA44BL8o9/KdeC5LOQzxDCN0esrTRF/kf5OuIu7YcddhgQNJo999wz2Sdtdueddwbgs88+A4IGGp9f5X+xZqNEa2lCOuY///M/k2MUxX/yySdbXX+NUDG5yoJQQjrAiSeeCIRrH/umzjrrLABuvfVWIPw/xBqjtHwRl2fqWm+wwQZA8FlLGwF45JFHSv576pCKybbSfOc730meS37xNigssf7+978PwB//+MeKraldjdM5dyPwGDDEObfUOXcCuYs/xjm3CBiT/92oI0yujYvJtvIUE1Wf0MquA8u8FiNDTK6Ni8m28tRcrfo777wDhIAKtG2iCwUPRo8eDcC4ceOSfUqGVwpDbNLJhFQQoaXAk8xx0VKQJ05RAjjzzDOT5zL/68BUrxg77bQTAOedd16yTZ2sJI943IlM9Pfee6+k87366qtAKJp4991crnfspulkpnrdMXjwYAAOP/zwZNvRRx/d4rGxa08yryRWcmkYhpGSmtM4pZ3FicoKAqg7Uluom06sVcaOY2g5TUGJ0gomSGOB0HXn2WefBWCTTTZJ9ilQpLQIEadQKTilLj6dkX322QcIiekx0vZVqABBHqWy8cYbF/yu4FDcXcmoTbbYYgsgjPOOrcfWiAOFGrwo67ESUwBM4zQMw0hJzWmc8kXFCeRHHHEEEDS/LbfcMtmnb5OmzRvidCT5SPWtFJdTqjGENBxplW+88UZyjJLqlWQba7N9+/YFQgL+wQcfDBT6ReNk/s7GP/zDPwBw0EEHAYXXTtf8uuuuK/i9HDS1PFTeGVst2qbCBqN6xFacrJPzzz8faB5jgPB5kwVxwQUXJPvUEKaS86ZM4zQMw0iJ3TgNwzBSUnOmuojH8yq1R4EFpSlACPzINHvxxReBwgDQokWLgOA0Vm0zwMMPPwyEoJBGNMSmf1s1r9on87+lKic5uzsju+yyCwC77rorUHhdZYpde+21ZT/v9ttvX3A+yUVul3ibUT0URB01alSyTf8XcoPFqEJPQ/h+85vfAIX3iywCgKZxGoZhpKRmNc6YWbNmAWF0b+wsVhBIAaO///3vAOywww7JMa+99hoQAgVxvbM0kri7SnvEwR4FG5T+0lICfWcMPqgmXalYSkqOHfaXX355xc6vvgSSuTrHaxw0hNQ3o3rIijzttNOSbSNGjCg4RoEgCNbjySefDATNM2vrwTROwzCMlNSFxila6q4ujVG+jyFDhgDwyiuvJMdI6/j444+bvT5OjymWeAyturVoXo40zvhbsjOWWh5//PFA0ChUEhsXFpQbdYmH0HGp6f/HkiVLKnZ+o2Xiz4t64qpr/1577QUUdq1qiuIWEMpyn3766bKvMw2mcRqGYaSkrjTOllD0Wr7G008/HShMplY3+Oeffx4IjURKJW4UoeT8pr7NuDnF4sWLO3S+ekQJ78pWkAa+YMGCsp9LkdkxY8Yk2yQj+VTjzvFGtkirBDjjjDOAMFNKs6Ba4qabbgLgrrvuSrbFPupqUkw/zgHOuVnOuQXOufnOudPy23s65+51zi3KP/Zo772M2sHk2piYXLOhGFN9DXCm934osCdwinNuGDZutN4xuTYmJtcMKKaR8TJA0/FWO+cWAP2okXGjMgGVAH/ggblerfGwtV69egHwX//1X0BhkCiuW28N9fpTX0CdA2DrrbcGQvBDKLEeYNKkSe3/IRlTabnGg9cgFB8oubmcKHl6woTQv7dfv35ASFf76U9/Wvbz1iK1+HmNuxvJtdW0t0T8mfzv//5vAP785z8DMHfu3EovMTWpfJz5Wc0jgdnYuNGGweTamJhcK0fRN07nXHdgKnC69/6Dpt8YrZHVuFGl/CxcuBAoHLamwU5KZ1L3FIDHH38cCF3dFeSJu0iPHz8egN122w0odHbHo4YhdHW68cYbk22ffPJJaX9UBlRKrnofpXvp2ne0A1Lc6Vv9GjUqVh3lIRQpKBilKQCdhVr4vKrMNu7g3nQdSjW68sork20KAMVpSLVGUelIzrn1yAnhBu/9bfnNNm60zjG5NiYm18rTrsbpcl8RVwMLvPeXRbtqatyovsnUMEI+LoDhw4cDcOqppwLBzwIhIVtlmOo8rxk58XvJnxmnHindRalO0jilyUJhY4laoRJylZ+5yXmAUCbb1vwo+aIh+K4lF13n2F8mTVOaTZxoLQ0znmPUGaiFz6v6nKqvpj43MSqDlv//97//faWWUxGKMdX3Bv4FeNY5p3T988gJYEp+9OhrQMtTlIxaxeTamJhcM6CYqPrDQGsOEhs3WqeYXBsTk2s21H3lkJCJPWfOHKCw845G0qq6SL0a4+fqlCNTMu62Eg9ei48FePvtt4EwgkOmR2ccChYHbjSCRL0DVMkj8xpCJ6vly5cDhSadrrG6LKnbVTxiQSlP6qEaB+FmzJgBFAYCjWxQN7ChQ4cCoXoMQnqYZH/ppZdmvLryYLXqhmEYKWkYjVOou3uc9qCUmBNOOAGAzTffPNknrUXBC70uDjSIN998EyhMmpeG+atf/Qro3F3F5fAH+PWvfw3AxRdfDMBWW20FwJFHHpkco5HKSgWLB6mpv6k0T2n9sVzVIUepX7fddluyT+lPnbEXarVR8FTDEWMLTZ8rBf1k4cXDFesB0zgNwzBS4krpR1nyySqYAN/CuZLn0l7kb9MjwOjRo4HgO9M8o/j1L730EhCS7NWJHkLnpSKZ673fNc0L6oG25CpfllLC4lQu/e/pWsfJ8ZKZkAyuv/76ZNtTTz0FwLRp00pffHnodHItBo2Gnjp1arJNfujLLstlSk2ZMgWoWcugVbmaxmkYhpGShtU42yLWZpRY3b9/fwC6d+8OFEZv1U9TSdnydZZAp9NMdB133HHHgkeAYcOGFWyLI+C65vKNqlAh1jhriE4n1zTEU2mVeSH/p7JSahTTOA3DMMqF3TgNwzBS0ilN9SpiJl2RqN5Zw70UOGpp4F4NYHJtTMxUNwzDKBdZJ8C
vAj7KP9Ybvej4upu3iWkMyi5XpadkkKZicm0d+7y2QqamOoBz7sl6NGvqdd1ZUa/Xp17XnRX1en0qvW4z1Q3DMFJiN07DMIyUVOPGObEK5ywH9brurKjX61Ov686Ker0+FV135j5OwzCMesdMdcMwjJTYjdMwDCMlmd04nXNjnXMvOucWO+dqdvSgc26Ac26Wc26Bc26+c+60/Paezrl7nXOL8o892nuvzkI9yNbkmh6TaxvnzcLH6ZzrAiwExgBLgTnABO/98xU/eUryM6f7eu/nOec2AuYCRwDHA+947y/J/xP18N6fXcWl1gT1IluTazpMrm2Tlca5O7DYe/+y9/5z4CZgXDuvqQre+2Xe+3n556uBBUA/cuudnD9sMjnhGHUiW5NrakyubdChG2cKVb4f8Hr0+9L8tprGOTcQGAnMBvp475dBTlhA7+qtrLKkNNHqTradVa7Q2J/ZLAtK8l8AABD7SURBVOVa8o0zr8pfCRwKDAMmOOeGtXZ4C9tqOg/KOdcdmAqc7r3/oL3jG4WUcoU6k21nlSs09mc2c7l670v6AfYCZkS/nwuc29ax5C58Z/55q9TrndVPGrlGx1f7ulb7p+blWuJnttrXtdo/rcq1I92RWlLl92h6kHPuJOAkYEQHztUoLKn2AoogrVyN+pArFCFbk2sBrcq1Iz7OolR57/1En+tScmQLxxu1Ryq5+jrsnNOJaVe2Jtfi6MiNcykwIPq9P9DqFDPv/V0dOJeRHankatQVJtsy0ZEb5xxgkHNuG+dcV2A8UPUB10aHMbk2LibbMlGyj9N7v8Y5dyq5oE8X4Brv/fyyrcyoCibXxsVkWz5sWFu22FCvxsTk2pi0KtesZw5VDE1F3GCDDQDYdNNNk30rV64EwqREwzCMjmDdkQzDMFJSVxpnt27dAOjfv3+yTc8PPvhgAI44onlJqiYl3nzzzQA899xzzV6/cOFCAB577DEAvvjii7Ku3TCMxsE0TsMwjJTUhcY5cOBAAI455hgA/vmf/znZt+222wKw7rq5P2W99dYD4JNPPkmOeffddwE49NBDAfjVr36V7Pv4448LHi+77DIAHnrooeSYTz/9FICnn366HH+OkYJ11sl9t3ft2hUIsjCMamIap2EYRkrsxmkYhpGSmjXV+/Tpkzw/6KCDADj99NMB2GKLLZJ9a9euBWDBggUAzJkzB4D7778/OUa5queddx4QzD+ANWvWFLznz3/+82ZrmTFjBgD33HMPAPfdd1+yb/Hixan+LiMdu+++OwCHHHIIAL169Ur2SY5yy0g+AI888ggQAoOGUU5M4zQMw0hJzVQObbbZZgB87WtfA+C0005L9g0ZMgSA7t27A7Bs2bJkn1KLzj33XACWL18OBE0yfs9vfOMbQGGAQUGgnj17AnD44YcDMG5c8ykB7733HgBXXnllsu3CCy8E4LPPPmvtT4uxCpM2iC2BHXbYAYAbb7wRgKFDh+pcyTEK+vXokZvD9eijjyb7zjjjDADmzZsHBKujQphcG5NW5Woap2EYRkpqxsf5pz/9CYBdd83d4GNf1osvvgjAFVdcAcBf/vKXZJ+0z7fffrvV954+fToQfJXyi8ZIk5k9ezYAt99+e7Jv/PjxAIwdOxYoTIeaPz/XI+HWW28FCjVdIx1KOwM49dRTgWBtPP98brjir3/96+SYV155BYBbbrkFgD32CD15R40aBcDf//53wORilBfTOA3DMFJiN07DMIyUtGuqO+euAQ4HVnrvh+e39QRuBgYCrwLHeO/fLWUBv/zlL4FQay6T6v3330+O+d3vfgcE81kBoLS0ZKILBQ8UbJJ7AKBLly4A7LXXXkBhOtRRRx0FwIMPPgjAm2/WR0PtSsu1FJR6BLDffvsBQQ6///3vgRAsAhgwINfM/O677wbg2GOPTfbJ5RMHnDoLtSjbRqOY/6pJwNgm284BZnrvBwEz878b9cUkTK6NyiRMthWlXY3Te/9gftB7zDhgdP75ZOB+4OxiT6remQAffvghAF9++SUQtMLzzz8/OeaOO+4AStc0SyHujqRg1AEHHACEmnkIqTC9e+fm3deLxlkJuZaKNPodd9wx2bb55psDcP311wMwc+bMZq97/fXcwMbHH38cCEnyAH379gVg8ODBQGFHrEanlmTbHrII4jQzWRKyPpcuXZr9wtqh1Kh6H+/9MgDv/TLnXO/WDrRxo3WFybVxKUq2JtfiqHg6kvd+IjARQkJtXAa35ZZbAqFsTl2K4m7tccJ7NXjnnXcAWLVqFQCff/55sk/9PJWc31loSa6loms4YcKEZJsKEtQfdcWKFa2+XtZB3BFLBRX9+vUDOpfG2RHKKde2kIa58cYbA3DBBRck+0aOHAnAPvvsAxSW0v7P//wPUJiSWA1K9ZyvcM71Bcg/rizfkowqYnJtXEy2ZaRUjXMacBxwSf4x1e0/buChbx75Pddff32gMAFe+6rVsGH77bcHQqlmrF3KHyOttM7pkFzTIt+mshTiBHhp9fKBtdWHU8nxmjcFsMkmmwDBT6ZzyZfeCclUtsWy9957A/D1r3892bbNNtsAQWbqoxs/f+mll4DQW1cFNFB0+XOHaFfjdM7dCDwGDHHOLXXOnUDu4o9xzi0CxuR/N+oIk2vjYrKtPMVE1Se0suvAMq/FyBCTa+Nisq08ValVjwM/r732GhBMXaWhKNkc4KabbgKyMdUVqFDqEcDo0aMB+OY3vwkUdto588wzgWAuGsUjs1lD+NRTFYJp/vLLLwNtdzdatGgRAE8++WSyTZ2wFGiI+7Ma1UdBIX22YveXxt8o6KeuZBD+V7beemsAfvrTnwKhAxrAvffeCwSZVyKdqfOVVRiGYXSQqmiccdqI0gq++tWvAnDYYYcBoQQTQqqCOuOUs+u6hoBJ01VKjLrNQ0hul7P6uuuuS/b98Y9/LNtaOisbbrghEFLRIGijCvK0hQKJsWYibUVlnBrm14mDQ2VFaYTSFBXkgWA5qDChJfS5U/AvDhiLhx9+GAg9VSFoqrvssgsQ0s3icltZGbI6pkyZkuybNWsW0PFgrmmchmEYKal6P075sDTrR2kGP/7xj5Nj1P9S3dnjb5A//OEPQPgG1Myh2CembzWlNenbDuC4444Dgoar1CMlUAO88MILQPgG/cEPfpD67zTaJ04nkkagkty2kKYZ+8AlY6W7qQTz1VdfLctaOxM777xz8ny77bYD4OijjwbC50VWGYQy5GeeeQaAa665Jtl39dVXA+HzOWjQIKCwX6r2qQgibuyiEd1KS1L5c1wGPWLEiILHeG1vvPEGEPruljoZwDROwzCMlNiN0zAMIyVVN9XVDWnhwoUA3HzzzUBIOwDYdNNNgTC64nvf+16y74c//CEQzGkFBeIuRcOHDwfCmAsNAoNQ+SN0zM9+9rNkm8x/deNpq6+nkR4F5uLKITn926pRFxqb8sADDyTb1ClJHZdUjWKmevFsuOGGDBs2jKlTpybbFJxRQE+mbuwm0WdQo0xkMgOcdFKuf8hVV10FhAF7Cg5DcKvos7nbbrsl+/Q5lx
wvvvhioHBk9ymnnFLwnnIvxO/VVuCqGEzjNAzDSEnVNc6mqIuNElshJKVrIJpSlgCOPPJIoHl6xE477ZQcozQipa3Ejmhpk+r7qNQHBa2gYerQaw4FcJSGFPdplQWSRkOMpwZIM1KaS1x0YRRHt27dGDJkSEGAbquttgKCpqniFA3Fg9BnQEnq++67b7JPgaYTTzwRCHKO+3HquT7Le+65Z7JPye0qetC5YtnrfApc6fMPobBCgadSP9umcRqGYaSk5jTOllDJlB7j8sZJkyYBQStVuVacgiB/6cqVuU5ad955Z7JPmslbb70FFCbnG5VF2oKS0+MEeD2Xn0zpIy2hdLO4b6u0V2kY775r43VKYe3atWy77bbJ79I0JQ9pgNdee22z10rjjEthzzjjDCBorm3NhpJFokR2CAUMZ5+da14v/3bsY33iiSeAUIa5ZMmSZJ8+35YAbxiGkTHFTLkcAFwHbAGsBSZ6739Xzal5im7Hz+OyrNaQ7yROeo3L9DoTtSRX9U9UwxcIFoNK66S1tFQyqSyHWItQorVKaKW1aFpm/LpGopxydc7RtWvXgswG9U5VnECaZ6wx6rpK04s1PpVRfvvb3wbgRz/6EVBoIcY+SQj+SIDx48cD4fOu8ufYhz158mQA/vrXvwKFWTQPPfRQW39y0RSjca4BzvTeDwX2BE5xzg3DpubVOybXxsTkmgHt3ji998u89/Pyz1cDC4B+5KbmTc4fNhk4olKLNMqPybUxMblmQ6rgUH7k6EhgNikmItYKpdalNjrVlqu66cQpJTKvRo0aBcD06dOBttOT4pEJcsuobloBDiVuQ3F18PVMR+X62Wef8dJLLxW4QFSYoFQfjbxQYnp7SH6XX345AI888ggAJ598cnLMgQfm+i3HJrrQaB31rZg7dy5QGIBS0FHjxGNXQ7ncM0XfOJ1z3YGpwOne+w/ivKt2XmfjRmsYk2tjUg656iZlNKeoG6dzbj1yQrjBe39bfvMK51zf/LdXq1Pzsho3aqSnVuT6yiuvAPB///d/yTYFhf7pn/4JCGlF//Ef/5EcI40i+nuS5yqx1DZ1xVHqUyNTLrl269bNL1++vCDwqrJYlUFLk5cmCoXB29ZQMEfBotga1BhudYePy6KVFK/RwWeddRYA//Zv/5Yco2CUAomVsDSLGdbmgKuBBd77y6JdmpoHNTQ1zygOk2tjYnLNhmK+fvcG/gV41jn3dH7beeSm5E3JT9B7DTi6Mks0KkTNyFVNIW6//fZk20EHHQTA/vvvD8B3v/tdIDQEAfjtb38LhMT3WJtU8rV49tlngU5R4FA2uX7xxResWLGCadOmJdtUkCAftDrsxyk/xWic8TmgcCaUnut/QClIEJq3KH1JjTzkc4XCculKUcyUy4eB1hwkNjWvTjG5NiYm12ywyiHDMIyUNL6n3Kh5lCISVw5dccUVQDDJhg0bBoSRCRD6LMo0i7teqQeratQ1kiVOWTLaZu3ataxevTrpmQnwt7/9DQjXXqMvVAkEwVQvNkWpNdRjM+61KbmqEkwyv+eeezp0rrSYxmkYhpESl2VSuKUjMdd7v2u1F1FuKiFXJa5/85vfBELn8LgbuFDaSZxAr6T6W265BQj1y/ExZaSh5RoH3dQL98ILLwRCh32liwHMmDEDgIsuuggo7CNRZ/0BWpWraZyGYRgpMY0zWxpaM8mawYMHA2GmTVyiJy1HJXkVptPIVelgsgSUeB6nf0mr11hf+ashlFyqLLLGMY3TMAyjXJjGmS2dRjPpZHRaucr3/OMf/zjZpsi37i2aDwShaEFJ9S2VwsbZEVXGNE7DMIxyYTdOwzCMlJipni2d1qRrcDqtXBWQ22+//ZJtRx11FBB6qWqQIgTTXI8arBYH8TSATXXsjz/+eLIv4x6qZqobhmGUi6w1zreAj4BVmZ20fPSi4+ve2nu/efuH1RcmV5NrDVJRuWZ64wRwzj1Zj2ZNva47K+r1+tTrurOiXq9PpddtprphGEZK7MZpGIaRkmrcOCdW4ZzloF7XnRX1en3qdd1ZUa/Xp6LrztzHaRiGUe+YqW4YhpGSzG6czrmxzrkXnXOLnXPnZHXetDjnBjjnZjnnFjjn5jvnTstv7+mcu9c5tyj/2KPaa60V6kG2Jtf0mFzbOG8WprpzrguwEBgDLAXmABO8989X/OQpyc+c7uu9n+ec2wiYCxwBHA+8472/JP9P1MN7f3YVl1oT1ItsTa7pMLm2TVYa5+7AYu/9y977z4GbgHEZnTsV3vtl3vt5+eergQVAP3LrnZw/bDI54Rh1IluTa2pMrm2Q1Y2zHxAPW16a31bTOOcGAiOB2UAf7/0yyAkL6F29ldUUdSdbk2tRmFzbIKsbZ0tznms6nO+c6w5MBU733n9Q7fXUMHUlW5Nr0Zhc2yCrG+dSYED0e3/gzYzOnRrn3HrkhHCD9/62/OYVeX+K/Corq7W+GqNuZGtyTYXJtQ2yunHOAQY557ZxznUFxgPTMjp3KpxzDrgaWOC9vyzaNQ04Lv/8OOAvWa+tRqkL2ZpcU2Nybeu8WSXAO+e+DvwW6AJc472/KJMTp8Q5tw/wEPAsoFmm55Hzm0wBtgJeA4723r9TlUXWGPUgW5NrekyubZzXKocMwzDSYZVDhmEYKbEbp2EYRkrsxmkYhpESu3EahmGkxG6chmEYKbEbp2EYRkrsxmkYhpESu3EahmGk5P8BtOxyjTXD5toAAAAASUVORK5CYII=\n",
"text/plain": [
"<Figure size 432x288 with 9 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
}
],
"source": [
"from tensorflow.keras.datasets import mnist\n",
"from tensorflow.keras.preprocessing.image import ImageDataGenerator\n",
"from matplotlib import pyplot\n",
"\n",
"# Load data\n",
"(x_train, y_train), (x_test, y_test) = mnist.load_data()\n",
"\n",
"# Reshape our data to be in the format [samples, width, height, color_depth]\n",
"x_train = x_train.reshape(x_train.shape[0], 28, 28, 1)\n",
"x_test = x_test.reshape(x_test.shape[0], 28, 28, 1)\n",
"\n",
"# Change datatype to float32\n",
"x_train = x_train.astype('float32')\n",
"x_test = x_test.astype('float32')\n",
"\n",
"# Define data preparation: combine several augmentations at once\n",
"train_datagen = ImageDataGenerator(\n",
"    rotation_range=45,\n",
"    width_shift_range=0.2,\n",
"    height_shift_range=0.2,\n",
"    shear_range=0.2,\n",
"    zoom_range=0.2,\n",
"    horizontal_flip=True,\n",
"    fill_mode='nearest')\n",
"\n",
"# Fit parameters from data\n",
"train_datagen.fit(x_train)\n",
"\n",
"# Configure batch size and retrieve one batch of images\n",
"for x_batch, y_batch in train_datagen.flow(x_train, y_train, batch_size=9):\n",
"    # Create a grid of 3x3 images\n",
"    for i in range(0, 9):\n",
"        pyplot.subplot(330 + 1 + i)\n",
"        pyplot.imshow(x_batch[i].reshape(28, 28), cmap=pyplot.get_cmap('gray'))\n",
"    # Show the plot, then stop after the first augmented batch\n",
"    pyplot.show()\n",
"    break"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Read more about it at the official Keras Documentation\n",
"https://keras.io/preprocessing/image/"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Test Augmentation on a single image\n",
"- Outputs augmented copies to the ./output directory"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"\n",
"from tensorflow.keras.preprocessing.image import ImageDataGenerator, img_to_array, load_img\n",
"\n",
"datagen = ImageDataGenerator(\n",
"    rotation_range=40,\n",
"    width_shift_range=0.2,\n",
"    height_shift_range=0.2,\n",
"    shear_range=0.2,\n",
"    zoom_range=0.2,\n",
"    horizontal_flip=True,\n",
"    fill_mode='nearest')\n",
"\n",
"img = load_img('dog.jpeg')\n",
"x = img_to_array(img)  # a NumPy array with shape (height, width, 3)\n",
"x = x.reshape((1,) + x.shape)  # add a batch dimension: (1, height, width, 3)\n",
"\n",
"# flow() raises an error if the output directory does not exist\n",
"os.makedirs('output', exist_ok=True)\n",
"\n",
"i = 0\n",
"for batch in datagen.flow(x, save_to_dir='output', save_prefix='dog', save_format='jpeg'):\n",
"    i += 1\n",
"    if i > 35:\n",
"        break"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.4"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
10. Data Augmentation/dog.jpeg
ADDED
11. Assessing Model Performance/1. Introduction to the Confusion Matrix & Viewing Misclassifications.srt
ADDED
@@ -0,0 +1,35 @@
1
00:00:00,720 --> 00:00:06,930
Hi and welcome back to Chapter 11, where we start taking a look at the confusion matrix, which is confusing

2
00:00:06,930 --> 00:00:13,340
for now, but it's very helpful, and also viewing all the data that gets misclassified by our classifier.

3
00:00:13,560 --> 00:00:18,660
It's very important to look at this data sometimes because we need to know exactly what our classifier

4
00:00:18,690 --> 00:00:21,580
got wrong and why.

5
00:00:21,590 --> 00:00:23,820
So let's look at the contents in this chapter, it's fairly simple.

6
00:00:23,830 --> 00:00:28,600
Eleven point one basically deals with the confusion matrix, and eleven point two deals with misclassified

7
00:00:28,600 --> 00:00:30,710
data and how we can view it.

8
00:00:30,710 --> 00:00:30,990
OK.

9
00:00:31,000 --> 00:00:32,980
So let's get started on eleven point one.
11. Assessing Model Performance/2. Understanding the Confusion Matrix.srt
ADDED
@@ -0,0 +1,723 @@
1
00:00:00,510 --> 00:00:00,940
Hey.

2
00:00:00,960 --> 00:00:05,520
And welcome back to chapter eleven point one, where we go into the confusion matrix and then calculate

3
00:00:05,520 --> 00:00:07,180
precision and recall.

4
00:00:07,190 --> 00:00:08,530
So let's get started.

5
00:00:08,880 --> 00:00:15,480
So before I dive into the Python notebook, where we actually walk through an example using scikit-learn's confusion matrix

6
00:00:15,480 --> 00:00:16,240
function,

7
00:00:16,530 --> 00:00:19,310
let's take a look at what the confusion matrix actually looks like.

8
00:00:19,350 --> 00:00:22,700
Now typically, which I've shown you before,

9
00:00:22,980 --> 00:00:29,490
you have basically true positives, true negatives, false positives and false negatives.

10
00:00:29,610 --> 00:00:31,510
And that was in a binary class analysis.

11
00:00:31,550 --> 00:00:37,400
Now I'm going to go a step further and make you understand this concept using a multiclass problem, and

12
00:00:37,400 --> 00:00:41,010
we're using actual real world results from the MNIST dataset.

13
00:00:41,250 --> 00:00:45,410
So this big matrix here kind of looks a bit strange.

14
00:00:45,450 --> 00:00:48,720
However, you do pick up initially that there is a pattern right here.

15
00:00:48,900 --> 00:00:50,170
That is, a diagonal here.

16
00:00:50,190 --> 00:00:53,470
These are very large numbers, and then there are small numbers on the outskirts.

17
00:00:53,760 --> 00:00:55,740
What do these numbers actually mean?

18
00:00:56,250 --> 00:00:57,770
So let's take a look now.

19
00:00:58,230 --> 00:00:59,110
I've made it simple.

20
00:00:59,130 --> 00:01:00,820
We know we're looking at the MNIST dataset.

21
00:01:00,840 --> 00:01:04,210
So we have 10 classes, that's 0 to 9.

22
00:01:04,290 --> 00:01:05,100
Likewise here.

23
00:01:05,190 --> 00:01:08,320
And what this column here is, is the predicted value.

24
00:01:08,340 --> 00:01:15,230
So these numbers here mean that the classifier predicted zero and the true value was actually zero.

25
00:01:15,240 --> 00:01:17,180
So that's why it is a big number here.

26
00:01:17,440 --> 00:01:20,170
Same for one, the true value was actually 1.

27
00:01:20,370 --> 00:01:23,810
So having large numbers in this diagonal is a good thing.

28
00:01:24,000 --> 00:01:29,550
Having large numbers outside of this diagonal is bad, and we can see we have some large numbers here.

29
00:01:29,550 --> 00:01:30,360
We have an 11.

30
00:01:30,360 --> 00:01:32,940
We have a sixty five here.

31
00:01:33,220 --> 00:01:36,950
So now let's take a look at these numbers, so I've highlighted them here.

32
00:01:37,220 --> 00:01:38,790
So what do they actually mean?

33
00:01:38,790 --> 00:01:44,350
It means that our classifier predicted two when it was actually 7.

34
00:01:44,460 --> 00:01:49,920
So the classifier is confusing sevens with twos: it is seeing a seven, but it's classifying it as a two, which

35
00:01:49,920 --> 00:01:50,660
is wrong.

36
00:01:50,940 --> 00:01:56,070
And likewise for sixes and zeroes, you know, nines and fours here.

37
00:01:56,970 --> 00:01:58,680
Sorry, this one here.

38
00:01:59,120 --> 00:02:00,990
And this as well.

39
00:02:00,990 --> 00:02:08,550
So what I mean is that, let's look at nines and fours: the classifier predicted four, but five times it was

40
00:02:08,550 --> 00:02:10,160
actually a 9.

41
00:02:10,260 --> 00:02:11,660
So we can see the biggest problem.

42
00:02:11,670 --> 00:02:17,400
Our MNIST classifier is confusing twos and sevens, which is actually a real problem for a

43
00:02:17,400 --> 00:02:18,630
lot of humans.

44
00:02:18,660 --> 00:02:20,070
My handwriting isn't very good.

45
00:02:20,070 --> 00:02:21,690
I'll be the first one to admit that.

46
00:02:21,810 --> 00:02:25,700
And lots of times I'm looking at numbers I write and I'm like, is that a 2?

47
00:02:25,700 --> 00:02:26,550
Was it a 7?

48
00:02:26,550 --> 00:02:34,050
So we can see our classifier sort of leaning, generally like a human would lean, in interpreting or misinterpreting

49
00:02:34,080 --> 00:02:36,840
results, just like a human like ourselves would.
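(A minimal sketch of how a confusion matrix like the one being described can be generated with scikit-learn; `model`, `x_test` and `y_test` are assumed to come from a previously trained Keras classifier and are not defined in this transcript.)

import numpy as np
from sklearn.metrics import confusion_matrix

y_pred = np.argmax(model.predict(x_test), axis=1)  # predicted digit for each test sample
cm = confusion_matrix(y_test, y_pred)              # scikit-learn convention: rows = true class, columns = predicted class
print(cm)                                          # large values on the diagonal are correct predictions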
50
00:02:37,080 --> 00:02:41,860
So let's actually work out our recall value based on this real world data.

51
00:02:42,210 --> 00:02:43,770
So let's take a look at the number seven.

52
00:02:43,860 --> 00:02:44,350
All right.

53
00:02:44,550 --> 00:02:47,040
So this is the true class for number 7 here.

54
00:02:47,040 --> 00:02:49,220
So we saw our classifier got it right

55
00:02:49,410 --> 00:02:52,800
one thousand and ten times, those are our true positives here.

56
00:02:53,130 --> 00:02:55,020
So how do we get the number of false negatives?

57
00:02:55,020 --> 00:02:56,970
Now for the number of false negatives.

58
00:02:56,970 --> 00:03:03,330
Basically, how many times our classifier predicted a different number when it was supposed

59
00:03:03,330 --> 00:03:04,250
to be a 7.

60
00:03:04,500 --> 00:03:06,530
So you can see all these entries here.

61
00:03:06,570 --> 00:03:11,730
Numbers were supposed to be a seven but were actually predicted to be a different class, like 0, 1, 2

62
00:03:11,730 --> 00:03:12,960
especially.

63
00:03:12,960 --> 00:03:14,700
So let's sum this up.

64
00:03:14,700 --> 00:03:18,980
This row here, everything here except the 1010, will give you 18.

65
00:03:19,230 --> 00:03:21,090
And that's exactly what we calculate here.

66
00:03:21,100 --> 00:03:25,310
And we get ninety eight point two four percent, and that's our recall.
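(A worked sketch of the recall calculation just described, using the 1010 true positives and 18 false negatives quoted for the digit 7; the per-class line assumes the `cm` matrix from the earlier sketch.)

tp, fn = 1010, 18
recall_7 = tp / (tp + fn)                 # 1010 / 1028 = 0.9825, the ~98.2% quoted above
recalls = cm.diagonal() / cm.sum(axis=1)  # per-class recall for all ten digits at once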
67
00:03:25,370 --> 00:03:29,810
And now let's move on to precision.

68
00:03:29,830 --> 00:03:34,660
So looking at precision, we know it's the number of correct predictions over how many predictions of that

69
00:03:34,660 --> 00:03:36,650
class were made on the test data set.

70
00:03:36,730 --> 00:03:38,770
That's another way of seeing what this is here:

71
00:03:38,980 --> 00:03:43,630
true positives over true positives plus our false positives.

72
00:03:43,630 --> 00:03:45,230
So again, let's look at number seven.

73
00:03:45,310 --> 00:03:45,870
OK.

74
00:03:46,030 --> 00:03:47,240
So now we go the other way.

75
00:03:47,290 --> 00:03:48,900
This is interesting.

76
00:03:48,970 --> 00:03:51,610
So, our true positives again,

77
00:03:51,840 --> 00:03:52,980
it is 1010.

78
00:03:53,320 --> 00:03:57,400
And what about false positives, our false positives here?

79
00:03:57,400 --> 00:04:05,610
Basically, all the times the classifier was predicting something to be a 7 when it was actually another

80
00:04:05,650 --> 00:04:07,080
digit, above or below.

81
00:04:07,450 --> 00:04:14,620
So those are the false positives for sevens, and we just sum this up, everything here gives us a thousand and

82
00:04:14,620 --> 00:04:15,240
seventeen.

83
00:04:15,300 --> 00:04:18,060
One, two, three, and then three plus four.

84
00:04:18,060 --> 00:04:25,560
And that's exactly how we get ninety nine point one percent, so basically we don't actually

85
00:04:25,560 --> 00:04:26,710
have to do it manually.
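(The same idea for precision, sketched under the same assumptions: the diagonal divided by the column sums, since a column collects everything the classifier predicted as that digit.)

precisions = cm.diagonal() / cm.sum(axis=0)  # column sum = all predictions of each digit
print(precisions[7])                         # precision for the digit 7, ~0.99 as quoted above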
86
00:04:26,910 --> 00:04:32,370
So scikit-learn actually generates a report for us automatically that gives us recall,

87
00:04:32,370 --> 00:04:35,000
precision, F1 and support.

88
00:04:35,010 --> 00:04:37,390
I think you remember, and you guys know, what F1 is.

89
00:04:37,560 --> 00:04:41,040
I haven't actually dealt with support, but I'll talk about it now in the next slide.

90
00:04:41,340 --> 00:04:46,140
But you can see we have precision, we have recall, and we have our F1 score here.

91
00:04:46,190 --> 00:04:47,350
All right.
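(scikit-learn's built-in helper produces exactly this kind of report; `y_test` and `y_pred` as in the earlier sketch.)

from sklearn.metrics import classification_report

print(classification_report(y_test, y_pred))  # precision, recall, f1-score and support per digit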
92
00:04:47,640 --> 00:04:49,500
Now actually, I can talk about support right now.

93
00:04:49,500 --> 00:04:50,820
It's actually quite easy.

94
00:04:50,850 --> 00:04:53,180
Support is basically, you see the numbers here,

95
00:04:53,460 --> 00:04:54,530
ten twenty eight.

96
00:04:54,540 --> 00:04:55,840
What was that?

97
00:04:55,980 --> 00:05:01,920
Go back to it here, 1028: it is basically true positives plus false negatives here.

98
00:05:02,220 --> 00:05:05,430
So support just gives us that sum here in the column.

99
00:05:05,430 --> 00:05:09,630
So if you look at column 0, let's go back to it.

100
00:05:10,080 --> 00:05:12,760
Column zero would be everything here.

101
00:05:13,760 --> 00:05:14,130
Sorry.

102
00:05:14,170 --> 00:05:14,920
Everything in this array,

103
00:05:14,980 --> 00:05:15,970
added up here.

104
00:05:15,970 --> 00:05:20,200
So this is 977 plus 1 plus 2, 980.

105
00:05:20,200 --> 00:05:23,550
And that gives us our support here. And support,

106
00:05:23,550 --> 00:05:24,490
it basically is useful for seeing

107
00:05:24,490 --> 00:05:31,540
how many times our classifier is actually missing data, essentially missing classifications, because

108
00:05:31,630 --> 00:05:39,560
think about it intuitively: we have nine hundred and eighty zeroes represented here in this report.

109
00:05:39,560 --> 00:05:46,350
All right, now what it's telling us here is basically, that's how many zeroes there are.

110
00:05:46,390 --> 00:05:48,790
How many zeroes were in our report.

111
00:05:48,790 --> 00:05:53,290
So now we can actually use that as a basis to gauge what is happening here.
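(Support computed directly from the confusion matrix sketch above: true positives plus false negatives, i.e. how many samples of each class the test set actually contains.)

support = cm.sum(axis=1)  # per-class sum under scikit-learn's rows = true class convention
print(support[7])         # 1028 for the digit 7 in the numbers quoted above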
112
00:05:53,320 --> 00:05:57,650
So you can also see if there are any class imbalances in the data here.

113
00:05:57,950 --> 00:06:03,430
Class imbalances are essentially something you can usually check before you even reach

114
00:06:03,430 --> 00:06:03,870
this point.

115
00:06:03,880 --> 00:06:06,810
You can easily check it when you have your test and training data.

116
00:06:07,150 --> 00:06:15,520
Just check to see the quantities, the counts you should see of data of each class of

117
00:06:15,520 --> 00:06:15,780
data,

118
00:06:15,820 --> 00:06:20,040
in your data set.
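(A quick way to do that check on the raw labels, before any training; `y_train` is assumed to be the usual MNIST label vector.)

import numpy as np

counts = np.bincount(y_train)   # samples per digit 0-9
print(dict(enumerate(counts)))  # a heavily skewed count would signal class imbalance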
119
|
| 474 |
+
00:06:20,060 --> 00:06:23,020
|
| 475 |
+
So how do we analyze overclassification report.
|
| 476 |
+
|
| 477 |
+
120
|
| 478 |
+
00:06:23,030 --> 00:06:29,830
|
| 479 |
+
So basically we can just quickly interpret something here high record with low precision that is bad.
|
| 480 |
+
|
| 481 |
+
121
|
| 482 |
+
00:06:30,080 --> 00:06:35,700
|
| 483 |
+
And let me tell you what this tells us that most of the positive examples are being correctly recognize.
|
| 484 |
+
|
| 485 |
+
122
|
| 486 |
+
00:06:35,840 --> 00:06:40,350
|
| 487 |
+
That means a lot of false negatives but there are a lot of false positives.
|
| 488 |
+
|
| 489 |
+
123
|
| 490 |
+
00:06:40,670 --> 00:06:43,330
|
| 491 |
+
And that means a lot of the other classes have been predicted.
|
| 492 |
+
|
| 493 |
+
124
|
| 494 |
+
00:06:43,400 --> 00:06:51,450
|
| 495 |
+
As a class in question and Alternatively we can have lower recall with high precision.
|
| 496 |
+
|
| 497 |
+
125
|
| 498 |
+
00:06:51,720 --> 00:06:52,630
|
| 499 |
+
What does that mean.
|
| 500 |
+
|
| 501 |
+
126
|
| 502 |
+
00:06:52,650 --> 00:06:58,640
|
| 503 |
+
It means our classifier is missing a lot of the positive examples, it has a high false negative rate, but
|
| 504 |
+
|
| 505 |
+
127
|
| 506 |
+
00:06:58,720 --> 00:07:03,760
|
| 507 |
+
those it does predict as positive are truly positive, so few false positives.
|
| 508 |
+
|
| 509 |
+
128
|
| 510 |
+
00:07:04,110 --> 00:07:10,570
|
| 511 |
+
So we can use our classification report to sort of gauge what's actually happening.
|
| 512 |
+
|
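For reference, the two quantities being traded off here, writing TP, FP and FN for true positives, false positives and false negatives, are: precision = TP / (TP + FP) and recall = TP / (TP + FN). High recall with low precision means few false negatives but many false positives; low recall with high precision is the reverse.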
| 513 |
+
129
|
| 514 |
+
00:07:10,710 --> 00:07:13,220
|
| 515 |
+
In this example everything looks pretty good.
|
| 516 |
+
|
| 517 |
+
130
|
| 518 |
+
00:07:13,250 --> 00:07:13,740
|
| 519 |
+
All right.
|
| 520 |
+
|
| 521 |
+
131
|
| 522 |
+
00:07:13,920 --> 00:07:18,720
|
| 523 |
+
But later on we'll look at some examples where we generate these reports, and you can actually analyze
|
| 524 |
+
|
| 525 |
+
132
|
| 526 |
+
00:07:18,720 --> 00:07:22,350
|
| 527 |
+
and figure out which class our classifier is having trouble with.
|
| 528 |
+
|
| 529 |
+
133
|
| 530 |
+
00:07:24,110 --> 00:07:28,360
|
| 531 |
+
So let's quickly take a look at the code to generate this confusion matrix.
|
| 532 |
+
|
| 533 |
+
134
|
| 534 |
+
00:07:28,360 --> 00:07:29,000
|
| 535 |
+
All right.
|
| 536 |
+
|
| 537 |
+
135
|
| 538 |
+
00:07:29,300 --> 00:07:35,760
|
| 539 |
+
And later on we'll see how we can look at misclassified data; for now, this is the generic MNIST training
|
| 540 |
+
|
| 541 |
+
136
|
| 542 |
+
00:07:36,130 --> 00:07:36,470
|
| 543 |
+
code.
|
| 544 |
+
|
| 545 |
+
137
|
| 546 |
+
00:07:36,500 --> 00:07:40,480
|
| 547 |
+
You've seen it a few times before; it's used in a lot of my examples.
|
| 548 |
+
|
| 549 |
+
138
|
| 550 |
+
00:07:40,910 --> 00:07:43,700
|
| 551 |
+
There's one thing I wanted to show you here in this file.
|
| 552 |
+
|
| 553 |
+
139
|
| 554 |
+
00:07:43,790 --> 00:07:50,720
|
| 555 |
+
Basically, when we save our history here, some people have asked me: how can I save my history
|
| 556 |
+
|
| 557 |
+
140
|
| 558 |
+
00:07:50,720 --> 00:07:51,200
|
| 559 |
+
file.
|
| 560 |
+
|
| 561 |
+
141
|
| 562 |
+
00:07:51,500 --> 00:07:56,900
|
| 563 |
+
and look at it again? Because I've spent hours, or maybe days or a week, training a classifier, and
|
| 564 |
+
|
| 565 |
+
142
|
| 566 |
+
00:07:56,900 --> 00:08:01,090
|
| 567 |
+
I want to actually be able to see the plots again when I reload the file.
|
| 568 |
+
|
| 569 |
+
143
|
| 570 |
+
00:08:01,430 --> 00:08:04,330
|
| 571 |
+
Yes, you can: you can use a Python function called pickle.
|
| 572 |
+
|
| 573 |
+
144
|
| 574 |
+
00:08:04,700 --> 00:08:09,980
|
| 575 |
+
And basically what it does here is store the data as a pickled file; a pickle file is basically an array
|
| 576 |
+
|
| 577 |
+
145
|
| 578 |
+
00:08:09,980 --> 00:08:12,360
|
| 579 |
+
of data as a method of storage.
|
| 580 |
+
|
| 581 |
+
146
|
| 582 |
+
00:08:12,410 --> 00:08:16,920
|
| 583 |
+
I'm not going to get into the detail now but just know it's a way we can store files.
|
| 584 |
+
|
| 585 |
+
147
|
| 586 |
+
00:08:17,570 --> 00:08:22,910
|
| 587 |
+
So what we do here: we use pickle to open a file with 'wb', which is basically us saying we're
|
| 588 |
+
|
| 589 |
+
148
|
| 590 |
+
00:08:22,910 --> 00:08:27,260
|
| 591 |
+
going to write this pickle file, and then we just dump into this file
|
| 592 |
+
|
| 593 |
+
149
|
| 594 |
+
00:08:27,260 --> 00:08:28,880
|
| 595 |
+
our history, that is, history.history, the history file
|
| 596 |
+
|
| 597 |
+
150
|
| 598 |
+
00:08:28,880 --> 00:08:33,240
|
| 599 |
+
that we want to save, and then we close the file, and it's done.
|
| 600 |
+
|
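The saving step being described is essentially the two-line pickle pattern from the accompanying notebook, assuming history is the object returned by model.fit:

import pickle

# Write the Keras training history (a plain dict) to disk
pickle_out = open("MNIST_history.pickle", "wb")  # 'wb' = write in binary mode
pickle.dump(history.history, pickle_out)
pickle_out.close()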
| 601 |
+
151
|
| 602 |
+
00:08:33,620 --> 00:08:39,170
|
| 603 |
+
And similarly, if we want to look at this file, we simply just load it back here, and here we
|
| 604 |
+
|
| 605 |
+
152
|
| 606 |
+
00:08:39,170 --> 00:08:39,390
|
| 607 |
+
go.
|
| 608 |
+
|
| 609 |
+
153
|
| 610 |
+
00:08:39,420 --> 00:08:44,360
|
| 611 |
+
Well, we have it all, just for one epoch; if there were more than one epoch, it would look a lot bigger.
|
| 612 |
+
|
| 613 |
+
154
|
| 614 |
+
00:08:44,690 --> 00:08:47,540
|
| 615 |
+
But basically it's a dictionary, or a JSON file,
|
| 616 |
+
|
| 617 |
+
155
|
| 618 |
+
00:08:47,540 --> 00:08:50,110
|
| 619 |
+
for those of you who come from a JavaScript background.
|
| 620 |
+
|
| 621 |
+
156
|
| 622 |
+
00:08:50,810 --> 00:08:52,760
|
| 623 |
+
And basically this is how it looks.
|
| 624 |
+
|
| 625 |
+
157
|
| 626 |
+
00:08:52,760 --> 00:08:57,460
|
| 627 |
+
We have loss, accuracy, validation accuracy and validation loss, and the values for each here.
|
| 628 |
+
|
| 629 |
+
158
|
| 630 |
+
00:08:58,500 --> 00:09:01,380
|
| 631 |
+
These are the keys, and these are the values.
|
| 632 |
+
|
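Loading it back, as in the notebook, is just as short:

import pickle

# Read the pickled history back; it returns the same dict of lists
pickle_in = open("MNIST_history.pickle", "rb")
saved_history = pickle.load(pickle_in)
print(saved_history)  # keys: loss, accuracy, val_loss, val_accuracy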
| 633 |
+
159
|
| 634 |
+
00:09:01,380 --> 00:09:08,370
|
| 635 |
+
So now we can get some plots but these plots obviously for one epoch are pretty much not fun to look
|
| 636 |
+
|
| 637 |
+
160
|
| 638 |
+
00:09:08,370 --> 00:09:08,730
|
| 639 |
+
at.
|
| 640 |
+
|
| 641 |
+
161
|
| 642 |
+
00:09:08,760 --> 00:09:15,040
|
| 643 |
+
Just one point here, and on the accuracy chart one point here and one point here, but one can actually see
|
| 644 |
+
|
| 645 |
+
162
|
| 646 |
+
00:09:15,040 --> 00:09:17,240
|
| 647 |
+
it's really quite good.
|
| 648 |
+
|
| 649 |
+
163
|
| 650 |
+
00:09:17,820 --> 00:09:19,050
|
| 651 |
+
This is what I wanted to show you.
|
| 652 |
+
|
| 653 |
+
164
|
| 654 |
+
00:09:19,230 --> 00:09:22,560
|
| 655 |
+
This here is our confusion matrix and classification report.
|
| 656 |
+
|
| 657 |
+
165
|
| 658 |
+
00:09:22,560 --> 00:09:25,400
|
| 659 |
+
So we import, from sklearn.metrics,
|
| 660 |
+
|
| 661 |
+
166
|
| 662 |
+
00:09:25,560 --> 00:09:27,330
|
| 663 |
+
both of these functions here.
|
| 664 |
+
|
| 665 |
+
167
|
| 666 |
+
00:09:27,600 --> 00:09:30,850
|
| 667 |
+
And basically what we do is just get our predictions here.
|
| 668 |
+
|
| 669 |
+
168
|
| 670 |
+
00:09:30,960 --> 00:09:38,010
|
| 671 |
+
So we run x_test, our test or validation data, through our model's predict_classes, and basically
|
| 672 |
+
|
| 673 |
+
169
|
| 674 |
+
00:09:38,010 --> 00:09:43,870
|
| 675 |
+
we just print out the classification report; classification_report just takes two arguments:
|
| 676 |
+
|
| 677 |
+
170
|
| 678 |
+
00:09:43,950 --> 00:09:49,490
|
| 679 |
+
the y_test labels here, that is, our true labels, and our predictions.
|
| 680 |
+
|
| 681 |
+
171
|
| 682 |
+
00:09:49,500 --> 00:09:54,640
|
| 683 |
+
So basically what we're doing here is comparing labels to labels.
|
| 684 |
+
|
| 685 |
+
172
|
| 686 |
+
00:09:54,870 --> 00:10:00,570
|
| 687 |
+
And the reason we have to use the argmax function is because our labels were one-hot encoded before.
|
| 688 |
+
|
| 689 |
+
173
|
| 690 |
+
00:10:00,630 --> 00:10:06,420
|
| 691 |
+
So it's not like for like; we actually have to convert them back into basically a one-for-one type of
|
| 692 |
+
|
| 693 |
+
174
|
| 694 |
+
00:10:06,420 --> 00:10:07,500
|
| 695 |
+
matching.
|
| 696 |
+
|
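A minimal sketch of the step being described, assuming the model, x_test and one-hot y_test from the training code; predict_classes is the older Keras Sequential API used in this course:

from sklearn.metrics import classification_report, confusion_matrix
import numpy as np

y_pred = model.predict_classes(x_test)  # predicted class indices
y_true = np.argmax(y_test, axis=1)      # undo the one-hot encoding
print(classification_report(y_true, y_pred))
print(confusion_matrix(y_true, y_pred))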
| 697 |
+
175
|
| 698 |
+
00:10:07,530 --> 00:10:11,010
|
| 699 |
+
So this is basically what our predictions give us.
|
| 700 |
+
|
| 701 |
+
176
|
| 702 |
+
00:10:11,130 --> 00:10:16,500
|
| 703 |
+
And basically this is what the classification report looks like, which we saw in our slides.
|
| 704 |
+
|
| 705 |
+
177
|
| 706 |
+
00:10:16,840 --> 00:10:17,890
|
| 707 |
+
Here are the averages.
|
| 708 |
+
|
| 709 |
+
178
|
| 710 |
+
00:10:17,910 --> 00:10:23,310
|
| 711 |
+
They don't really tell us that much here, but on far more interesting datasets, of
|
| 712 |
+
|
| 713 |
+
179
|
| 714 |
+
00:10:23,300 --> 00:10:28,560
|
| 715 |
+
course, those values would differ; and the confusion matrix is done here.
|
| 716 |
+
|
| 717 |
+
180
|
| 718 |
+
00:10:28,980 --> 00:10:33,720
|
| 719 |
+
Basically the same thing, same exact arguments as above, and we get it here.
|
| 720 |
+
|
| 721 |
+
181
|
| 722 |
+
00:10:34,350 --> 00:10:38,940
|
| 723 |
+
So that's it for confusion matrices and our misclassifications.
|
11. Assessing Model Performance/3. Finding and Viewing Misclassified Data.srt
ADDED
|
@@ -0,0 +1,375 @@
| 1 |
+
1
|
| 2 |
+
00:00:00,830 --> 00:00:05,740
|
| 3 |
+
So welcome to 11.2, where we can actually find and view our misclassified data.
|
| 4 |
+
|
| 5 |
+
2
|
| 6 |
+
00:00:06,120 --> 00:00:12,790
|
| 7 |
+
If you recall correctly, our last classifier was actually confusing sevens with twos.
|
| 8 |
+
|
| 9 |
+
3
|
| 10 |
+
00:00:13,110 --> 00:00:16,800
|
| 11 |
+
So how do we actually see the sevens and twos that
|
| 12 |
+
|
| 13 |
+
4
|
| 14 |
+
00:00:17,040 --> 00:00:18,860
|
| 15 |
+
it's mixing up?
|
| 16 |
+
|
| 17 |
+
5
|
| 18 |
+
00:00:18,930 --> 00:00:22,180
|
| 19 |
+
So now let's talk a bit about it first.
|
| 20 |
+
|
| 21 |
+
6
|
| 22 |
+
00:00:22,180 --> 00:00:27,210
|
| 23 |
+
Firstly, this is actually an underused technique; I don't see that many computer vision data scientists
|
| 24 |
+
|
| 25 |
+
7
|
| 26 |
+
00:00:27,690 --> 00:00:30,800
|
| 27 |
+
using this technique to identify their classifier's weaknesses.
|
| 28 |
+
|
| 29 |
+
8
|
| 30 |
+
00:00:30,840 --> 00:00:35,850
|
| 31 |
+
I think it's crucial, because by looking at what it is misclassifying you can actually figure out: oh, I need
|
| 32 |
+
|
| 33 |
+
9
|
| 34 |
+
00:00:35,850 --> 00:00:39,240
|
| 35 |
+
more of this type of data to make my classifier smarter.
|
| 36 |
+
|
| 37 |
+
10
|
| 38 |
+
00:00:39,240 --> 00:00:43,520
|
| 39 |
+
We need to augment more, to add some robustness to our classifier.
|
| 40 |
+
|
| 41 |
+
11
|
| 42 |
+
00:00:43,890 --> 00:00:49,590
|
| 43 |
+
So viewing the misclassified test data can tell us a lot of things. Sometimes what's confusing it is the
|
| 44 |
+
|
| 45 |
+
12
|
| 46 |
+
00:00:49,590 --> 00:00:51,690
|
| 47 |
+
classes looking similar even to us.
|
| 48 |
+
|
| 49 |
+
13
|
| 50 |
+
00:00:52,080 --> 00:00:56,550
|
| 51 |
+
Maybe it's a more complex pattern, maybe we need to add more deep layers, and maybe our training data
|
| 52 |
+
|
| 53 |
+
14
|
| 54 |
+
00:00:56,550 --> 00:01:02,340
|
| 55 |
+
is mislabeled; that actually happens quite a bit to me, because I tend to label a lot of my data sets myself,
|
| 56 |
+
|
| 57 |
+
15
|
| 58 |
+
00:01:02,370 --> 00:01:07,660
|
| 59 |
+
which is tedious and exhausting and prone to making errors sometimes.
|
| 60 |
+
|
| 61 |
+
16
|
| 62 |
+
00:01:07,680 --> 00:01:09,370
|
| 63 |
+
So let's see how we do this.
|
| 64 |
+
|
| 65 |
+
17
|
| 66 |
+
00:01:09,380 --> 00:01:11,310
|
| 67 |
+
Now on to our iPython notebook.
|
| 68 |
+
|
| 69 |
+
18
|
| 70 |
+
00:01:11,330 --> 00:01:16,110
|
| 71 |
+
But before I go ahead, let me just show you what this actually tells us.
|
| 72 |
+
|
| 73 |
+
19
|
| 74 |
+
00:01:16,110 --> 00:01:19,560
|
| 75 |
+
This is some real-life data that has been misclassified by our classifier.
|
| 76 |
+
|
| 77 |
+
20
|
| 78 |
+
00:01:20,020 --> 00:01:23,900
|
| 79 |
+
So this data should look familiar to you.
|
| 80 |
+
|
| 81 |
+
21
|
| 82 |
+
00:01:24,180 --> 00:01:30,060
|
| 83 |
+
But basically what happens here is that this is the data input here, an image of a number.
|
| 84 |
+
|
| 85 |
+
22
|
| 86 |
+
00:01:30,090 --> 00:01:33,350
|
| 87 |
+
So it actually was a 6, and our classifier picked a 0.
|
| 88 |
+
|
| 89 |
+
23
|
| 90 |
+
00:01:33,660 --> 00:01:38,410
|
| 91 |
+
Now, if this clearly is a 6, our classifier is doing something very wrong here.
|
| 92 |
+
|
| 93 |
+
24
|
| 94 |
+
00:01:38,520 --> 00:01:39,470
|
| 95 |
+
This one isn't it.
|
| 96 |
+
|
| 97 |
+
25
|
| 98 |
+
00:01:39,480 --> 00:01:45,270
|
| 99 |
+
But we can kind of forgive our classifier slightly, because it sort of looks like someone wrote a 2 intentionally
|
| 100 |
+
|
| 101 |
+
26
|
| 102 |
+
00:01:45,700 --> 00:01:49,340
|
| 103 |
+
and then maybe their pen skipped and then wasn't able to continue with it.
|
| 104 |
+
|
| 105 |
+
27
|
| 106 |
+
00:01:49,410 --> 00:01:55,340
|
| 107 |
+
So possibly, because the 2 is the most pronounced part of this number,
|
| 108 |
+
|
| 109 |
+
28
|
| 110 |
+
00:01:55,500 --> 00:01:58,210
|
| 111 |
+
I can see why it classified it towards a two.
|
| 112 |
+
|
| 113 |
+
29
|
| 114 |
+
00:01:58,470 --> 00:01:59,900
|
| 115 |
+
This one was a 9.
|
| 116 |
+
|
| 117 |
+
30
|
| 118 |
+
00:02:00,150 --> 00:02:02,420
|
| 119 |
+
Clearly, our classifier predicted it was a 9.
|
| 120 |
+
|
| 121 |
+
31
|
| 122 |
+
00:02:02,430 --> 00:02:04,460
|
| 123 |
+
However, it actually was an 8.
|
| 124 |
+
|
| 125 |
+
32
|
| 126 |
+
00:02:04,470 --> 00:02:06,900
|
| 127 |
+
So when I said clearly it was a 9: it was actually an 8.
|
| 128 |
+
|
| 129 |
+
33
|
| 130 |
+
00:02:06,900 --> 00:02:14,220
|
| 131 |
+
Someone wrote it very poorly and basically made the bottom circle of the 8 very small, or perhaps it was
|
| 132 |
+
|
| 133 |
+
34
|
| 134 |
+
00:02:14,220 --> 00:02:15,070
|
| 135 |
+
mislabeled data.
|
| 136 |
+
|
| 137 |
+
35
|
| 138 |
+
00:02:15,120 --> 00:02:15,920
|
| 139 |
+
We don't even know.
|
| 140 |
+
|
| 141 |
+
36
|
| 142 |
+
00:02:16,140 --> 00:02:18,290
|
| 143 |
+
But let's trust our data for now,
|
| 144 |
+
|
| 145 |
+
37
|
| 146 |
+
00:02:18,390 --> 00:02:20,370
|
| 147 |
+
and let's assume this was an 8
|
| 148 |
+
|
| 149 |
+
38
|
| 150 |
+
00:02:20,400 --> 00:02:22,460
|
| 151 |
+
that was basically misclassified.
|
| 152 |
+
|
| 153 |
+
39
|
| 154 |
+
00:02:23,970 --> 00:02:29,230
|
| 155 |
+
This one is a 4, when it actually looks like a 9 to me, to be honest; my 4s weren't good either.
|
| 156 |
+
|
| 157 |
+
40
|
| 158 |
+
00:02:29,520 --> 00:02:32,380
|
| 159 |
+
I got scolded for that all the time in high school and primary school.
|
| 160 |
+
|
| 161 |
+
41
|
| 162 |
+
00:02:32,730 --> 00:02:33,330
|
| 163 |
+
So yeah.
|
| 164 |
+
|
| 165 |
+
42
|
| 166 |
+
00:02:33,360 --> 00:02:34,820
|
| 167 |
+
So we understand that one.
|
| 168 |
+
|
| 169 |
+
43
|
| 170 |
+
00:02:34,890 --> 00:02:38,570
|
| 171 |
+
This one, I'd say, is definitely a 6, or a G even.
|
| 172 |
+
|
| 173 |
+
44
|
| 174 |
+
00:02:38,570 --> 00:02:41,010
|
| 175 |
+
I mean, these should be digits, to be fair.
|
| 176 |
+
|
| 177 |
+
45
|
| 178 |
+
00:02:41,460 --> 00:02:47,460
|
| 179 |
+
So we can see why our classifier should've gotten that as a 6, because basically, you know, you don't
|
| 180 |
+
|
| 181 |
+
46
|
| 182 |
+
00:02:47,460 --> 00:02:49,010
|
| 183 |
+
do a 5 like this.
|
| 184 |
+
|
| 185 |
+
47
|
| 186 |
+
00:02:49,080 --> 00:02:50,230
|
| 187 |
+
So yeah.
|
| 188 |
+
|
| 189 |
+
48
|
| 190 |
+
00:02:50,250 --> 00:02:56,620
|
| 191 |
+
So let's go into our iPython notebook and see how we actually create plots or generate images like this.
|
| 192 |
+
|
| 193 |
+
49
|
| 194 |
+
00:02:57,120 --> 00:02:57,420
|
| 195 |
+
OK.
|
| 196 |
+
|
| 197 |
+
50
|
| 198 |
+
00:02:57,430 --> 00:03:03,330
|
| 199 |
+
So how do we find the misclassified data, basically, from our iPython notebook, from our Python code,
|
| 200 |
+
|
| 201 |
+
51
|
| 202 |
+
00:03:03,390 --> 00:03:04,430
|
| 203 |
+
basically.
|
| 204 |
+
|
| 205 |
+
52
|
| 206 |
+
00:03:04,530 --> 00:03:06,690
|
| 207 |
+
So let's think about this quickly, right?
|
| 208 |
+
|
| 209 |
+
53
|
| 210 |
+
00:03:06,810 --> 00:03:10,230
|
| 211 |
+
We have test data labels and test data.
|
| 212 |
+
|
| 213 |
+
54
|
| 214 |
+
00:03:10,440 --> 00:03:12,340
|
| 215 |
+
And we have our training data as well.
|
| 216 |
+
|
| 217 |
+
55
|
| 218 |
+
00:03:12,360 --> 00:03:15,680
|
| 219 |
+
So how do we figure out which labels have been predicted wrong?
|
| 220 |
+
|
| 221 |
+
56
|
| 222 |
+
00:03:15,960 --> 00:03:17,310
|
| 223 |
+
And that's actually fairly easy.
|
| 224 |
+
|
| 225 |
+
57
|
| 226 |
+
00:03:17,310 --> 00:03:25,170
|
| 227 |
+
All we need to do is compare y_test with y_pred, and that's what this np.absolute function
|
| 228 |
+
|
| 229 |
+
58
|
| 230 |
+
00:03:25,170 --> 00:03:27,060
|
| 231 |
+
does, right?
|
| 232 |
+
|
| 233 |
+
59
|
| 234 |
+
00:03:27,120 --> 00:03:32,790
|
| 235 |
+
It basically creates an array that stores a value of 1 when a misclassification occurs.
|
| 236 |
+
|
| 237 |
+
60
|
| 238 |
+
00:03:32,790 --> 00:03:35,500
|
| 239 |
+
And basically we use np.nonzero on this result.
|
| 240 |
+
|
| 241 |
+
61
|
| 242 |
+
00:03:35,760 --> 00:03:42,120
|
| 243 |
+
We basically get an array here such that, when the result is greater than zero, meaning
|
| 244 |
+
|
| 245 |
+
62
|
| 246 |
+
00:03:42,120 --> 00:03:48,750
|
| 247 |
+
it's basically misclassified, we get indices here, and these indices now correspond to the actual
|
| 248 |
+
|
| 249 |
+
63
|
| 250 |
+
00:03:48,960 --> 00:03:54,250
|
| 251 |
+
digits in y_test, all right, the entries in y_test that were actually misclassified.
|
| 252 |
+
|
| 253 |
+
64
|
| 254 |
+
00:03:54,420 --> 00:03:56,220
|
| 255 |
+
So y_test at 247:
|
| 256 |
+
|
| 257 |
+
65
|
| 258 |
+
00:03:56,310 --> 00:04:03,450
|
| 259 |
+
if you put some brackets in and go to 247, that was an actual misclassified data image
|
| 260 |
+
|
| 261 |
+
66
|
| 262 |
+
00:04:03,540 --> 00:04:04,390
|
| 263 |
+
input.
|
| 264 |
+
|
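A minimal sketch of this indexing trick, assuming integer label arrays y_true and y_pred as in the previous lecture:

import numpy as np

# Non-zero wherever the prediction differs from the true label
result = np.absolute(y_pred - y_true)
misclassified_indices = np.nonzero(result > 0)[0]
print(misclassified_indices[:10])  # positions of the first few misclassified test images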
| 265 |
+
67
|
| 266 |
+
00:04:04,740 --> 00:04:08,290
|
| 267 |
+
So let's run this, and we get this here.
|
| 268 |
+
|
| 269 |
+
68
|
| 270 |
+
00:04:08,910 --> 00:04:11,700
|
| 271 |
+
And this does it quite quickly as you can see.
|
| 272 |
+
|
| 273 |
+
69
|
| 274 |
+
00:04:11,700 --> 00:04:15,930
|
| 275 |
+
This is providing you actually have y_pred, which we got from predict.
|
| 276 |
+
|
| 277 |
+
70
|
| 278 |
+
00:04:15,930 --> 00:04:21,150
|
| 279 |
+
Basically, if you remember correctly, from this here, model.predict_classes, and then we generated
|
| 280 |
+
|
| 281 |
+
71
|
| 282 |
+
00:04:21,150 --> 00:04:24,840
|
| 283 |
+
our confusion matrix and classification report.
|
| 284 |
+
|
| 285 |
+
72
|
| 286 |
+
00:04:24,840 --> 00:04:30,120
|
| 287 |
+
So now let's display it using OpenCV. I actually commented out some lines here, and that's because
|
| 288 |
+
|
| 289 |
+
73
|
| 290 |
+
00:04:30,120 --> 00:04:35,850
|
| 291 |
+
if you wanted to load a model, say we didn't run this model and you just wanted to load
|
| 292 |
+
|
| 293 |
+
74
|
| 294 |
+
00:04:35,940 --> 00:04:42,000
|
| 295 |
+
a model, you can load it in here as classifier, and just change model to classifier here, and basically do
|
| 296 |
+
|
| 297 |
+
75
|
| 298 |
+
00:04:42,000 --> 00:04:42,710
|
| 299 |
+
that same thing.
|
| 300 |
+
|
| 301 |
+
76
|
| 302 |
+
00:04:42,750 --> 00:04:48,230
|
| 303 |
+
And I've commented out this line here; it was a print statement, I was just printing the labels.
|
| 304 |
+
|
| 305 |
+
77
|
| 306 |
+
00:04:48,300 --> 00:04:48,860
|
| 307 |
+
OK.
|
| 308 |
+
|
| 309 |
+
78
|
| 310 |
+
00:04:49,110 --> 00:04:53,730
|
| 311 |
+
We're going to actually display it all in one image, for the first 10 misclassifications.
|
| 312 |
+
|
| 313 |
+
79
|
| 314 |
+
00:04:53,730 --> 00:04:54,810
|
| 315 |
+
So let's take a look.
|
| 316 |
+
|
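A rough sketch of a display loop along these lines, assuming the normalized (N, 28, 28, 1) x_test and the arrays from the previous step; the exact layout in the lecture's notebook may differ:

import cv2
import numpy as np

panels = []
for i in misclassified_indices[:10]:
    # Recover a displayable 8-bit image from the normalized test data
    img = (x_test[i].reshape(28, 28) * 255).astype(np.uint8)
    img = cv2.cvtColor(img, cv2.COLOR_GRAY2BGR)
    img = cv2.resize(img, (112, 112), interpolation=cv2.INTER_NEAREST)
    cv2.putText(img, str(y_pred[i]), (4, 20),
                cv2.FONT_HERSHEY_SIMPLEX, 0.6, (0, 255, 0), 1)  # predicted label, in green
    cv2.putText(img, str(y_true[i]), (4, 106),
                cv2.FONT_HERSHEY_SIMPLEX, 0.6, (0, 0, 255), 1)  # true label
    panels.append(img)

cv2.imshow("First 10 misclassifications", np.hstack(panels))
cv2.waitKey(0)
cv2.destroyAllWindows()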
| 317 |
+
80
|
| 318 |
+
00:04:57,390 --> 00:05:01,640
|
| 319 |
+
So, you know, let's look at this.
|
| 320 |
+
|
| 321 |
+
81
|
| 322 |
+
00:05:01,760 --> 00:05:02,580
|
| 323 |
+
Exactly.
|
| 324 |
+
|
| 325 |
+
82
|
| 326 |
+
00:05:02,900 --> 00:05:08,470
|
| 327 |
+
So what it tells us is that this was the input image; this is what it predicted, in green.
|
| 328 |
+
|
| 329 |
+
83
|
| 330 |
+
00:05:08,690 --> 00:05:10,830
|
| 331 |
+
And this is the actual true value.
|
| 332 |
+
|
| 333 |
+
84
|
| 334 |
+
00:05:11,180 --> 00:05:12,000
|
| 335 |
+
So it's interesting.
|
| 336 |
+
|
| 337 |
+
85
|
| 338 |
+
00:05:12,080 --> 00:05:13,180
|
| 339 |
+
Let's take a look at another one.
|
| 340 |
+
|
| 341 |
+
86
|
| 342 |
+
00:05:15,180 --> 00:05:17,560
|
| 343 |
+
This one is actually a 6, predicted as a 0.
|
| 344 |
+
|
| 345 |
+
87
|
| 346 |
+
00:05:18,100 --> 00:05:19,680
|
| 347 |
+
Then keep going.
|
| 348 |
+
|
| 349 |
+
88
|
| 350 |
+
00:05:19,680 --> 00:05:21,130
|
| 351 |
+
It's an 8, actually,
|
| 352 |
+
|
| 353 |
+
89
|
| 354 |
+
00:05:21,140 --> 00:05:22,410
|
| 355 |
+
But it predicted a 4.
|
| 356 |
+
|
| 357 |
+
90
|
| 358 |
+
00:05:22,680 --> 00:05:23,490
|
| 359 |
+
Same for this one.
|
| 360 |
+
|
| 361 |
+
91
|
| 362 |
+
00:05:23,490 --> 00:05:24,030
|
| 363 |
+
This one.
|
| 364 |
+
|
| 365 |
+
92
|
| 366 |
+
00:05:24,020 --> 00:05:25,780
|
| 367 |
+
This one... this is pretty cool.
|
| 368 |
+
|
| 369 |
+
93
|
| 370 |
+
00:05:26,010 --> 00:05:30,630
|
| 371 |
+
So you can keep going and go through all the misclassifications if you want to see what is actually confusing
|
| 372 |
+
|
| 373 |
+
94
|
| 374 |
+
00:05:30,670 --> 00:05:31,360
|
| 375 |
+
your classifier.
|
11. Confusion Matrix and Viewing Misclassifications/11.1 - 11.2 - MNIST Confusion Matrix Analysis and Viewing Misclassifications.ipynb
ADDED
|
@@ -0,0 +1,484 @@
| 1 |
+
{
|
| 2 |
+
"cells": [
|
| 3 |
+
{
|
| 4 |
+
"cell_type": "markdown",
|
| 5 |
+
"metadata": {},
|
| 6 |
+
"source": [
|
| 7 |
+
"# Confusion Matrix Analysis and Viewing Misclassifications"
|
| 8 |
+
]
|
| 9 |
+
},
|
| 10 |
+
{
|
| 11 |
+
"cell_type": "code",
|
| 12 |
+
"execution_count": 1,
|
| 13 |
+
"metadata": {},
|
| 14 |
+
"outputs": [
|
| 15 |
+
{
|
| 16 |
+
"name": "stdout",
|
| 17 |
+
"output_type": "stream",
|
| 18 |
+
"text": [
|
| 19 |
+
"x_train shape: (60000, 28, 28, 1)\n",
|
| 20 |
+
"60000 train samples\n",
|
| 21 |
+
"10000 test samples\n",
|
| 22 |
+
"Number of Classes: 10\n",
|
| 23 |
+
"Model: \"sequential\"\n",
|
| 24 |
+
"_________________________________________________________________\n",
|
| 25 |
+
"Layer (type) Output Shape Param # \n",
|
| 26 |
+
"=================================================================\n",
|
| 27 |
+
"conv2d (Conv2D) (None, 26, 26, 32) 320 \n",
|
| 28 |
+
"_________________________________________________________________\n",
|
| 29 |
+
"conv2d_1 (Conv2D) (None, 24, 24, 64) 18496 \n",
|
| 30 |
+
"_________________________________________________________________\n",
|
| 31 |
+
"max_pooling2d (MaxPooling2D) (None, 12, 12, 64) 0 \n",
|
| 32 |
+
"_________________________________________________________________\n",
|
| 33 |
+
"dropout (Dropout) (None, 12, 12, 64) 0 \n",
|
| 34 |
+
"_________________________________________________________________\n",
|
| 35 |
+
"flatten (Flatten) (None, 9216) 0 \n",
|
| 36 |
+
"_________________________________________________________________\n",
|
| 37 |
+
"dense (Dense) (None, 128) 1179776 \n",
|
| 38 |
+
"_________________________________________________________________\n",
|
| 39 |
+
"dropout_1 (Dropout) (None, 128) 0 \n",
|
| 40 |
+
"_________________________________________________________________\n",
|
| 41 |
+
"dense_1 (Dense) (None, 10) 1290 \n",
|
| 42 |
+
"=================================================================\n",
|
| 43 |
+
"Total params: 1,199,882\n",
|
| 44 |
+
"Trainable params: 1,199,882\n",
|
| 45 |
+
"Non-trainable params: 0\n",
|
| 46 |
+
"_________________________________________________________________\n",
|
| 47 |
+
"None\n",
|
| 48 |
+
"Train on 60000 samples, validate on 10000 samples\n",
|
| 49 |
+
"60000/60000 [==============================] - 117s 2ms/sample - loss: 0.7767 - accuracy: 0.7570 - val_loss: 0.2555 - val_accuracy: 0.9265\n",
|
| 50 |
+
"Test loss: 0.25551080925762654\n",
|
| 51 |
+
"Test accuracy: 0.9265\n"
|
| 52 |
+
]
|
| 53 |
+
}
|
| 54 |
+
],
|
| 55 |
+
"source": [
|
| 56 |
+
"from tensorflow.keras.datasets import mnist\n",
|
| 57 |
+
"import tensorflow as tf\n",
|
| 59 |
+
"from tensorflow.keras.models import Sequential\n",
|
| 60 |
+
"from tensorflow.keras.layers import Dense, Dropout, Flatten\n",
|
| 61 |
+
"from tensorflow.keras.layers import Conv2D, MaxPooling2D\n",
|
| 62 |
+
"from tensorflow.keras.optimizers import SGD \n",
|
| 63 |
+
"from tensorflow.keras.utils import to_categorical\n",
|
| 64 |
+
"\n",
|
| 65 |
+
"# Training Parameters\n",
|
| 66 |
+
"batch_size = 64\n",
|
| 67 |
+
"epochs = 1\n",
|
| 68 |
+
"\n",
|
| 69 |
+
"# loads the MNIST dataset\n",
|
| 70 |
+
"(x_train, y_train), (x_test, y_test) = mnist.load_data()\n",
|
| 71 |
+
"\n",
|
| 72 |
+
"# Lets store the number of rows and columns\n",
|
| 73 |
+
"img_rows = x_train[0].shape[0]\n",
|
| 74 |
+
"img_cols = x_train[1].shape[0]\n",
|
| 75 |
+
"\n",
|
| 76 |
+
"# Getting our date in the right 'shape' needed for Keras\n",
|
| 77 |
+
"# We need to add a 4th dimenion to our date thereby changing our\n",
|
| 78 |
+
"# Our original image shape of (60000,28,28) to (60000,28,28,1)\n",
|
| 79 |
+
"x_train = x_train.reshape(x_train.shape[0], img_rows, img_cols, 1)\n",
|
| 80 |
+
"x_test = x_test.reshape(x_test.shape[0], img_rows, img_cols, 1)\n",
|
| 81 |
+
"\n",
|
| 82 |
+
"# store the shape of a single image \n",
|
| 83 |
+
"input_shape = (img_rows, img_cols, 1)\n",
|
| 84 |
+
"\n",
|
| 85 |
+
"# change our image type to float32 data type\n",
|
| 86 |
+
"x_train = x_train.astype('float32')\n",
|
| 87 |
+
"x_test = x_test.astype('float32')\n",
|
| 88 |
+
"\n",
|
| 89 |
+
"# Normalize our data by changing the range from (0 to 255) to (0 to 1)\n",
|
| 90 |
+
"x_train /= 255\n",
|
| 91 |
+
"x_test /= 255\n",
|
| 92 |
+
"\n",
|
| 93 |
+
"print('x_train shape:', x_train.shape)\n",
|
| 94 |
+
"print(x_train.shape[0], 'train samples')\n",
|
| 95 |
+
"print(x_test.shape[0], 'test samples')\n",
|
| 96 |
+
"\n",
|
| 97 |
+
"# Now we one hot encode outputs\n",
|
| 98 |
+
"y_train = to_categorical(y_train)\n",
|
| 99 |
+
"y_test = to_categorical(y_test)\n",
|
| 100 |
+
"\n",
|
| 101 |
+
"# Let's count the number columns in our hot encoded matrix \n",
|
| 102 |
+
"print (\"Number of Classes: \" + str(y_test.shape[1]))\n",
|
| 103 |
+
"\n",
|
| 104 |
+
"num_classes = y_test.shape[1]\n",
|
| 105 |
+
"num_pixels = x_train.shape[1] * x_train.shape[2]\n",
|
| 106 |
+
"\n",
|
| 107 |
+
"# create model\n",
|
| 108 |
+
"model = Sequential()\n",
|
| 109 |
+
"\n",
|
| 110 |
+
"model.add(Conv2D(32, kernel_size=(3, 3), activation='relu', input_shape=input_shape))\n",
|
| 111 |
+
"model.add(Conv2D(64, (3, 3), activation='relu'))\n",
|
| 112 |
+
"model.add(MaxPooling2D(pool_size=(2, 2)))\n",
|
| 113 |
+
"model.add(Dropout(0.25))\n",
|
| 114 |
+
"model.add(Flatten())\n",
|
| 115 |
+
"model.add(Dense(128, activation='relu'))\n",
|
| 116 |
+
"model.add(Dropout(0.5))\n",
|
| 117 |
+
"model.add(Dense(num_classes, activation='softmax'))\n",
|
| 118 |
+
"\n",
|
| 119 |
+
"model.compile(loss = 'categorical_crossentropy',\n",
|
| 120 |
+
" optimizer = SGD(0.01),\n",
|
| 121 |
+
" metrics = ['accuracy'])\n",
|
| 122 |
+
"\n",
|
| 123 |
+
"# We can use the summary function to display our model layers and parameters\n",
|
| 124 |
+
"print(model.summary())\n",
|
| 125 |
+
"\n",
|
| 126 |
+
"history = model.fit(x_train, y_train,\n",
|
| 127 |
+
" batch_size=batch_size,\n",
|
| 128 |
+
" epochs=epochs,\n",
|
| 129 |
+
" verbose=1,\n",
|
| 130 |
+
" validation_data=(x_test, y_test))\n",
|
| 131 |
+
"\n",
|
| 132 |
+
"score = model.evaluate(x_test, y_test, verbose=0)\n",
|
| 133 |
+
"print('Test loss:', score[0])\n",
|
| 134 |
+
"print('Test accuracy:', score[1])"
|
| 135 |
+
]
|
| 136 |
+
},
|
| 137 |
+
{
|
| 138 |
+
"cell_type": "markdown",
|
| 139 |
+
"metadata": {},
|
| 140 |
+
"source": [
|
| 141 |
+
"#### Let's save our history file"
|
| 142 |
+
]
|
| 143 |
+
},
|
| 144 |
+
{
|
| 145 |
+
"cell_type": "code",
|
| 146 |
+
"execution_count": 2,
|
| 147 |
+
"metadata": {},
|
| 148 |
+
"outputs": [],
|
| 149 |
+
"source": [
|
| 150 |
+
"import pickle \n",
|
| 151 |
+
"\n",
|
| 152 |
+
"pickle_out = open(\"MNIST_history.pickle\",\"wb\")\n",
|
| 153 |
+
"pickle.dump(history.history, pickle_out)\n",
|
| 154 |
+
"pickle_out.close()"
|
| 155 |
+
]
|
| 156 |
+
},
|
| 157 |
+
{
|
| 158 |
+
"cell_type": "markdown",
|
| 159 |
+
"metadata": {},
|
| 160 |
+
"source": [
|
| 161 |
+
"#### Loading out saved history is as simple as these two lines"
|
| 162 |
+
]
|
| 163 |
+
},
|
| 164 |
+
{
|
| 165 |
+
"cell_type": "code",
|
| 166 |
+
"execution_count": 3,
|
| 167 |
+
"metadata": {},
|
| 168 |
+
"outputs": [
|
| 169 |
+
{
|
| 170 |
+
"name": "stdout",
|
| 171 |
+
"output_type": "stream",
|
| 172 |
+
"text": [
|
| 173 |
+
"{'loss': [0.7766780077775319], 'accuracy': [0.75701666], 'val_loss': [0.255510806620121], 'val_accuracy': [0.9265]}\n"
|
| 174 |
+
]
|
| 175 |
+
}
|
| 176 |
+
],
|
| 177 |
+
"source": [
|
| 178 |
+
"pickle_in = open(\"MNIST_history.pickle\",\"rb\")\n",
|
| 179 |
+
"saved_history = pickle.load(pickle_in)\n",
|
| 180 |
+
"print(saved_history)"
|
| 181 |
+
]
|
| 182 |
+
},
|
| 183 |
+
{
|
| 184 |
+
"cell_type": "code",
|
| 185 |
+
"execution_count": 5,
|
| 186 |
+
"metadata": {},
|
| 187 |
+
"outputs": [
|
| 188 |
+
{
|
| 189 |
+
"data": {
|
| 190 |
+
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAYIAAAEJCAYAAACZjSCSAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAe3ElEQVR4nO3dfXhU5bnv8e9tCMYSBIQKamxDC1ohhhBDEEXlRVC0YlW2EPEF+4LaWq0c3UUvq5Ztz+ZYVNRiW3RDa1XYtG6VFgQqJUdRq4AiClwYpCgRjgKtQBBfovf5Y4Y4hAkJmayZJM/vc11zOWutZ9bcd4j5zVpr5hlzd0REJFyHZLoAERHJLAWBiEjgFAQiIoFTEIiIBE5BICISOAWBiEjgIg0CMzvbzNaZ2Xozm5hk+9fMbImZvWZmq8zsnCjrERGR/VlUnyMwsyzgLWAYUAksA8rcfU3CmOnAa+7+azPrBcx39/xIChIRkaTaRLjvUmC9u28AMLPZwPnAmoQxDhwev98B2FzfTrt06eL5+flNW2ka7N69m3bt2mW6jLQKrefQ+gX13JKsWLFim7t/Ndm2KIPgGGBTwnIl0L/WmDuARWb2Y6AdcGayHZnZeGA8QNeuXZkyZUqTFxu1qqoqcnNzM11GWoXWc2j9gnpuSQYPHvxOXduiDAJLsq72eagy4HfufreZDQD+YGYF7v7FPg9ynw5MBygpKfFBgwZFUW+kysvLaYl1pyK0nkPrF9RzaxHlxeJK4NiE5Tz2P/XzPWAOgLu/BOQAXSKsSUREaokyCJYBPc2su5m1BcYAc2uNeRcYCmBmJxALgq0R1iQiIrVEdmrI3avN7FpgIZAFzHD31WY2CVju7nOB/wU8ZGY3EDttNM41HapIvT777DMqKyv5+OOPM1pHhw4dWLt2bUZrSLfm3nNOTg55eXlkZ2c3+DFRXiPA3ecD82utuy3h/hrg1ChrEInc0nuhzyVpfcrKykrat29Pfn4+Zskux6XHrl27aN++fcaePxOac8/uzvbt26msrKR79+4Nfpw+WSySqqoP4IX70vqUH3/8MZ07d85oCEjzY2Z07tz5oI8UFQQiqTr1enj9cdp+8q+0Pq1CQJJpzO+FgkAkVe27QZ8yjt30P5mupF73/vWtTJcgzZCCQKQpHHcWx1bOhTs67Htb8p+Zrmwf9y2uaJL9DBo0iIULF+6zburUqfzwhz884OP2fhBr8+bNjBo1qs59L1++/ID7mTp1Kh999FHN8jnnnMOHH37YkNKTeumll+jevTtFRUUUFRWRm5vL8ccfT1FREZdffvlB7euLL75g8uTJdW7Py8tLqdYoKAhEUvXpbpj/76z91vVwx459b4NvznR1kSgrK2P27Nn7rJs9ezZlZWUNevzRRx/Nn/70p0Y/f+0gmD9/Ph07dmz0/hYsWMCUKVNYuXIlK1eupKSkhMcee4yVK1fyyCOPHNS+6guC5khBIJKqeTdCXgnvdxuS6UrSZtSoUfzlL3/hk08+AWDjxo1s3ryZgQMHUlVVxdChQykuLubEE0/k6aef3u/xGzdupKCgAIA9e/YwZswYCgsLGT16NHv27KkZd80111BSUkLv3r25/fbbAbj//vvZvHkzgwcPZvDgwQDk5+ezbds2AO655x4KCgooKChg6tSpNc93wgkn8IMf/IDevXszfPjwfZ5n8eLFnHlm0hluAKiurmbChAmUlpYyYMAAHn74YQDee+89Bg4cSFFREQUFBbz44otMnDiRXbt2HdTRxLZt2xg5ciSFhYWccsopvPnmmwD87W9/o0+fPhQVFVFcXMzu3buTPmeqIn37qEir99pjsPlV+MHf4MVlGSkhf+K8SMZvnHxunds6d+5MaWkpzz77LGPGjGH27NmMHj0aMyMnJ4cnn3ySww8/nG3btnHyySczcuTIOi9i/vrXv+YrX/kKq1atYtWqVRQXF9ds+8UvfsERRxzB559/ztChQ1m1ahXXXXcd99xzD0uWLKFLl30nIlixYgUzZ87k5Zdfxt3p378/Z5xxBp06daKiooJZs2bx0EMPcfHFF/PEE09w6aWXsm3bNrKzs+nQoUOd/U6fPp0jjzySV155hW3btjFs2DCGDx/OrFmzOO+88/jpT3/K559/zp49eygtLeXhhx9m5cqVDfo5A/zsZz+jf//+zJ07l0WLFjFu3DiWL1/OL3/5S6ZPn07//v2pqqoiJyeHRx99dL/nTJWOCERSsXsr/NvvoG3Lm40yVWVlZTWndxJPC7k7t9xyC4WFhZx55pm89957vP/++3Xu57nnnuPSSy8FoLCwkMLCwpptc+bMobi4mL59+7J69WrWrFlT124AWLp0KRdccAHt2rUjNzeXCy+8kOeffx6g5hoAwEknncTGjRsBWLRoEcOHDz/gfhctWsTMmTMpKipiyJAhfPjhh1RUVNCvXz8efvhhfv7zn/Pmm282ejK6pUuXctlllwEwfPhwNm/ezO7duzn11FP5yU9+wgMPPMDOnTvJyspqsudMpCMCkVQM/EmmKzjgK/fa8ifOO6jxB/Kd73yHG264gVdffZU9e/bUvJJ/7LHH2Lp1KytWrCA7O5v8/Px639ee7GjhH//4B1OmTGHZsmV06tSJcePG1bufA01McOihh9bcz8rKqnkl/cwzzzBhwoR69/vggw8ydOjQ/T5QVl5ezrx58xg7diw333wzo0ePPuC+GlL33uVbb72VkSNHMm/ePPr160d5eTlDhgzZ7znHjh170M+ZSEcEItIoubm5nHbaaXz3u9/d5yLxjh07OPLII8nOzmbJkiW8806dsx8DcPrpp/PYY48B8Oabb7Jq1SoAdu7cSbt27ejQoQPvv/8+zzzzTM1j2rdvz65du5Lu66mnnuKjjz5i9+7dPPnkk5x22ml1Pre7s2rVqpojhbqcddZZPPjgg1RXVwOwbt069uzZwzvvvEO3bt0YP34848aN47XXXqNNm9jr671jGyLxZ/Dss8+Sl5dHu3btePvttyksLOTmm2+mb9++rFu3LulzpkpHBCLSaKNGjWLs2LH7vINo7NixnHfeeZSUlFBUVMS3vvWtA+7jmmuu4corr6SwsJCioiJKS0sB6NOnD3379qV379584xvf4NRTv5yNZvz48YwYMYKjjjqKJUuW1KwvLi5m3LhxNfv4/ve/T9++fWtOA9W2YsUK+vbtW++HsK666ireffddioqK+OKLL+jWrRtPP/00ixcv5p577iE7O5vc3FweffRRAL73ve9RWFhISUlJ0ncd9e7du+Y5L7nkEiZNmlTzM8jNzWXmzJkATJkyheeff55DDjmEwsJChg8fzqOPPpr0OVMR2VdVRqWkpMTre49xc9Qa5zCvT2g9p7PftWvXcsIJJxz045ry1BA073l3GuLOO++kR48ejBkzpsGPaQk9J/v9MLMV7l6SbLyOCEQCcv3QnpkuoVm59dZbM11Cs6BrBCIBuWHYcZkuQZohBYGISOAUBCIigVMQiIgETkEgIhI4BYFICJbeC
7vqnubhYG3fvp2ioiJOPfVUunXrxjHHHFMzhfOnn37aoH1ceeWVrFu37oBjpk2bVvNBq1QNHDjwoOb/CYnePioSgr1fp3n2/26S3XXu3JmVK1eya9cu7r77bnJzc7nxxhv3GePuuDuHHJL89ebeD00dyI9+9KMmqVcOTEcEIiGIf51mUx4VJLN+/XoKCgq4+uqrKS4uZsuWLYwfP75mKulJkybVjN37Cr26upqOHTsyceJE+vTpw4ABA/jggw+A2Pv8904lPXDgQCZOnEhpaSnHH398zfTLu3fv5qKLLqJPnz6UlZVRUlLS4Ff+e/bs4YorruDEE0+kuLiY5557DoA33niDfv36UVRURGFhIRs2bGDXrl2MGDGCU045hYKCgpS+T6G50RGBSEt3R93TJ+/n7oP4HMEdOw6+FmDNmjXMnDmT3/zmNwBMnjyZI444gurqagYPHsyoUaPo1avXPo/ZsWMHZ5xxBpMnT2bChAnMmDGDiRMn7rdvd+eVV15h7ty5TJo0iQULFvDAAw/QrVs3nnjiCV5//fV9prGuz/3330/btm154403WL16Neeccw4VFRU8+OCD3HjjjYwePZpPPvkEd+fpp58mPz+fOXPm0L59e3bsaNzPpznSEYGINKlvfvOb9OvXr2Z51qxZFBcXU1xczNq1a5NOJX3YYYcxYsQIYN8pomu78MIL9xuzdOnSmiki+vTpQ+/evRtca+L0z7179+boo49m/fr1nHLKKdx5553cddddbNq0iZycHAoLC1mwYAG33347L7zwwgG/v6Cl0RGBSEvXkFfun+6G6YNj02YXXRJpOe3affndDBUVFdx333288sordOzYkUsvvTTpVNJt27atuZ+VlVXnzJ17p5JOHJPKfGl1Pfayyy5jwIABzJs3j2HDhvH73/+e008/neXLl/PEE09w00038e1vf5tbbrml0c/dnOiIQCQE8a/TjDoEatu5cyft27fn8MMPZ8uWLft94X1TGDhwIHPmzAFi5/br+/KaRInTP69du5YtW7bQo0cPNmzYQI8ePbj++us599xzWbVqFe+99x65ubmUlZUxYcIEXn311SbvJVN0RCDS2iV+nWaaFRcX06tXLwoKCvabSrqp/PjHP+byyy+nsLCQ4uJiCgoK6jxtc9ZZZ5GdnQ3AaaedxowZM7jqqqs48cQTyc7O5pFHHqFt27Y8/vjjzJo1i+zsbI4++mjuvPPOmu8jBsjJyam5BtIaaBrqNAltSmYIr+dmOw310qlw3Flw5MFPW12f5jAlc3V1NdXV1eTk5FBRUcHw4cOpqKio+YKYptYceq6PpqEWkX01g6/TjFJVVRVDhw6luroad+e3v/1tZCHQWumnJSItWseOHVmxYkWmy2jRIr1YbGZnm9k6M1tvZvu9KdjM7jWzlfHbW2b2YZT1iLQmLe20rqRHY34vIjsiMLMsYBowDKgElpnZXHevuaTv7jckjP8x0DeqekRak5ycHLZv307nzp3r/b5dCYe7s337dnJycg7qcVGeGioF1rv7BgAzmw2cD9T13q4y4PYI6xFpNfLy8qisrGTr1q0ZrePjjz8+6D86LV1z7zknJ4e8vLyDekyUQXAMsClhuRLon2ygmX0d6A4kfX+bmY0HxgN07dqV8vLyJi00Haqqqlpk3akIrefQ+oVYz7m5uZkuI61aQs/vvPPOQY2PMgiSHa/WdfJqDPAnd/882UZ3nw5Mh9jbR1viWxJDeyslhNdzaP2Cem4torxYXAkcm7CcB2yuY+wYYFaEtYiISB2iDIJlQE8z625mbYn9sZ9be5CZHQ90Al6KsBYREalDZEHg7tXAtcBCYC0wx91Xm9kkMxuZMLQMmO16L5yISEZE+oEyd58PzK+17rZay3dEWYOIiByYZh8VEQmcgkBEJHAKAhGRwCkIREQCpyAQEQmcgkBEJHAKAhGRwCkIREQCpyAQEQmcgkBEJHAKAhGRwCkIREQCpyAQEQmcgkBEJHAKAhGRwCkIREQCpyAQEQmcgkBEJHAKAhGRwCkIREQCpyAQEQmcgkBEJHAKAhGRwCkIREQCpyAQEQmcgkBEJHAKAhGRwCkIREQCpyAQEQlcpEFgZmeb2TozW29mE+sYc7GZrTGz1Wb2eJT1iIjI/tpEtWMzywKmAcOASmCZmc119zUJY3oCNwOnuvu/zOzIqOoREZHkojwiKAXWu/sGd/8UmA2cX2vMD4Bp7v4vAHf/IMJ6REQkiciOCIBjgE0Jy5VA/1pjjgMwsxeALOAOd19Qe0dmNh4YD9C1a1fKy8ujqDdSVVVVLbLuVITWc2j9gnpuLaIMAkuyzpM8f09gEJAHPG9mBe7+4T4Pcp8OTAcoKSnxQYMGNXmxUSsvL6cl1p2K0HoOrV9Qz61FlKeGKoFjE5bzgM1Jxjzt7p+5+z+AdcSCQURE0iTKIFgG9DSz7mbWFhgDzK015ilgMICZdSF2qmhDhDWJiEgtkQWBu1cD1wILgbXAHHdfbWaTzGxkfNhCYLuZrQGWADe5+/aoahIRkf1FeY0Ad58PzK+17raE+w5MiN9ERCQD9MliEZHAKQhERAKnIBARCZyCQEQkcAoCEZHAKQhERAKnIBARCZyCQEQkcAoCEZHAKQhERAKnIBARCZyCQEQkcAoCEZHAKQhERAKnIBARCZyCQEQkcAoCEZHAKQhERALXoCAws2+a2aHx+4PM7Doz6xhtaSIikg4NPSJ4AvjczHoA/wV0Bx6PrCoREUmbhgbBF+5eDVwATHX3G4CjoitLRETSpaFB8JmZlQFXAH+Jr8uOpiQREUmnhgbBlcAA4Bfu/g8z6w48Gl1ZIiKSLm0aMsjd1wDXAZhZJ6C9u0+OsjAREUmPhr5rqNzMDjezI4DXgZlmdk+0pYmISDo09NRQB3ffCVwIzHT3k4AzoytLRETSpaFB0MbMjgIu5suLxSIi0go0NAgmAQuBt919mZl9A6iIriwREUmXhl4s/iPwx4TlDcBFURUlIiLp09CLxXlm9qSZfWBm75vZE2aW14DHnW1m68xsvZlNTLJ9nJltNbOV8dv3G9OEiIg0XkNPDc0E5gJHA8cAf46vq5OZZQHTgBFAL6DMzHolGfrf7l4Uvz3c4MpFRKRJNDQIvuruM929On77HfDVeh5TCqx39w3u/ikwGzg/hVpFRCQCDbpGAGwzs0uBWfHlMmB7PY85BtiUsFwJ9E8y7iIzOx14C7jB3TfVHmBm44HxAF27dqW8vLyBZTcfVVVVLbLuVITWc2j9gnpuLRoaBN8FfgXcCzjwIrFpJw7EkqzzWst/Bma5+ydmdjXwe2DIfg9ynw5MBygpKfFBgwY1sOzmo7y8nJZYdypC6zm0fkE9txYNOjXk7u+6+0h3/6q7H+nu3yH24bIDqQSOTVjOAzbX2u92d/8kvvgQcFID6xYRkSaSyjeUTahn+zKgp5l1N7O2wBhiF5xrxD+kttdIYG0K9YiISCM09NRQMslO/dRw92ozu5bYB9GygBnuvtrMJgHL3X0ucJ2ZjQSqgX8C41KoR0REGiGVIKh9vn//Ae7z
gfm11t2WcP9m4OYUahARkRQdMAjMbBfJ/+AbcFgkFYmISFodMAjcvX26ChERkcxI5WKxiIi0AgoCEZHAKQhERAKnIBARCZyCQEQkcAoCEZHAKQhERAKnIBARCZyCQEQkcAoCEZHAKQhERAKnIBARCZyCQEQkcAoCEZHAKQhERAKnIBARCZyCQEQkcAoCEZHAKQhERAKnIBARCZyCQEQkcAoCEZHAKQhERAKnIBARCZyCQEQkcAoCEZHAKQhERAIXaRCY2dlmts7M1pvZxAOMG2VmbmYlUdYjIiL7iywIzCwLmAaMAHoBZWbWK8m49sB1wMtR1SIiInWL8oigFFjv7hvc/VNgNnB+knH/AdwFfBxhLSIiUoc2Ee77GGBTwnIl0D9xgJn1BY5197+Y2Y117cjMxgPjAbp27Up5eXnTVxuxqqqqFll3KkLrObR+QT23FlEGgSVZ5zUbzQ4B7gXG1bcjd58OTAcoKSnxQYMGNU2FaVReXk5LrDsVofUcWr+gnluLKE8NVQLHJiznAZsTltsDBUC5mW0ETgbm6oKxiEh6RRkEy4CeZtbdzNoCY4C5eze6+w537+Lu+e6eD/wdGOnuyyOsSUREaoksCNy9GrgWWAisBea4+2ozm2RmI6N6XhEROThRXiPA3ecD82utu62OsYOirEVERJLTJ4tFRAKnIBARCZyCQEQkcAoCEZHAKQhERAKnIBARCZyCQEQkcAoCEZHAKQhERAKnIBARCZyCQEQkcAoCEZHAKQhERAKnIBARCZyCQEQkcAoCEZHAKQhERAKnIBARCZyCQEQkcAoCEZHAKQhERAKnIBARCZyCQEQkcAoCEZHAKQhERAKnIBARCZyCQEQkcAoCEZHAKQhERAIXaRCY2dlmts7M1pvZxCTbrzazN8xspZktNbNeUdYjIiL7iywIzCwLmAaMAHoBZUn+0D/u7ie6exFwF3BPVPWIiEhyUR4RlALr3X2Du38KzAbOTxzg7jsTFtsBHmE9IiKSRJsI930MsClhuRLoX3uQmf0ImAC0BYYk25GZjQfGA3Tt2pXy8vKmrjVyVVVVLbLuVITWc2j9gnpuLaIMAkuybr9X/O4+DZhmZpcAtwJXJBkzHZgOUFJS4oMGDWraStOgvLycllh3KkLrObR+QT23FlGeGqoEjk1YzgM2H2D8bOA7EdYjIiJJRBkEy4CeZtbdzNoCY4C5iQPMrGfC4rlARYT1iIhIEpGdGnL3ajO7FlgIZAEz3H21mU0Clrv7XOBaMzsT+Az4F0lOC4mISLSivEaAu88H5tdad1vC/eujfH4REamfPlksIhI4BYGISOAUBCIigVMQiIgETkEgIhI4BYGISOAUBCJN5MmKTzNdgkijKAhEmsjTb3+W6RJEGkVBICISOAWBiEjgFAQiIoGLdK4hkdbq3r++xX2L958sN3/ivH2Wrx/akxuGHZeuskQaRUEg0gg3DDtuvz/w+RPnsXHyuRmqSKTxdGpIRCRwCgIRkcApCEREAqcgEGki538zO9MliDSKgkCkiVzQs22mSxBpFAWBiEjgFAQiIoFTEIiIBM7cPdM1HBQz2wq8k+k6GqELsC3TRaRZaD2H1i+o55bk6+7+1WQbWlwQtFRmttzdSzJdRzqF1nNo/YJ6bi10akhEJHAKAhGRwCkI0md6pgvIgNB6Dq1fUM+tgq4RiIgETkcEIiKBUxCIiAROQZAiMzvbzNaZ2Xozm5hk+9fNbLGZrTKzcjPLS9j2NTNbZGZrzWyNmeWns/bGSrHnu8xsdbzn+83M0lt945jZDDP7wMzerGO7xftZH++7OGHbFWZWEb9dkb6qG6+x/ZpZkZm9FP83XmVmo9NbeeOl8m8c3364mb1nZr9KT8VNyN11a+QNyALeBr4BtAVeB3rVGvNH4Ir4/SHAHxK2lQPD4vdzga9kuqcoewZOAV6I7yMLeAkYlOmeGtj36UAx8GYd288BngEMOBl4Ob7+CGBD/L+d4vc7ZbqfCPs9DugZv380sAXomOl+ouw5Yft9wOPArzLdy8HedESQmlJgvbtvcPdPgdnA+bXG9AIWx+8v2bvdzHoBbdz9rwDuXuXuH6Wn7JQ0umfAgRxiAXIokA28H3nFTcDdnwP+eYAh5wOPeMzfgY5mdhRwFvBXd/+nu/8L+CtwdvQVp6ax/br7W+5eEd/HZuADIOmnWZubFP6NMbOTgK7AougrbXoKgtQcA2xKWK6Mr0v0OnBR/P4FQHsz60zsldOHZvY/Zvaamf3SzLIirzh1je7Z3V8iFgxb4reF7r424nrTpa6fS0N+Xi1RvX2ZWSmx0H87jXVFKWnPZnYIcDdwU0aqagIKgtQkO79d+/24NwJnmNlrwBnAe0A10AY4Lb69H7FTLeMiq7TpNLpnM+sBnADkEfufaoiZnR5lsWlU18+lIT+vluiAfcVfKf8BuNLdv0hbVdGqq+cfAvPdfVOS7S1Cm0wX0MJVAscmLOcBmxMHxA+PLwQws1zgInffYWaVwGvuviG+7Sli5x3/Kx2FpyCVnscDf3f3qvi2Z4j1/Fw6Co9YXT+XSmBQrfXlaasqOnX+HpjZ4cA84Nb4KZTWoq6eBwCnmdkPiV3ra2tmVe6+3xspmisdEaRmGdDTzLqbWVtgDDA3cYCZdYkfOgLcDMxIeGwnM9t7/nQIsCYNNacqlZ7fJXak0MbMsokdLbSWU0Nzgcvj7yw5Gdjh7luAhcBwM+tkZp2A4fF1LV3SfuO/E08SO5f+x8yW2OSS9uzuY939a+6eT+xo+JGWFAKgI4KUuHu1mV1L7H/sLGCGu682s0nAcnefS+zV4H+amRN75fuj+GM/N7MbgcXxt1CuAB7KRB8HI5WegT8RC7w3iB1SL3D3P6e7h8Yws1nE+uoSP5q7ndjFbtz9N8B8Yu8qWQ98BFwZ3/ZPM/sPYgEKMMndD3RBsllobL/AxcTefdPZzMbF141z95VpK76RUui5xdMUEyIigdOpIRGRwCkIREQCpyAQEQmcgkBEJHAKAhGRwCkIROLM7HMzW5lwa7L3gptZfl2zWopkmj5HIPKlPe5elOkiRNJNRwQi9TCzjWb2f8zslfitR3x94vcuLDazr8XXdzWzJ83s9fjtlPiusszsofhc/YvM7LD4+Oss9n0Uq8xsdobalIApCES+dFitU0OJX6qy091LgV8BU+PrfkVsOoFC4DHg/vj6+4H/6+59iM1vvzq+vicwzd17Ax/y5QytE4G+8f1cHVVzInXRJ4tF4uITheUmWb8RGOLuG+JzJP0/d+9sZtuAo9z9s/j6Le7excy2Annu/knCPvKJfS9Bz/jyT4Fsd7/TzBYAVcBTwFN7J+UTSRcdEYg0jNdxv64xyXyScP9zvrxGdy4wDTgJWGFmunYnaaUgEGmY0Qn/fSl+/0Vis68CjAWWxu8vBq4BMLOs+LTMScVnaT3W3ZcA/w50JDaVsUja6JWHyJcOM7PEWTIXJEwnfKiZvUzsxVNZfN11wAwzuwnYypezUV4PTDez7xF75X8NsW9kSyYLeNTMOhD74pN73f3DJutIpAF0jUC
kHvFrBCXuvi3TtYhEQaeGREQCpyMCEZHA6YhARCRwCgIRkcApCEREAqcgEBEJnIJARCRw/x/oJ0Z2X27fKgAAAABJRU5ErkJggg==\n",
|
| 191 |
+
"text/plain": [
|
| 192 |
+
"<Figure size 432x288 with 1 Axes>"
|
| 193 |
+
]
|
| 194 |
+
},
|
| 195 |
+
"metadata": {
|
| 196 |
+
"needs_background": "light"
|
| 197 |
+
},
|
| 198 |
+
"output_type": "display_data"
|
| 199 |
+
}
|
| 200 |
+
],
|
| 201 |
+
"source": [
|
| 202 |
+
"# Plotting our loss charts\n",
|
| 203 |
+
"import matplotlib.pyplot as plt\n",
|
| 204 |
+
"\n",
|
| 205 |
+
"history_dict = history.history\n",
|
| 206 |
+
"\n",
|
| 207 |
+
"loss_values = history_dict['loss']\n",
|
| 208 |
+
"val_loss_values = history_dict['val_loss']\n",
|
| 209 |
+
"epochs = range(1, len(loss_values) + 1)\n",
|
| 210 |
+
"\n",
|
| 211 |
+
"line1 = plt.plot(epochs, val_loss_values, label='Validation/Test Loss')\n",
|
| 212 |
+
"line2 = plt.plot(epochs, loss_values, label='Training Loss')\n",
|
| 213 |
+
"plt.setp(line1, linewidth=2.0, marker = '+', markersize=10.0)\n",
|
| 214 |
+
"plt.setp(line2, linewidth=2.0, marker = '4', markersize=10.0)\n",
|
| 215 |
+
"plt.xlabel('Epochs') \n",
|
| 216 |
+
"plt.ylabel('Loss')\n",
|
| 217 |
+
"plt.grid(True)\n",
|
| 218 |
+
"plt.legend()\n",
|
| 219 |
+
"plt.show()"
|
| 220 |
+
]
|
| 221 |
+
},
|
| 222 |
+
{
|
| 223 |
+
"cell_type": "code",
|
| 224 |
+
"execution_count": 7,
|
| 225 |
+
"metadata": {},
|
| 226 |
+
"outputs": [
|
| 227 |
+
{
|
| 228 |
+
"data": {
|
| 229 |
+
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAY4AAAEGCAYAAABy53LJAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO3deXxU9b3/8deHsFkTFkHBCwrYUitSCBBBflAN0rLYK6igglrFqrhWr9YF13pR79XeuhZB0apFJZHaC6UFhIrErWiRCihwkUXQACKyBwEJfH5/zMk4DJNkDslkEvJ+Ph7zyDnf8z3f+XxCyCdnme8xd0dERCRZddIdgIiI1CwqHCIiEooKh4iIhKLCISIioahwiIhIKHXTHUBVaN68ubdt2zbdYYSyc+dOjjzyyHSHUaWUc+2gnGuO+fPnf+3uR8e314rC0bZtWz788MN0hxFKQUEBubm56Q6jSinn2kE51xxmtiZRu05ViYhIKCocIiISigqHiIiEUiuucYhUV5OXf0vJqe+9e/dSWFjI7t270xpTqjVu3JilS5emO4wqVd1zbtiwIa1bt6ZevXpJ9VfhEEmjv6zcyxPBcmFhIVlZWbRt2xYzS2tcqbRjxw6ysrLSHUaVqs45uzubNm2isLCQdu3aJbWPTlWJVBO7d++mWbNmh3XRkOrHzGjWrFmoI10VDpFqREVD0iHsz11KC4eZDTCzZWa2wsxGJdjexsxmm9kiMysws9ZBe7aZzTWzxcG2C2L2edHMPjOzBcErO5U5iFR3j/3903SHILVMygqHmWUATwEDgQ7AcDPrENftd8AEd+8EjAb+O2j/BrjE3U8GBgCPm1mTmP1udffs4LUgVTmIVKbH/v4pbUdNO+AFRJcLt+xiUeFWNmwPd3H8idnLKyW+3NxcZs6ceUDb448/zrXXXlvmfpmZmQCsW7eOoUOHljp2eR/Cffzxx/nmm2+i62eeeSZbt25NJvSE5s6dS7t27cjOziY7O5vMzExOPPFEsrOzueSSS0KNtX//fh566KEy+8ybNw8zY/bs2Yccc43h7il5AT2BmTHrdwB3xPVZDLQOlg3YXspYC4H2wfKLwNAwsXTr1s1rmjlz5qQ7hCpXG3Nuc/vfostLliyp8BgV8fTTT/uIESMOaOvRo4e//fbbZe535JFHljv26aef7vPmzXN39+3btyfs06ZNG9+4cWOS0Zbv3nvv9ddeey1hDGHt3bvXGzduXGafm266yXv37u2XX375QdtKy/lQY0mFRD9/wIee4HdqKu+qagV8EbNeCPSI67MQGAI8AZwDZJlZM3ffVNLBzLoD9YGVMfs9aGb3ArOBUe6+J/7NzWwkMBKgRYsWFBQUVDihqlRUVFTjYq6o2pgzEM25cePG7Nix45DGONT9YvXv35+77rqLr7/+mgYNGrBmzRrWrl1L586dWb9+PcOHD2fr1q3s3buXe+65h5///OcHvP+aNWs4//zz+eCDD9i1axfXXHMNy5Yt48QTT6SoqIidO3eyY8cObrzxRhYsWMCuXbsYPHgwd911F+PGjWPdunWcfvrpNGvWjGnTptGxY0feeustmjVrxpgxY3jppZcAuOSSS7juuutYs2YNQ4YMoWfPnnzwwQcce+yx5Ofnc8QRRwAwa9Ysrrzyyuj3Zt++fdEYAIqLi7nnnnuYO3cue/bs4eqrr+bSSy9l3bp1jBgxgp07d1JcXMyTTz7J1KlT2bFjB506deLkk09m/PjxB3zv9u/fz2uvvcbf/vY3BgwYwKZNm6hfvz4AL7/8MmPGjKFOnTp07tyZcePGsWHDBm688UbWrFmDmfHEE09w1FFHcckll/Dee+8B8Oijj1JcXMxtt91Gv3796N27N//4xz8466yzaNu2Lb/73e/Yu3cvzZo147nnnuPoo49mx44d3HLLLSxcuBAz46677uKrr75i1apVPPDAAwA899xzrFmzhvvvv/+AHHbv3p30/79UFo5EV1vin1N7CzDGzEYAbwNrgeLoAGbHAi8Bl7r7/qD5DuBLIsVkPHA7kdNcB76R+/hgOzk5OV7T5ompqXPbVERtzJnXp0VzXrp0afSWzZLTWMn68YNvJ9Vv9UM/L3VbVlYWPXr04L333mPw4MH87W9/Y9iwYTRq1Ijvfe97TJ06lUaNGvH1119z6qmncsEFF0QvqmZlZZGZmUmdOnXIysri2WefpXHjxnzyyScsWrSIrl27cuSRR5KVlcVvfvMb2rRpw759++jbty+fffYZt912G2PHjuWtt96iefPmQOSCbWZmJp9++ikTJ05k3rx5uDs9evSgf//+NG3alJUrV/Lqq6+SnZ3N+eefz6xZs7j44ov5+uuvo59NKJGRkRGNAWDs2LG0bt2a+fPns2fPHk499VQGDRrElClTOPvss7n99tvZt28fu3btIjc3lwkTJrBo0aKE37uCggJOOukkOnXqRO/evXnvvfcYNGgQCxcu5IknnmDmzJm0adOGzZs3k5WVxYgRIxg4cCDXX389xcXFfPPNN3z11VfR7x9AgwYNyMjIICsri4yMDHbv3h0tKlu2bIl+/59++mmeeeYZHn74Ye677z7+7d/+jYkTJ+LubN26lbp165Kdnc0jjzxC3bp1ycvL48UXXzzo9uCGDRvSpUuXpH6OUlk4CoHjYtZbA+tiO7j7OuBcADPLBIa4+7ZgvREwDbjb3d+P2Wd9sLjHzF4gUnxEpBIMHz6c/Px8Bg8eTH5+Ps8//zwQOaV955138vbbb1OnTh3Wrl3Lhg0baNmyZcJx3n77bW644QYAOnXqRKdOnaLbJk+ezIQJEyguLmb9+vUsWbLkgO3x3n33Xc4555zo7LLnnnsu77zzDoMGDYpewwDo1q0bq1evBiJHG/369Ssz11mzZrF06VLy8/MB2LZtG8uXL+eUU07hqquuYvfu3Zx99tl07tyZ4uLiMsfKy8tj2LBhAAwbNoy8vDwGDRrEm2++yQUXXMBRRx0FEP1aUFAQfd+6devSqFEjvvrqqzLfo2R8gM8//5zzzz+fL7/8kj179vDDH/4QgDfeeIMpU6YAkcLbtGlTAE477TRmzJjBCSecQEZGBh06xF9uDieVhWMe0N7M2hE5khgGXBjbwcyaA5uDo4k7gOeD9vrAZCIXzv8Ut8+x7r7eIn/qnA18ksIcRFJq8PcTf1K3rCODeG1HTQvVvyxnn302N998M//617/YtWsXXbt2BeCVV15h48aNzJ8/n3r16tG2bdty7/tPdIvnZ599xpNPPsn8+fNp2rQpI0aMKHecyKn2xBo0aBBdzsjIYNeuXQDMmDGDm2++udxxx44dS9++fQ/aVlBQwLRp07jooou44447uOCCCxKMELF3714mT57M9OnT+c///E/279/P1q1b2blzJ+5e6q2u8e1169Zl//790fXdu3dTt+53v6Jjp2W/7rrruPPOOznzzDN54403ohfuS3u/K664gkcffZS2bdty2WWXlZpLslJ2V5W7FwPXAzOBpcAkd19sZqPNb
FDQLRdYZmafAi2AB4P284HTgBEJbrt9xcw+Bj4GmgMPpCoHkVQ7p339dIdwgMzMTHJzc/nlL3/J8OHDo+3btm3jmGOOoV69esyZM4c1axLOth112mmn8corrwBET1cBbN++nSOPPJLGjRuzYcMGZsyYEd0nKysr4bWa0047jSlTpvDNN9+wc+dOJk+ezE9+8pNS39vdWbRoUfRIpDT9+/dn7Nix0aOJZcuWsWvXLtasWUPLli0ZOXIkI0aM4KOPPor+Ak905DFr1ixOOeUUvvjiC1avXs3nn3/OWWedxdSpU/npT39Kfn4+mzdvBoh+7dOnD08//TQQufayfft2WrZsybp169iyZQu7d+9m2rTST1du27aNVq1a4e788Y9/jLb369ePMWPGRL8PW7ZsAaBXr16sXLmSP/3pT2UWwWSldMoRd58OTI9ruzdm+TXgtQT7vQy8XMqYZ1RymCISY/jw4Zx77rnRUykAF110EWeddRY5OTlkZ2fzox/9qMwxrrnmGi677DI6depEdnY23bt3B6Bz587RC8wnnHACvXr1iu4zcuRIBg4cyLHHHsucOXOi7V27dmXEiBHRMa644gq6dOkSPS0Vb/78+XTp0qXcD7VdddVVfP7559ECc8wxx/CXv/yF2bNn8+ijj1KvXj0yMzN5+eXIr6LLL7+cTp06kZOTw4QJE6Lj5OXlcc455xww9pAhQ3jhhRf461//ym233cbAgQOpX78+3bp14w9/+ANjxozhyiuv5JlnnqFu3bo888wzdO/enTvvvJNTTjmFE044oczTSffddx/nnHMOrVu3pnv37qxfHzmD/5vf/IZrr72Wjh07kpGRwf3338+gQZG/04cOHcr//d//0bhx4zK/L0lJdKvV4fbS7bg1Q23POd2341aVyrw1NZH777/f8/LyUvoeYaU652T079/fCwoKSt1eXW7HFZEqcGPf9ukOoVq5++670x1CtbJp0yZ69uxJt27dOP300ytlTBUOkRrupp/9MN0hSDXWrFkzPv20cqel0SSHIiISigqHiIiEosIhIiKhqHCIiEgoKhwiNdW7j8GODZU23KZNm6JTkLds2ZJWrVpF17/99tukxrjssstYtmxZmX3Gjx8f/XBgZdiwYQN169blD3/4Q6WNKWXTXVUiNVXRV/DeEzDgvypluGbNmrFgQeTxNvfddx+ZmZnccsuBU8GV3Mdfp07ivzlfeOGFct9n5MiRlfr87VdffZWePXuSl5fH5ZdfXmnjxisuLj5gCpDaTEccIjVVrxth4cRKPepIZMWKFXTs2JGrr76arl27sn79ekaOHElOTg4nn3wyo0d/Nzl17969WbBgAcXFxTRp0oRRo0bRuXNnevbsGZ3Eb/To0Tz++OPR/qNGjaJ79+6ceOKJ/OMf/wBg586dDBkyhM6dOzN8+HBycnKiRS1eXl4ejz/+OKtWreLLL7+Mtk+bNo2uXbvSuXPn6ISHO3bs4NJLL+XHP/4xnTp1YsqUKdFYS+Tn53PFFVcAcPHFF/PrX/+aPn36cOedd/L+++/Ts2dPunTpQq9evVi+PPIQreLiYm666SY6duxIp06dGDt2LDNnzuS8886LjjtjxgzOP//8Cv97VAcqnyLV0X0hpoV4JMTnOO7bFj4WYMmSJbzwwgvR+ZUeeughjjrqKIqLi+nTpw9Dhw49aIqMbdu2cfrpp/PQQw9x88038/zzzzNq1EFPkMbd+ec//8nUqVMZPXo0r7/+Or///e9p2bIlf/7zn1m4cGF0ssV4q1evZsuWLXTr1o2hQ4cyadIkbrjhBr788kuuueYa3nnnneh05hA5kjr66KP5+OOPo9OOl2flypXMnj2bOnXqsG3bNt59910yMjJ4/fXXufvuu3n11VejzxNZuHAhGRkZbN68mSZNmnDDDTdEn83xwgsvVMoEg9WBjjhEpFzf//73OeWUU6LreXl5dO3ala5du7J06VKWLFly0D5HHHEEAwcOBA6c8jzeueeee1Cfd999NzqNeOfOnTn55JMT7puXlxedtK9kOnOIPDa2T58+tGnTBvhuOvM33niD6667Djhw2vGynHfeedFTc1u3buXcc8+lY8eO3HLLLSxevDg67tVXX01GRkb0/erUqcOFF17IxIkT2bx5M/Pnzy93qveaQkccItVRMkcG3+6E8X2g939A9oXl96+A2Cm9ly9fzhNPPME///lPmjRpwsUXX5xwavSSJ+BBZMrz0p5pUTI1emwfL2Mq9Vh5eXls2rQpOkPsunXr+Oyzz0qdXjxRe506dQ54v/hcYnO/66676N+/P9deey0rVqxgwIABpY4L8Mtf/pIhQ4awe/duLrjggmhhqel0xCFSU027BVrnpLxoxNu+fTtZWVk0atSI9evXM3PmzEp/j969ezNp0iQAPv7444RHNEuWLGHfvn2sXbuW1atXs3r1am699Vby8/Pp1asXb775ZnT695JTVYmmHa9Tpw5NmzZl+fLl7N+/n8mTJ5caV8l05gAvvvhitL1fv36MGzeOffv2HfB+xx13HM2bN+exxx5jxIgRFfumVCMqHCI10UevwLp/wZn/U+Vv3bVrVzp06EDHjh258sorD5gavbL86le/Yu3atXTq1IlHHnmEjh07HjQd+MSJExNOZz5x4kRatGjBuHHjGDx4MJ07d+aiiy4CItOOb9iwgY4dO5Kdnc0777wDwMMPP8yAAQPo27fvAY+bjXf77bdz6623HpTzVVddRcuWLenUqROdO3eOFj2ACy+8kDZt2kSf0ndYSDRl7uH20rTqNUNtzznUtOrvPOa+4dCmYU+3ZKYY37t3r+/atcvd3T/99FNv27at7927N9WhpcRVV13l48aNS3cY5dK06iKHu97/ke4IUqqoqIi+fftSXFyMu0cfeFTTZGdn07RpUx588MHyO9cgNe9fQkQOe02aNGH+/PnpDqPCSj57kuiRuDWZrnGIVCOe5N1EIpUp7M+dCodINdGwYUM2bdqk4iFVyt3ZtGkTDRs2THqflJ6qMrMBwBNABvCcuz8Ut70N8DxwNLAZuNjdC4NtlwIlz4B8wN3/GLR3A14EjgCmAze6/qfJYaB169YUFhaycePGdIeSUrt37w71S+pwUN1zbtiwYZl3k8VLWeEwswzgKeBnQCEwz8ymunvsDdm/Aya4+x/N7Azgv4FfmNlRwG+AHMCB+cG+W4BxwEjgfSKFYwAwI1V5iFSVevXq0a5du3SHkXIFBQV06dIl3WFUqcMt51SequoOrHD3Ve7+LZAPDI7r0wGYHSzPidneH/i7u28OisXfgQFmdizQyN3nBkcZE4CzU5iDiIjESeWpqlbAFzHrhUCPuD4LgSFETmedA2SZWbNS9m0VvAoTtB/EzEYSOTKhRYsWFBQUHGoeaVFUVFTjYq4o5Vw7KOeaL5WF4+CJWyKnnWLdAowxsxHA28BaoLiMfZMZM9LoPh4YD5CTk+O5ublJBV1dFBQUUNNirijlXDso55ovlYWjEDguZr01sC62g7uvA84FMLNMYIi7bzOzQiA3bt+CYMzWce0HjCki
IqmVymsc84D2ZtbOzOoDw4CpsR3MrLmZlcRwB5E7rABmAv3MrKmZNQX6ATPdfT2ww8xOtchUlJcAf0lhDiIiEidlhcPdi4HriRSBpcAkd19sZqPNbFDQLRdYZmafAi2AB4N9NwP3Eyk+84DRQRvANcBzwApgJbqjSkSkSqX0cxzuPp3ILbOxbffGLL8GvFbKvs/z3RFIbPuHQMfKjVRERJKlT46LiEgoKhwiIhKKCoeIiISiwiEiIqGocIiISCgqHCIiEooKh4iIhKLCISIioahwiIhIKCocIiISigqHiIiEosIhIiKhqHCIiEgoKhwiIhKKCoeIiISiwiEiIqGocIiISCgqHCIiEooKh4iIhKLCISIioaS0cJjZADNbZmYrzGxUgu3Hm9kcM/vIzBaZ2ZlB+0VmtiDmtd/MsoNtBcGYJduOSWUOIiJyoLqpGtjMMoCngJ8BhcA8M5vq7ktiut0NTHL3cWbWAZgOtHX3V4BXgnF+DPzF3RfE7HeRu3+YqthFRKR0qTzi6A6scPdV7v4tkA8MjuvjQKNguTGwLsE4w4G8lEUpIiKhmLunZmCzocAAd78iWP8F0MPdr4/pcywwC2gKHAn81N3nx42zEhjs7p8E6wVAM2Af8GfgAU+QhJmNBEYCtGjRolt+fn6l55hKRUVFZGZmpjuMKqWcawflXHP06dNnvrvnxLen7FQVYAna4n/BDwdedPdHzKwn8JKZdXT3/QBm1gP4pqRoBC5y97VmlkWkcPwCmHDQG7mPB8YD5OTkeG5uboUTqkoFBQXUtJgrSjnXDsq55kvlqapC4LiY9dYcfCrqcmASgLvPBRoCzWO2DyPuNJW7rw2+7gAmEjklJiIiVSSVhWMe0N7M2plZfSJFYGpcn8+BvgBmdhKRwrExWK8DnEfk2ghBW10zax4s1wP+HfgEERGpMik7VeXuxWZ2PTATyACed/fFZjYa+NDdpwK/Bp41s5uInMYaEXO94jSg0N1XxQzbAJgZFI0M4A3g2VTlICIiB0vlNQ7cfTqRW2xj2+6NWV4C9Cpl3wLg1Li2nUC3Sg9URESSpk+Oi4hIKCocIiISigqHiIiEosIhIiKhqHCIiEgoKhwiIhKKCoeIiISiwiEiIqGocIiISCgqHCIiEooKh4iIhKLCISIioahwiIhIKOUWDjO73syaVkUwIiJS/SVzxNESmGdmk8xsgJkleiSsiIjUEuUWDne/G2gP/AEYASw3s/8ys++nODYREamGkrrGETyV78vgVQw0BV4zs9+mMDYREamGyn0CoJndAFwKfA08B9zq7nuDZ4IvB25LbYgiIlKdJPPo2ObAue6+JrbR3feb2b+nJiwREamukjlVNR3YXLJiZllm1gPA3ZemKjAREamekikc44CimPWdQVu5gruwlpnZCjMblWD78WY2x8w+MrNFZnZm0N7WzHaZ2YLg9XTMPt3M7ONgzCd1l5eISNVKpnBYcHEciJyiIrlrIxnAU8BAoAMw3Mw6xHW7G5jk7l2AYcDYmG0r3T07eF0d0z4OGEnkTq/2wIAkchARkUqSTOFYZWY3mFm94HUjsCqJ/boDK9x9lbt/C+QDg+P6ONAoWG4MrCtrQDM7Fmjk7nODYjYBODuJWEREpJIkc3H8auBJIkcHDswm8hd/eVoBX8SsFwI94vrcB8wys18BRwI/jdnWzsw+ArYDd7v7O8GYhXFjtkr05mY2siTOFi1aUFBQkETI1UdRUVGNi7milHPtoJxrvnILh7t/ReQ0UliJrj143Ppw4EV3f8TMegIvmVlHYD1wvLtvMrNuwBQzOznJMUviHg+MB8jJyfHc3NxDSCF9CgoKqGkxV5Ryrh2Uc82XzLWKhsDlwMlAw5J2d/9lObsWAsfFrLfm4FNRlxNco3D3ucF7NQ+K1Z6gfb6ZrQR+GIzZupwxRUQkhZK5xvESkfmq+gNvEfllvSOJ/eYB7c2snZnVJ3LUMjWuz+dAXwAzO4lIYdpoZkcHF9cxsxOIXARf5e7rgR1mdmpwN9UlwF+SiEVERCpJMoXjB+5+D7DT3f8I/Bz4cXk7uXsxcD0wE1hK5O6pxWY22swGBd1+DVxpZguBPGBEcNH7NGBR0P4acLW7l3yW5Boin2BfAawEZiSZq4iIVIJkLo7vDb5uDa4/fAm0TWZwd59O5AOEsW33xiwvAXol2O/PwJ9LGfNDoGMy7y8iIpUvmcIxPngex91ETjVlAvekNCoREam2yiwcwUSG2919C/A2cEKVRCUiItVWmdc4gk+JX19FsYiISA2QzMXxv5vZLWZ2nJkdVfJKeWQiIlItJXONo+TzGtfFtDk6bSUiUisl88nxdlURiIiI1AzJfHL8kkTt7j6h8sMREZHqLplTVafELDck8knvfxGZmVZERGqZZE5V/Sp23cwaE5mGREREaqFk7qqK9w2RuaNERKQWSuYax1/5buryOkSe5jcplUGJiEj1lcw1jt/FLBcDa9y9sLTOIiJyeEumcHwOrHf33QBmdoSZtXX31SmNTEREqqVkrnH8Cdgfs74vaBMRkVoomcJR192/LVkJluunLiQREanOkikcG2MevISZDQa+Tl1IIiJSnSVzjeNq4BUzGxOsFxJ5ZKuIiNRCyXwAcCVwqpllAubuyTxvXEREDlPlnqoys/8ysybuXuTuO8ysqZk9UBXBiYhI9ZPMNY6B7r61ZCV4GuCZqQtJRESqs2QKR4aZNShZMbMjgAZl9I8yswFmtszMVpjZqATbjzezOWb2kZktMrMzg/afmdl8M/s4+HpGzD4FwZgLgtcxycQiIiKVI5mL4y8Ds83shWD9MuCP5e1kZhnAU8DPiFxQn2dmU919SUy3u4FJ7j7OzDoA04G2RO7aOsvd15lZR2Am0Cpmv4vc/cMkYhcRkUqWzMXx35rZIuCngAGvA22SGLs7sMLdVwGYWT4wGIgtHA40CpYbA+uC9/wops9ioKGZNXD3PUm8r4iIpFCys+N+SeTT40OIPI9jaRL7tAK+iFkv5MCjBoD7gIvNrJDI0cavONgQ4KO4ovFCcJrqHjOz5FIQEZHKUOoRh5n9EBgGDAc2Aa8SuR23T5JjJ/qF7nHrw4EX3f0RM+sJvGRmHd19fxDDycDDQL+YfS5y97VmlgX8GfgFCR4qZWYjgZEALVq0oKCgIMmwq4eioqIaF3NFKefaQTkfBtw94YvIEcZbwA9i2laV1j/B/j2BmTHrdwB3xPVZDBwXOz5wTLDcGvgU6FXGe4wAxpQXS7du3bymmTNnTrpDqHLKuXZQzjUH8KEn+J1a1qmqIUROUc0xs2fNrC+JjyJKMw9ob2btzKw+kaOXqXF9Pidy6gszO4nIo2k3mlkTYFpQaN4r6Wxmdc2sebBcD/h34JMQMYmISAWVWjjcfbK7XwD8CCgAbgJamNk4M+tX2n4x+xcD1xO5I2opkbunFpvZ6Ji5r34NXGlmC4E8YERQ5a4HfgDcE3fbbQNgZnCxfgGwFnj2kDIXEZFDksxdVTuBV4jMV3UUcB4wCpiVxL7TiVz0jm27N2Z5CdArwX4PAKV9Or1
bee8rIiKpE+qZ4+6+2d2fcfczyu8tIiKHo1CFQ0RERIVDRERCUeEQEZFQVDhERCQUFQ4REQlFhUNEREJR4RARkVBUOEREJBQVDhERCUWFQ0REQlHhEBGRUFQ4REQkFBUOEREJRYVDRERCUeEQEZFQVDhERCQUFQ4REQlFhUNEREJR4RARkVBUOEREJJSUFg4zG2Bmy8xshZmNSrD9eDObY2YfmdkiMzszZtsdwX7LzKx/smOKiEhqpaxwmFkG8BQwEOgADDezDnHd7gYmuXsXYBgwNti3Q7B+MjAAGGtmGUmOKSIiKZTKI47uwAp3X+Xu3wL5wOC4Pg40CpYbA+uC5cFAvrvvcffPgBXBeMmMKSIiKVQ3hWO3Ar6IWS8EesT1uQ+YZWa/Ao4Efhqz7/tx+7YKlssbEwAzGwmMBGjRogUFBQWhE0inoqKiGhdzRSnn2kE513ypLByWoM3j1tD5lG4AAAvGSURBVIcDL7r7I2bWE3jJzDqWsW+iI6T4MSON7uOB8QA5OTmem5ubbNzVQkFBATUt5opSzrWDcq75Ulk4CoHjYtZb892pqBKXE7mGgbvPNbOGQPNy9i1vTBERSaFUXuOYB7Q3s3ZmVp/Ixe6pcX0+B/oCmNlJQENgY9BvmJk1MLN2QHvgn0mOKSIiKZSyIw53Lzaz64GZQAbwvLsvNrPRwIfuPhX4NfCsmd1E5JTTCHd3YLGZTQKWAMXAde6+DyDRmKnKQUREDpbKU1W4+3RgelzbvTHLS4Bepez7IPBgMmOKiEjV0SfHRUQkFBUOEREJRYVDRERCUeEQEZFQVDhERCQUFQ4REQlFhUNEREJR4RARkVBUOEREJBQVDhERCUWFQ0REQlHhEBGRUFQ4REQkFBUOEREJRYVDRERCUeEQEZFQVDhERCQUFQ4REQlFhUNEREJR4RARkVBSWjjMbICZLTOzFWY2KsH2x8xsQfD61My2Bu19YtoXmNluMzs72PaimX0Wsy07lTmIiMiB6qZqYDPLAJ4CfgYUAvPMbKq7Lynp4+43xfT/FdAlaJ8DZAftRwErgFkxw9/q7q+lKnYRESldKo84ugMr3H2Vu38L5AODy+g/HMhL0D4UmOHu36QgRhERCcncPTUDmw0FBrj7FcH6L4Ae7n59gr5tgPeB1u6+L27bm8Cj7v63YP1FoCewB5gNjHL3PQnGHAmMBGjRokW3/Pz8Sswu9YqKisjMzEx3GFVKOdcOyrnm6NOnz3x3z4lvT9mpKsAStJVWpYYBryUoGscCPwZmxjTfAXwJ1AfGA7cDow96I/fxwXZycnI8Nzc3ZPjpVVBQQE2LuaKUc+2gnGu+VJ6qKgSOi1lvDawrpe8wEp+mOh+Y7O57Sxrcfb1H7AFeIHJKTEREqkgqC8c8oL2ZtTOz+kSKw9T4TmZ2ItAUmJtgjIOuewRHIZiZAWcDn1Ry3CIiUoaUnapy92Izu57IaaYM4Hl3X2xmo4EP3b2kiAwH8j3uYouZtSVyxPJW3NCvmNnRRE6FLQCuTlUOIiJysFRe48DdpwPT49rujVu/r5R9VwOtErSfUXkRiohIWPrkuIiIhKLCISIioahwiIhIKCocIiISigqHiIiEosIhIiKhqHCIiEgoKhwiIhKKCoeIiISiwiEiIqGocIiISCgqHCLp8O5jsGNDuqMQOSQqHCLpUPQVvPdEuqMQOSQqHCLp0OtGWDiR+nu2pDsSkdBUOETSIasldB7OcV/8b7ojEQktpc/jEJEy/LA/x70/GO5rfGD76aOgzx3piUkkCSocIunw7U6YfhtLf3QjJw0bne5oRELRqSqRdJh2C7TOYUNLPdBSah4VDpGq9tErsO5fcOb/pDsSkUOiwiFS1XZuhPNehPpHpjsSkUOS0sJhZgPMbJmZrTCzUQm2P2ZmC4LXp2a2NWbbvphtU2Pa25nZB2a23MxeNbP6qcxBpNL1/g845qR0RyFyyFJWOMwsA3gKGAh0AIabWYfYPu5+k7tnu3s28Hsg9t7EXSXb3H1QTPvDwGPu3h7YAlyeqhxERORgqTzi6A6scPdV7v4tkA8MLqP/cCCvrAHNzIAzgNeCpj8CZ1dCrCIikqRU3o7bCvgiZr0Q6JGoo5m1AdoBb8Y0NzSzD4Fi4CF3nwI0A7a6e3HMmK1KGXMkMBKgRYsWFBQUHHomaVBUVFTjYq4o5Vw7KOeaL5WFwxK0eSl9hwGvufu+mLbj3X2dmZ0AvGlmHwPbkx3T3ccD4wFycnI8Nzc36cCrg4KCAmpazBWlnGsH5VzzpbJwFALHxay3BtaV0ncYcF1sg7uvC76uMrMCoAvwZ6CJmdUNjjrKGjNq/vz5X5vZmtAZpFdz4Ot0B1HFlHPtoJxrjjaJGlNZOOYB7c2sHbCWSHG4ML6TmZ0INAXmxrQ1Bb5x9z1m1hzoBfzW3d3M5gBDiVwzuRT4S3mBuPvRlZBPlTKzD909J91xVCXlXDso55ovZRfHgyOC64GZwFJgkrsvNrPRZhZ7l9RwIN/dY085nQR8aGYLgTlErnEsCbbdDtxsZiuIXPP4Q6pyEBGRg9mBv6+lujjc/kJJhnKuHZRzzadPjldf49MdQBoo59pBOddwOuIQEZFQdMQhIiKhqHCIiEgoKhxpkMTkj23MbLaZLTKzAjNrHbPteDObZWZLzWyJmbWtytgPVQVz/q2ZLQ5yfjKYeqZaM7PnzewrM/uklO0W5LIiyLlrzLZLg0k8l5vZpVUXdcUcas5mlm1mc4N/40VmdkHVRn7oKvLvHGxvZGZrzWxM1URcSdxdryp8ARnASuAEoD6wEOgQ1+dPwKXB8hnASzHbCoCfBcuZwPfSnVMqcwb+H/BeMEYGkc/75KY7pyRyPg3oCnxSyvYzgRlEZlg4FfggaD8KWBV8bRosN013PinO+YdA+2D534D1QJN055PKnGO2PwFMBMakO5cwLx1xVL1kJn/sAMwOlueUbA9mF67r7n8HcPcid/+masKukEPOmciUMg2JFJwGQD1gQ8ojriB3fxvYXEaXwcAEj3ifyIwIxwL9gb+7+2Z33wL8HRiQ+ogr7lBzdvdP3X15MMY64CugRnxotwL/zphZN6AFMCv1kVYuFY6ql2jyx/iJGhcCQ4Llc4AsM2tG5C+zrWb2v2b2kZn9TzB9fXV3yDm7+1wihWR98Jrp7ktTHG9VKO17ksz3qqYqNzcz607kj4SVVRhXKiXM2czqAI8At6YlqgpS4ah6yUz+eAtwupl9BJxOZMqWYiJTxPwk2H4KkVM/I1IWaeU55JzN7AdEZhJoTeQ/4Rlmdloqg60ipX1PwkwOWtOUmVvwl/hLwGXuvr/Kokqt0nK+Fpju7l8k2F7tpXKuKkms3Mkfg8P1cwHMLBMY4u7bzKwQ+MjdVwXbphA5b1rdp12pSM4jgffdvSjYNoNIzm9XReApVNr3pBDIjWsvqLKoUqvUnwMzawRMA+4OTukcLkrLuSfwEzO7lsi1yvpmVuTuB904Uh3piKPqRSd/tMhjb4cBU2M7mFnz4FAW4A7g+Zh9m5pZyfnfM4AlVH8VyflzIkcidc
2sHpGjkcPhVNVU4JLgrptTgW3uvp7I3G79zKypRSb77Be0HQ4S5hz8TEwmci3gT+kNsdIlzNndL3L34929LZGj7Qk1pWiAjjiqnLsXm1nJ5I8ZwPMeTP4IfOjuU4n8xfnfZuZE/rK+Lth3n5ndAswObkmdDzybjjzCqEjORJ72eAbwMZFD/Nfd/a9VnUNYZpZHJKfmwZHib4hc2MfdnwamE7njZgXwDXBZsG2zmd1PpNgCjHb3si6+VhuHmjNwPpG7k5qZ2YigbYS7L6iy4A9RBXKu0TTliIiIhKJTVSIiEooKh4iIhKLCISIioahwiIhIKCocIiISigqHyCEys31mtiDmVWn34ZtZ29JmXBVJN32OQ+TQ7XL37HQHIVLVdMQhUsnMbLWZPWxm/wxePwjaY585MtvMjg/aW5jZZDNbGLz+XzBUhpk9GzynYpaZHRH0v8Eiz2JZZGb5aUpTajEVDpFDd0TcqarYBxBtd/fuwBjg8aBtDJGpJToBrwBPBu1PAm+5e2ciz3ZYHLS3B55y95OBrXw3e/AooEswztWpSk6kNPrkuMghCialy0zQvho4w91XBfNrfenuzczsa+BYd98btK939+ZmthFo7e57YsZoS+S5HO2D9duBeu7+gJm9DhQBU4ApJRNAilQVHXGIpIaXslxan0T2xCzv47trkj8HngK6AfPNTNcqpUqpcIikxgUxX+cGy/8gMjMwwEXAu8HybOAaADPLCKYYTyiYQfg4d58D3AY0ITItt0iV0V8qIofuCDOLncH19ZipsRuY2QdE/jgbHrTdADxvZrcCG/luptQbgfFmdjmRI4triDztMJEM4GUza0zkIUGPufvWSstIJAm6xiFSyYJrHDnu/nW6YxFJBZ2qEhGRUHTEISIioeiIQ0REQlHhEBGRUFQ4REQkFBUOEREJRYVDRERC+f8p1HH4rJNybgAAAABJRU5ErkJggg==\n",
|
| 230 |
+
"text/plain": [
|
| 231 |
+
"<Figure size 432x288 with 1 Axes>"
|
| 232 |
+
]
|
| 233 |
+
},
|
| 234 |
+
"metadata": {
|
| 235 |
+
"needs_background": "light"
|
| 236 |
+
},
|
| 237 |
+
"output_type": "display_data"
|
| 238 |
+
}
|
| 239 |
+
],
|
| 240 |
+
"source": [
|
| 241 |
+
"# Plotting our accuracy charts\n",
|
| 242 |
+
"import matplotlib.pyplot as plt\n",
|
| 243 |
+
"\n",
|
| 244 |
+
"history_dict = history.history\n",
|
| 245 |
+
"\n",
|
| 246 |
+
"acc_values = history_dict['accuracy']\n",
|
| 247 |
+
"val_acc_values = history_dict['val_accuracy']\n",
|
| 248 |
+
"epochs = range(1, len(loss_values) + 1)\n",
|
| 249 |
+
"\n",
|
| 250 |
+
"line1 = plt.plot(epochs, val_acc_values, label='Validation/Test Accuracy')\n",
|
| 251 |
+
"line2 = plt.plot(epochs, acc_values, label='Training Accuracy')\n",
|
| 252 |
+
"plt.setp(line1, linewidth=2.0, marker = '+', markersize=10.0)\n",
|
| 253 |
+
"plt.setp(line2, linewidth=2.0, marker = '4', markersize=10.0)\n",
|
| 254 |
+
"plt.xlabel('Epochs') \n",
|
| 255 |
+
"plt.ylabel('Accuracy')\n",
|
| 256 |
+
"plt.grid(True)\n",
|
| 257 |
+
"plt.legend()\n",
|
| 258 |
+
"plt.show()"
|
| 259 |
+
]
|
| 260 |
+
},
|
| 261 |
+
{
|
| 262 |
+
"cell_type": "markdown",
|
| 263 |
+
"metadata": {},
|
| 264 |
+
"source": [
|
| 265 |
+
"#### Now let's display our Confusion Matrix and Classification Report"
|
| 266 |
+
]
|
| 267 |
+
},
|
| 268 |
+
{
|
| 269 |
+
"cell_type": "code",
|
| 270 |
+
"execution_count": 8,
|
| 271 |
+
"metadata": {},
|
| 272 |
+
"outputs": [
|
| 273 |
+
{
|
| 274 |
+
"name": "stdout",
|
| 275 |
+
"output_type": "stream",
|
| 276 |
+
"text": [
|
| 277 |
+
" precision recall f1-score support\n",
|
| 278 |
+
"\n",
|
| 279 |
+
" 0 0.94 0.98 0.96 980\n",
|
| 280 |
+
" 1 0.95 0.99 0.97 1135\n",
|
| 281 |
+
" 2 0.94 0.91 0.92 1032\n",
|
| 282 |
+
" 3 0.88 0.94 0.91 1010\n",
|
| 283 |
+
" 4 0.91 0.94 0.93 982\n",
|
| 284 |
+
" 5 0.96 0.85 0.90 892\n",
|
| 285 |
+
" 6 0.93 0.96 0.95 958\n",
|
| 286 |
+
" 7 0.90 0.94 0.92 1028\n",
|
| 287 |
+
" 8 0.92 0.88 0.90 974\n",
|
| 288 |
+
" 9 0.95 0.87 0.91 1009\n",
|
| 289 |
+
"\n",
|
| 290 |
+
" accuracy 0.93 10000\n",
|
| 291 |
+
" macro avg 0.93 0.93 0.93 10000\n",
|
| 292 |
+
"weighted avg 0.93 0.93 0.93 10000\n",
|
| 293 |
+
"\n",
|
| 294 |
+
"[[ 964 0 1 3 0 2 7 1 2 0]\n",
|
| 295 |
+
" [ 0 1120 4 2 0 1 3 0 5 0]\n",
|
| 296 |
+
" [ 12 4 940 14 10 0 11 19 21 1]\n",
|
| 297 |
+
" [ 1 2 11 950 1 5 0 18 17 5]\n",
|
| 298 |
+
" [ 1 4 6 0 925 1 18 3 2 22]\n",
|
| 299 |
+
" [ 18 3 4 60 10 754 16 4 19 4]\n",
|
| 300 |
+
" [ 13 3 4 2 3 9 920 1 3 0]\n",
|
| 301 |
+
" [ 2 21 20 2 9 0 0 963 3 8]\n",
|
| 302 |
+
" [ 7 10 8 33 10 11 11 24 856 4]\n",
|
| 303 |
+
" [ 11 9 3 18 49 3 1 39 3 873]]\n"
|
| 304 |
+
]
|
| 305 |
+
}
|
| 306 |
+
],
|
| 307 |
+
"source": [
|
| 308 |
+
"from sklearn.metrics import classification_report,confusion_matrix\n",
|
| 309 |
+
"import numpy as np\n",
|
| 310 |
+
"\n",
|
| 311 |
+
"y_pred = model.predict_classes(x_test)\n",
|
| 312 |
+
"\n",
|
| 313 |
+
"print(classification_report(np.argmax(y_test,axis=1), y_pred))\n",
|
| 314 |
+
"print(confusion_matrix(np.argmax(y_test,axis=1), y_pred))"
|
| 315 |
+
]
|
| 316 |
+
},
|
| 317 |
+
{
|
| 318 |
+
"cell_type": "markdown",
|
| 319 |
+
"metadata": {},
|
| 320 |
+
"source": [
|
| 321 |
+
"### Displaying our misclassified data"
|
| 322 |
+
]
|
| 323 |
+
},
|
| 324 |
+
{
|
| 325 |
+
"cell_type": "code",
|
| 326 |
+
"execution_count": 9,
|
| 327 |
+
"metadata": {},
|
| 328 |
+
"outputs": [
|
| 329 |
+
{
|
| 330 |
+
"name": "stdout",
|
| 331 |
+
"output_type": "stream",
|
| 332 |
+
"text": [
|
| 333 |
+
"Indices of misclassifed data are: \n",
|
| 334 |
+
"\n",
|
| 335 |
+
"(array([ 8, 33, 62, 66, 73, 77, 121, 124, 151, 193, 195,\n",
|
| 336 |
+
" 217, 233, 241, 247, 259, 290, 300, 313, 318, 320, 321,\n",
|
| 337 |
+
" 340, 341, 349, 352, 359, 362, 381, 403, 406, 412, 435,\n",
|
| 338 |
+
" 444, 445, 448, 464, 478, 479, 483, 495, 502, 507, 511,\n",
|
| 339 |
+
" 515, 528, 530, 543, 551, 565, 578, 582, 591, 606, 610,\n",
|
| 340 |
+
" 613, 619, 624, 628, 659, 667, 684, 689, 691, 707, 717,\n",
|
| 341 |
+
" 720, 728, 740, 791, 839, 844, 924, 939, 944, 947, 950,\n",
|
| 342 |
+
" 951, 956, 965, 975, 982, 992, 1003, 1014, 1032, 1033, 1039,\n",
|
| 343 |
+
" 1044, 1062, 1068, 1082, 1089, 1101, 1107, 1112, 1114, 1119, 1128,\n",
|
| 344 |
+
" 1152, 1181, 1192, 1198, 1200, 1204, 1206, 1224, 1226, 1228, 1232,\n",
|
| 345 |
+
" 1234, 1242, 1243, 1247, 1253, 1256, 1260, 1270, 1272, 1283, 1289,\n",
|
| 346 |
+
" 1299, 1319, 1326, 1378, 1393, 1402, 1409, 1423, 1433, 1440, 1453,\n",
|
| 347 |
+
" 1465, 1466, 1476, 1500, 1514, 1525, 1527, 1530, 1549, 1553, 1554,\n",
|
| 348 |
+
" 1559, 1581, 1587, 1601, 1609, 1621, 1634, 1640, 1678, 1681, 1709,\n",
|
| 349 |
+
" 1716, 1717, 1718, 1722, 1732, 1737, 1751, 1754, 1772, 1782, 1790,\n",
|
| 350 |
+
" 1819, 1850, 1874, 1878, 1899, 1901, 1917, 1930, 1938, 1940, 1948,\n",
|
| 351 |
+
" 1952, 1955, 1968, 1970, 1973, 1982, 1984, 2016, 2024, 2035, 2037,\n",
|
| 352 |
+
" 2040, 2043, 2044, 2052, 2053, 2068, 2070, 2093, 2098, 2099, 2109,\n",
|
| 353 |
+
" 2118, 2125, 2129, 2130, 2135, 2138, 2177, 2182, 2183, 2185, 2186,\n",
|
| 354 |
+
" 2189, 2192, 2208, 2215, 2224, 2232, 2266, 2272, 2293, 2299, 2325,\n",
|
| 355 |
+
" 2339, 2362, 2369, 2371, 2378, 2381, 2386, 2387, 2393, 2394, 2395,\n",
|
| 356 |
+
" 2404, 2406, 2408, 2414, 2422, 2425, 2447, 2460, 2488, 2515, 2526,\n",
|
| 357 |
+
" 2528, 2542, 2545, 2556, 2559, 2560, 2578, 2582, 2586, 2589, 2598,\n",
|
| 358 |
+
" 2604, 2607, 2610, 2631, 2635, 2648, 2654, 2670, 2695, 2698, 2730,\n",
|
| 359 |
+
" 2740, 2751, 2758, 2760, 2770, 2771, 2780, 2810, 2812, 2832, 2850,\n",
|
| 360 |
+
" 2863, 2896, 2905, 2914, 2925, 2927, 2930, 2945, 2953, 2970, 2986,\n",
|
| 361 |
+
" 2990, 2995, 3005, 3060, 3073, 3078, 3100, 3102, 3110, 3114, 3117,\n",
|
| 362 |
+
" 3130, 3132, 3133, 3136, 3139, 3145, 3157, 3167, 3189, 3206, 3225,\n",
|
| 363 |
+
" 3240, 3269, 3284, 3289, 3316, 3319, 3329, 3330, 3333, 3369, 3405,\n",
|
| 364 |
+
" 3406, 3414, 3436, 3475, 3503, 3520, 3549, 3552, 3558, 3565, 3567,\n",
|
| 365 |
+
" 3573, 3578, 3580, 3597, 3598, 3604, 3618, 3629, 3662, 3664, 3681,\n",
|
| 366 |
+
" 3687, 3702, 3709, 3716, 3718, 3726, 3732, 3751, 3757, 3763, 3767,\n",
|
| 367 |
+
" 3769, 3776, 3778, 3780, 3796, 3806, 3808, 3811, 3817, 3818, 3821,\n",
|
| 368 |
+
" 3833, 3836, 3838, 3846, 3848, 3853, 3855, 3862, 3869, 3876, 3893,\n",
|
| 369 |
+
" 3902, 3906, 3924, 3926, 3929, 3941, 3946, 3954, 3962, 3968, 3976,\n",
|
| 370 |
+
" 3984, 3985, 3998, 4000, 4031, 4063, 4065, 4068, 4072, 4075, 4076,\n",
|
| 371 |
+
" 4078, 4093, 4111, 4131, 4140, 4145, 4152, 4154, 4159, 4163, 4176,\n",
|
| 372 |
+
" 4201, 4205, 4211, 4212, 4224, 4238, 4248, 4255, 4265, 4271, 4272,\n",
|
| 373 |
+
" 4284, 4289, 4294, 4297, 4300, 4302, 4306, 4313, 4315, 4317, 4341,\n",
|
| 374 |
+
" 4355, 4360, 4374, 4380, 4405, 4425, 4433, 4435, 4449, 4451, 4454,\n",
|
| 375 |
+
" 4477, 4497, 4498, 4500, 4505, 4521, 4523, 4540, 4548, 4567, 4571,\n",
|
| 376 |
+
" 4575, 4601, 4615, 4633, 4639, 4640, 4662, 4671, 4724, 4731, 4735,\n",
|
| 377 |
+
" 4751, 4785, 4807, 4808, 4814, 4823, 4828, 4829, 4837, 4863, 4874,\n",
|
| 378 |
+
" 4876, 4879, 4880, 4886, 4890, 4910, 4943, 4950, 4952, 4956, 4966,\n",
|
| 379 |
+
" 4978, 4990, 5001, 5009, 5015, 5065, 5067, 5068, 5100, 5135, 5140,\n",
|
| 380 |
+
" 5210, 5217, 5246, 5299, 5311, 5331, 5360, 5457, 5522, 5562, 5600,\n",
|
| 381 |
+
" 5601, 5611, 5634, 5642, 5677, 5734, 5735, 5749, 5757, 5821, 5842,\n",
|
| 382 |
+
" 5852, 5862, 5867, 5874, 5887, 5888, 5891, 5913, 5922, 5936, 5937,\n",
|
| 383 |
+
" 5955, 5957, 5972, 5973, 5981, 5982, 5985, 6035, 6042, 6043, 6059,\n",
|
| 384 |
+
" 6071, 6081, 6091, 6112, 6124, 6157, 6166, 6168, 6172, 6173, 6304,\n",
|
| 385 |
+
" 6347, 6385, 6400, 6421, 6425, 6426, 6505, 6517, 6555, 6560, 6568,\n",
|
| 386 |
+
" 6569, 6571, 6574, 6576, 6577, 6597, 6598, 6603, 6625, 6641, 6642,\n",
|
| 387 |
+
" 6651, 6662, 6706, 6721, 6725, 6740, 6744, 6746, 6755, 6765, 6775,\n",
|
| 388 |
+
" 6784, 6785, 6793, 6817, 6870, 6872, 6894, 6895, 6906, 6919, 6926,\n",
|
| 389 |
+
" 7003, 7035, 7043, 7094, 7121, 7130, 7198, 7212, 7235, 7372, 7432,\n",
|
| 390 |
+
" 7434, 7451, 7459, 7473, 7492, 7498, 7579, 7580, 7637, 7672, 7673,\n",
|
| 391 |
+
" 7756, 7777, 7779, 7786, 7797, 7821, 7823, 7849, 7859, 7886, 7888,\n",
|
| 392 |
+
" 7905, 7918, 7921, 7945, 7991, 8020, 8044, 8062, 8072, 8081, 8091,\n",
|
| 393 |
+
" 8094, 8095, 8165, 8183, 8196, 8198, 8246, 8279, 8332, 8339, 8408,\n",
|
| 394 |
+
" 8410, 8426, 8431, 8457, 8477, 8520, 8522, 8530, 8639, 8912, 9007,\n",
|
| 395 |
+
" 9009, 9010, 9015, 9016, 9019, 9024, 9026, 9036, 9045, 9168, 9211,\n",
|
| 396 |
+
" 9245, 9280, 9316, 9422, 9427, 9433, 9446, 9456, 9465, 9482, 9530,\n",
|
| 397 |
+
" 9554, 9560, 9587, 9610, 9624, 9634, 9642, 9664, 9679, 9680, 9692,\n",
|
| 398 |
+
" 9698, 9700, 9712, 9716, 9719, 9726, 9729, 9740, 9741, 9744, 9745,\n",
|
| 399 |
+
" 9749, 9751, 9752, 9755, 9768, 9770, 9777, 9779, 9780, 9792, 9808,\n",
|
| 400 |
+
" 9811, 9832, 9839, 9858, 9867, 9874, 9883, 9888, 9890, 9892, 9893,\n",
|
| 401 |
+
" 9905, 9925, 9941, 9944, 9959, 9970, 9975, 9980, 9982], dtype=int64),)\n"
|
| 402 |
+
]
|
| 403 |
+
}
|
| 404 |
+
],
|
| 405 |
+
"source": [
|
| 406 |
+
"import cv2\n",
|
| 407 |
+
"import numpy as np\n",
|
| 408 |
+
"from tensorflow.keras.datasets import mnist\n",
|
| 409 |
+
"\n",
|
| 410 |
+
"# loads the MNIST dataset\n",
|
| 411 |
+
"(x_train, y_train), (x_test, y_test) = mnist.load_data()\n",
|
| 412 |
+
"\n",
|
| 413 |
+
"# Use numpy to create an array that stores a value of 1 when a misclassification occurs\n",
|
| 414 |
+
"result = np.absolute(y_test - y_pred)\n",
|
| 415 |
+
"result_indices = np.nonzero(result > 0)\n",
|
| 416 |
+
"\n",
|
| 417 |
+
"# Display the indices of mislassifications\n",
|
| 418 |
+
"print(\"Indices of misclassifed data are: \\n\\n\" + str(result_indices))"
|
| 419 |
+
]
|
| 420 |
+
},
|
| 421 |
+
{
|
| 422 |
+
"cell_type": "markdown",
|
| 423 |
+
"metadata": {},
|
| 424 |
+
"source": [
|
| 425 |
+
"### Displaying the misclassifications"
|
| 426 |
+
]
|
| 427 |
+
},
|
| 428 |
+
{
|
| 429 |
+
"cell_type": "code",
|
| 430 |
+
"execution_count": 10,
|
| 431 |
+
"metadata": {},
|
| 432 |
+
"outputs": [],
|
| 433 |
+
"source": [
|
| 434 |
+
"import cv2 \n",
|
| 435 |
+
"#from keras.models import load_model\n",
|
| 436 |
+
"\n",
|
| 437 |
+
"#classifier = load_model('/home/deeplearningcv/DeepLearningCV/Trained Models/mnist_simple_cnn.h5')\n",
|
| 438 |
+
"\n",
|
| 439 |
+
"def draw_test(name, pred, input_im, true_label):\n",
|
| 440 |
+
" BLACK = [0,0,0]\n",
|
| 441 |
+
" expanded_image = cv2.copyMakeBorder(input_im, 0, 0, 0, imageL.shape[0]*2 ,cv2.BORDER_CONSTANT,value=BLACK)\n",
|
| 442 |
+
" expanded_image = cv2.cvtColor(expanded_image, cv2.COLOR_GRAY2BGR)\n",
|
| 443 |
+
" cv2.putText(expanded_image, str(pred), (152, 70) , cv2.FONT_HERSHEY_COMPLEX_SMALL,4, (0,255,0), 2)\n",
|
| 444 |
+
" cv2.putText(expanded_image, str(true_label), (250, 70) , cv2.FONT_HERSHEY_COMPLEX_SMALL,4, (0,0,255), 2)\n",
|
| 445 |
+
" cv2.imshow(name, expanded_image)\n",
|
| 446 |
+
"\n",
|
| 447 |
+
"for i in range(0,10):\n",
|
| 448 |
+
"\n",
|
| 449 |
+
" input_im = x_test[result_indices[0][i]]\n",
|
| 450 |
+
" #print(y_test[result_indices[0][i]])\n",
|
| 451 |
+
" imageL = cv2.resize(input_im, None, fx=4, fy=4, interpolation = cv2.INTER_CUBIC) \n",
|
| 452 |
+
" input_im = input_im.reshape(1,28,28,1) \n",
|
| 453 |
+
" \n",
|
| 454 |
+
" ## Get Prediction\n",
|
| 455 |
+
" res = str(model.predict_classes(input_im, 1, verbose = 0)[0])\n",
|
| 456 |
+
" draw_test(\"Prediction\", res, imageL, y_test[result_indices[0][i]]) \n",
|
| 457 |
+
" cv2.waitKey(0)\n",
|
| 458 |
+
"\n",
|
| 459 |
+
"cv2.destroyAllWindows()"
|
| 460 |
+
]
|
| 461 |
+
}
|
| 462 |
+
],
|
| 463 |
+
"metadata": {
|
| 464 |
+
"kernelspec": {
|
| 465 |
+
"display_name": "Python 3",
|
| 466 |
+
"language": "python",
|
| 467 |
+
"name": "python3"
|
| 468 |
+
},
|
| 469 |
+
"language_info": {
|
| 470 |
+
"codemirror_mode": {
|
| 471 |
+
"name": "ipython",
|
| 472 |
+
"version": 3
|
| 473 |
+
},
|
| 474 |
+
"file_extension": ".py",
|
| 475 |
+
"mimetype": "text/x-python",
|
| 476 |
+
"name": "python",
|
| 477 |
+
"nbconvert_exporter": "python",
|
| 478 |
+
"pygments_lexer": "ipython3",
|
| 479 |
+
"version": "3.7.4"
|
| 480 |
+
}
|
| 481 |
+
},
|
| 482 |
+
"nbformat": 4,
|
| 483 |
+
"nbformat_minor": 2
|
| 484 |
+
}
|
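A note on the notebook above: `model.predict_classes` only exists on `Sequential` models in older TensorFlow/Keras releases and was removed in later versions (around TF 2.6). Below is a minimal, version-independent sketch of the same confusion-matrix step, assuming the `model`, `x_test`, and one-hot `y_test` already defined in this notebook:

import numpy as np
from sklearn.metrics import classification_report, confusion_matrix

# model.predict returns softmax probabilities; argmax recovers the class index,
# which is what predict_classes used to return
probs = model.predict(x_test)
y_pred = np.argmax(probs, axis=1)

# y_test was one-hot encoded earlier, so undo that for the metrics
y_true = np.argmax(y_test, axis=1)
print(classification_report(y_true, y_pred))
print(confusion_matrix(y_true, y_pred))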
12. Optimizers, Adaptive Learning Rate & Callbacks/12.2 Checkpointing Models and Early Stopping.ipynb
ADDED
|
@@ -0,0 +1,277 @@
|
| 1 |
+
{
|
| 2 |
+
"cells": [
|
| 3 |
+
{
|
| 4 |
+
"cell_type": "code",
|
| 5 |
+
"execution_count": 1,
|
| 6 |
+
"metadata": {},
|
| 7 |
+
"outputs": [
|
| 8 |
+
{
|
| 9 |
+
"name": "stdout",
|
| 10 |
+
"output_type": "stream",
|
| 11 |
+
"text": [
|
| 12 |
+
"x_train shape: (60000, 28, 28, 1)\n",
|
| 13 |
+
"60000 train samples\n",
|
| 14 |
+
"10000 test samples\n",
|
| 15 |
+
"Number of Classes: 10\n",
|
| 16 |
+
"Model: \"sequential\"\n",
|
| 17 |
+
"_________________________________________________________________\n",
|
| 18 |
+
"Layer (type) Output Shape Param # \n",
|
| 19 |
+
"=================================================================\n",
|
| 20 |
+
"conv2d (Conv2D) (None, 26, 26, 32) 320 \n",
|
| 21 |
+
"_________________________________________________________________\n",
|
| 22 |
+
"conv2d_1 (Conv2D) (None, 24, 24, 64) 18496 \n",
|
| 23 |
+
"_________________________________________________________________\n",
|
| 24 |
+
"max_pooling2d (MaxPooling2D) (None, 12, 12, 64) 0 \n",
|
| 25 |
+
"_________________________________________________________________\n",
|
| 26 |
+
"dropout (Dropout) (None, 12, 12, 64) 0 \n",
|
| 27 |
+
"_________________________________________________________________\n",
|
| 28 |
+
"flatten (Flatten) (None, 9216) 0 \n",
|
| 29 |
+
"_________________________________________________________________\n",
|
| 30 |
+
"dense (Dense) (None, 128) 1179776 \n",
|
| 31 |
+
"_________________________________________________________________\n",
|
| 32 |
+
"dropout_1 (Dropout) (None, 128) 0 \n",
|
| 33 |
+
"_________________________________________________________________\n",
|
| 34 |
+
"dense_1 (Dense) (None, 10) 1290 \n",
|
| 35 |
+
"=================================================================\n",
|
| 36 |
+
"Total params: 1,199,882\n",
|
| 37 |
+
"Trainable params: 1,199,882\n",
|
| 38 |
+
"Non-trainable params: 0\n",
|
| 39 |
+
"_________________________________________________________________\n",
|
| 40 |
+
"None\n"
|
| 41 |
+
]
|
| 42 |
+
}
|
| 43 |
+
],
|
| 44 |
+
"source": [
|
| 45 |
+
"from tensorflow.keras.datasets import mnist\n",
|
| 46 |
+
"from tensorflow.keras.utils import to_categorical\n",
|
| 47 |
+
"import tensorflow as tf\n",
|
| 48 |
+
"from tensorflow.keras.optimizers import SGD \n",
|
| 49 |
+
"from tensorflow.keras.datasets import mnist\n",
|
| 50 |
+
"from tensorflow.keras.models import Sequential\n",
|
| 51 |
+
"from tensorflow.keras.layers import Dense, Dropout, Flatten\n",
|
| 52 |
+
"from tensorflow.keras.layers import Conv2D, MaxPooling2D\n",
|
| 53 |
+
"from tensorflow.keras import backend as K\n",
|
| 54 |
+
"from tensorflow.keras.callbacks import ModelCheckpoint, EarlyStopping\n",
|
| 55 |
+
"import os\n",
|
| 56 |
+
"\n",
|
| 57 |
+
"# Training Parameters\n",
|
| 58 |
+
"batch_size = 64\n",
|
| 59 |
+
"epochs = 15\n",
|
| 60 |
+
"\n",
|
| 61 |
+
"# loads the MNIST dataset\n",
|
| 62 |
+
"(x_train, y_train), (x_test, y_test) = mnist.load_data()\n",
|
| 63 |
+
"\n",
|
| 64 |
+
"# Lets store the number of rows and columns\n",
|
| 65 |
+
"img_rows = x_train[0].shape[0]\n",
|
| 66 |
+
"img_cols = x_train[1].shape[0]\n",
|
| 67 |
+
"\n",
|
| 68 |
+
"# Getting our date in the right 'shape' needed for Keras\n",
|
| 69 |
+
"# We need to add a 4th dimenion to our date thereby changing our\n",
|
| 70 |
+
"# Our original image shape of (60000,28,28) to (60000,28,28,1)\n",
|
| 71 |
+
"x_train = x_train.reshape(x_train.shape[0], img_rows, img_cols, 1)\n",
|
| 72 |
+
"x_test = x_test.reshape(x_test.shape[0], img_rows, img_cols, 1)\n",
|
| 73 |
+
"\n",
|
| 74 |
+
"# store the shape of a single image \n",
|
| 75 |
+
"input_shape = (img_rows, img_cols, 1)\n",
|
| 76 |
+
"\n",
|
| 77 |
+
"# change our image type to float32 data type\n",
|
| 78 |
+
"x_train = x_train.astype('float32')\n",
|
| 79 |
+
"x_test = x_test.astype('float32')\n",
|
| 80 |
+
"\n",
|
| 81 |
+
"# Normalize our data by changing the range from (0 to 255) to (0 to 1)\n",
|
| 82 |
+
"x_train /= 255\n",
|
| 83 |
+
"x_test /= 255\n",
|
| 84 |
+
"\n",
|
| 85 |
+
"print('x_train shape:', x_train.shape)\n",
|
| 86 |
+
"print(x_train.shape[0], 'train samples')\n",
|
| 87 |
+
"print(x_test.shape[0], 'test samples')\n",
|
| 88 |
+
"\n",
|
| 89 |
+
"# Now we one hot encode outputs\n",
|
| 90 |
+
"y_train = to_categorical(y_train)\n",
|
| 91 |
+
"y_test = to_categorical(y_test)\n",
|
| 92 |
+
"\n",
|
| 93 |
+
"# Let's count the number columns in our hot encoded matrix \n",
|
| 94 |
+
"print (\"Number of Classes: \" + str(y_test.shape[1]))\n",
|
| 95 |
+
"\n",
|
| 96 |
+
"num_classes = y_test.shape[1]\n",
|
| 97 |
+
"num_pixels = x_train.shape[1] * x_train.shape[2]\n",
|
| 98 |
+
"\n",
|
| 99 |
+
"# create model\n",
|
| 100 |
+
"model = Sequential()\n",
|
| 101 |
+
"\n",
|
| 102 |
+
"model.add(Conv2D(32, kernel_size=(3, 3),\n",
|
| 103 |
+
" activation='relu',\n",
|
| 104 |
+
" input_shape=input_shape))\n",
|
| 105 |
+
"model.add(Conv2D(64, (3, 3), activation='relu'))\n",
|
| 106 |
+
"model.add(MaxPooling2D(pool_size=(2, 2)))\n",
|
| 107 |
+
"model.add(Dropout(0.25))\n",
|
| 108 |
+
"model.add(Flatten())\n",
|
| 109 |
+
"model.add(Dense(128, activation='relu'))\n",
|
| 110 |
+
"model.add(Dropout(0.5))\n",
|
| 111 |
+
"model.add(Dense(num_classes, activation='softmax'))\n",
|
| 112 |
+
"\n",
|
| 113 |
+
"model.compile(loss = 'categorical_crossentropy',\n",
|
| 114 |
+
" optimizer = SGD(0.01),\n",
|
| 115 |
+
" metrics = ['accuracy'])\n",
|
| 116 |
+
"\n",
|
| 117 |
+
"print(model.summary())\n",
|
| 118 |
+
"\n",
|
| 119 |
+
" \n",
|
| 120 |
+
"checkpoint = ModelCheckpoint(\"MNIST_Checkpoint.h5\",\n",
|
| 121 |
+
" monitor=\"val_loss\",\n",
|
| 122 |
+
" mode=\"min\",\n",
|
| 123 |
+
" save_best_only = True,\n",
|
| 124 |
+
" verbose=1)\n",
|
| 125 |
+
"callbacks = [checkpoint]\n"
|
| 126 |
+
]
|
| 127 |
+
},
|
| 128 |
+
{
|
| 129 |
+
"cell_type": "code",
|
| 130 |
+
"execution_count": 2,
|
| 131 |
+
"metadata": {},
|
| 132 |
+
"outputs": [
|
| 133 |
+
{
|
| 134 |
+
"name": "stdout",
|
| 135 |
+
"output_type": "stream",
|
| 136 |
+
"text": [
|
| 137 |
+
"Train on 60000 samples, validate on 10000 samples\n",
|
| 138 |
+
"\n",
|
| 139 |
+
"Epoch 00001: val_loss improved from inf to 0.24715, saving model to MNIST_Checkpoint.h5\n",
|
| 140 |
+
"60000/60000 - 97s - loss: 0.7167 - accuracy: 0.7759 - val_loss: 0.2471 - val_accuracy: 0.9275\n",
|
| 141 |
+
"Test loss: 0.24714763118624689\n",
|
| 142 |
+
"Test accuracy: 0.9275\n"
|
| 143 |
+
]
|
| 144 |
+
}
|
| 145 |
+
],
|
| 146 |
+
"source": [
|
| 147 |
+
"history = model.fit(x_train, y_train,\n",
|
| 148 |
+
" batch_size = batch_size,\n",
|
| 149 |
+
" epochs = epochs,\n",
|
| 150 |
+
" verbose = 2,\n",
|
| 151 |
+
" callbacks = callbacks,\n",
|
| 152 |
+
" validation_data = (x_test, y_test))\n",
|
| 153 |
+
"\n",
|
| 154 |
+
"score = model.evaluate(x_test, y_test, verbose=0)\n",
|
| 155 |
+
"print('Test loss:', score[0])\n",
|
| 156 |
+
"print('Test accuracy:', score[1])"
|
| 157 |
+
]
|
| 158 |
+
},
|
| 159 |
+
{
|
| 160 |
+
"cell_type": "markdown",
|
| 161 |
+
"metadata": {},
|
| 162 |
+
"source": [
|
| 163 |
+
"### Adding Multiple Call Backs & Early Stopping\n",
|
| 164 |
+
"\n",
|
| 165 |
+
"We can use other call back methods to monitor our training process such as **Early Stopping**. Checkout the Keras documentation for more:\n",
|
| 166 |
+
"- https://keras.io/callbacks/"
|
| 167 |
+
]
|
| 168 |
+
},
|
| 169 |
+
{
|
| 170 |
+
"cell_type": "code",
|
| 171 |
+
"execution_count": 4,
|
| 172 |
+
"metadata": {},
|
| 173 |
+
"outputs": [],
|
| 174 |
+
"source": [
|
| 175 |
+
"from tensorflow.keras.callbacks import EarlyStopping\n",
|
| 176 |
+
"\n",
|
| 177 |
+
"earlystop = EarlyStopping(monitor = 'val_loss', # value being monitored for improvement\n",
|
| 178 |
+
" min_delta = 0, #Abs value and is the min change required before we stop\n",
|
| 179 |
+
" patience = 3, #Number of epochs we wait before stopping \n",
|
| 180 |
+
" verbose = 1,\n",
|
| 181 |
+
" restore_best_weights = True) #keeps the best weigths once stopped\n",
|
| 182 |
+
"\n",
|
| 183 |
+
"# we put our call backs into a callback list\n",
|
| 184 |
+
"callbacks = [earlystop, checkpoint]"
|
| 185 |
+
]
|
| 186 |
+
},
|
| 187 |
+
{
|
| 188 |
+
"cell_type": "markdown",
|
| 189 |
+
"metadata": {},
|
| 190 |
+
"source": [
|
| 191 |
+
"### We can attempt to run again to see if it worked!"
|
| 192 |
+
]
|
| 193 |
+
},
|
| 194 |
+
{
|
| 195 |
+
"cell_type": "code",
|
| 196 |
+
"execution_count": 5,
|
| 197 |
+
"metadata": {},
|
| 198 |
+
"outputs": [
|
| 199 |
+
{
|
| 200 |
+
"name": "stdout",
|
| 201 |
+
"output_type": "stream",
|
| 202 |
+
"text": [
|
| 203 |
+
"Train on 60000 samples, validate on 10000 samples\n",
|
| 204 |
+
"Epoch 1/3\n",
|
| 205 |
+
"\n",
|
| 206 |
+
"Epoch 00001: val_loss improved from 0.24715 to 0.18733, saving model to MNIST_Checkpoint.h5\n",
|
| 207 |
+
"60000/60000 - 105s - loss: 0.3593 - accuracy: 0.8905 - val_loss: 0.1873 - val_accuracy: 0.9437\n",
|
| 208 |
+
"Epoch 2/3\n",
|
| 209 |
+
"\n",
|
| 210 |
+
"Epoch 00002: val_loss improved from 0.18733 to 0.15683, saving model to MNIST_Checkpoint.h5\n",
|
| 211 |
+
"60000/60000 - 105s - loss: 0.3018 - accuracy: 0.9084 - val_loss: 0.1568 - val_accuracy: 0.9525\n",
|
| 212 |
+
"Epoch 3/3\n",
|
| 213 |
+
"\n",
|
| 214 |
+
"Epoch 00003: val_loss improved from 0.15683 to 0.13865, saving model to MNIST_Checkpoint.h5\n",
|
| 215 |
+
"60000/60000 - 108s - loss: 0.2658 - accuracy: 0.9205 - val_loss: 0.1386 - val_accuracy: 0.9578\n",
|
| 216 |
+
"Test loss: 0.1386499687358737\n",
|
| 217 |
+
"Test accuracy: 0.9578\n"
|
| 218 |
+
]
|
| 219 |
+
}
|
| 220 |
+
],
|
| 221 |
+
"source": [
|
| 222 |
+
"history = model.fit(x_train, y_train,\n",
|
| 223 |
+
" batch_size=64,\n",
|
| 224 |
+
" epochs=3,\n",
|
| 225 |
+
" verbose=2,\n",
|
| 226 |
+
" callbacks = callbacks,\n",
|
| 227 |
+
" validation_data=(x_test, y_test))\n",
|
| 228 |
+
"\n",
|
| 229 |
+
"\n",
|
| 230 |
+
"score = model.evaluate(x_test, y_test, verbose=0)\n",
|
| 231 |
+
"print('Test loss:', score[0])\n",
|
| 232 |
+
"print('Test accuracy:', score[1])"
|
| 233 |
+
]
|
| 234 |
+
},
|
| 235 |
+
{
|
| 236 |
+
"cell_type": "markdown",
|
| 237 |
+
"metadata": {},
|
| 238 |
+
"source": [
|
| 239 |
+
"### Another useful callback is Reducing our learning Rate on Plateau\n",
|
| 240 |
+
"\n",
|
| 241 |
+
"We can avoid having our oscillate around the global minimum by attempting to reduce the Learn Rate by a certain fact. If no improvement is seen in our monitored metric (val_loss typically), we wait a certain number of epochs (patience) then this callback reduces the learning rate by a factor"
|
| 242 |
+
]
|
| 243 |
+
},
|
| 244 |
+
{
|
| 245 |
+
"cell_type": "code",
|
| 246 |
+
"execution_count": 6,
|
| 247 |
+
"metadata": {},
|
| 248 |
+
"outputs": [],
|
| 249 |
+
"source": [
|
| 250 |
+
"from tensorflow.keras.callbacks import ReduceLROnPlateau\n",
|
| 251 |
+
"\n",
|
| 252 |
+
"reduce_lr = ReduceLROnPlateau(monitor = 'val_loss', factor = 0.2, patience = 3, verbose = 1, min_delta = 0.0001)"
|
| 253 |
+
]
|
| 254 |
+
}
|
| 255 |
+
],
|
| 256 |
+
"metadata": {
|
| 257 |
+
"kernelspec": {
|
| 258 |
+
"display_name": "Python 3",
|
| 259 |
+
"language": "python",
|
| 260 |
+
"name": "python3"
|
| 261 |
+
},
|
| 262 |
+
"language_info": {
|
| 263 |
+
"codemirror_mode": {
|
| 264 |
+
"name": "ipython",
|
| 265 |
+
"version": 3
|
| 266 |
+
},
|
| 267 |
+
"file_extension": ".py",
|
| 268 |
+
"mimetype": "text/x-python",
|
| 269 |
+
"name": "python",
|
| 270 |
+
"nbconvert_exporter": "python",
|
| 271 |
+
"pygments_lexer": "ipython3",
|
| 272 |
+
"version": "3.7.4"
|
| 273 |
+
}
|
| 274 |
+
},
|
| 275 |
+
"nbformat": 4,
|
| 276 |
+
"nbformat_minor": 2
|
| 277 |
+
}
|
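Putting the pieces of this notebook together: below is a minimal sketch of how the `checkpoint`, `earlystop`, and `reduce_lr` callback objects defined above would all be passed to a single training run. The epoch count here is an illustrative assumption, not the course's setting:

# All three callbacks can run in the same training loop;
# early stopping may end training before the final epoch.
callbacks = [checkpoint, earlystop, reduce_lr]

history = model.fit(x_train, y_train,
                    batch_size=64,
                    epochs=30,
                    verbose=2,
                    callbacks=callbacks,
                    validation_data=(x_test, y_test))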
12. Optimizers, Adaptive Learning Rate & Callbacks/12.3 Building a Fruit Classifer.ipynb
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
12. Optimizers, Learning Rates & Callbacks with Fruit Classification/1. Introduction to the types of Optimizers, Learning Rates & Callbacks.srt
ADDED
|
@@ -0,0 +1,35 @@
|
1
00:00:00,910 --> 00:00:06,970
Hi, and welcome to Chapter 12, where we talk about the different types of optimizers, adaptive learning rates,

2
00:00:06,980 --> 00:00:12,380
how we can even manipulate learning rates during training, and what callbacks are - a very important Keras

3
00:00:12,470 --> 00:00:14,870
utility.

4
00:00:14,880 --> 00:00:16,870
So these are the three sections in the chapter.

5
00:00:16,870 --> 00:00:23,060
Firstly, we just go through the types of optimizers and adaptive learning methods. Then

6
00:00:23,120 --> 00:00:28,290
we take a look at Keras callbacks, and look at three important callbacks.

7
00:00:28,290 --> 00:00:30,030
There are many more, but these are the important ones.

8
00:00:30,030 --> 00:00:35,930
I would advise you to know checkpointing, early stopping, and adjusting learning rates that plateau.

9
00:00:36,420 --> 00:00:41,950
And lastly, we're going to use these callbacks to build our fruit classifier, so stay tuned.
|
12. Optimizers, Learning Rates & Callbacks with Fruit Classification/2. Types Optimizers and Adaptive Learning Rate Methods.srt
ADDED
|
@@ -0,0 +1,439 @@
1
00:00:00,530 --> 00:00:06,240
OK, so let's start Chapter 12.1 by looking at the types of optimizers we

2
00:00:06,240 --> 00:00:10,520
have available in Keras, and look at some adaptive learning methods.

3
00:00:10,530 --> 00:00:12,540
So let's dive in.

4
00:00:12,600 --> 00:00:15,560
So, optimizers - what exactly are optimizers?

5
00:00:15,570 --> 00:00:21,540
Now, you may remember from our neural network explanation that optimizers are the algorithms we use to minimize

6
00:00:21,550 --> 00:00:22,400
our loss.

7
00:00:22,560 --> 00:00:27,120
And some examples of this, which should be familiar to you by now, would be gradient descent, stochastic

8
00:00:27,120 --> 00:00:28,940
gradient descent, and mini-batch gradient descent.

9
00:00:30,380 --> 00:00:35,930
So Keras actually comes with a lot more optimizers than those. We have the standard stochastic gradient

10
00:00:35,930 --> 00:00:39,560
descent, RMSprop, Adagrad, Adadelta,

11
00:00:39,610 --> 00:00:41,300
Adam, Adamax, and Nadam.

12
00:00:44,800 --> 00:00:52,320
So, a quick aside: constant learning rates are generally bad, especially if you start off too big.

13
00:00:52,400 --> 00:00:58,440
So imagine, after 50 long epochs, we're thinking we're close to convergence, but then the problem

14
00:00:58,550 --> 00:01:05,970
is our loss basically bounces around, and our, sorry, our training and test accuracies

15
00:01:06,180 --> 00:01:07,890
basically stop increasing.

16
00:01:07,890 --> 00:01:09,170
That's a bad situation

17
00:01:09,300 --> 00:01:14,760
when you're training. So you always want to use a learning rate that's as small as possible, without

18
00:01:14,760 --> 00:01:17,630
being too small, because with too small a learning rate

19
00:01:17,640 --> 00:01:21,730
it would just simply take forever to train.

20
00:01:22,210 --> 00:01:23,580
So there are so many choices.

21
00:01:23,630 --> 00:01:24,790
What's the difference?

22
00:01:24,800 --> 00:01:29,510
The main difference in these algorithms is how they manipulate learning rates to allow faster convergence

23
00:01:29,540 --> 00:01:31,700
and better validation accuracy.

24
00:01:31,700 --> 00:01:38,150
Some require, like gradient descent, setting some manual parameters, or even adjusting our learning rate schedules,

25
00:01:38,160 --> 00:01:43,460
which we will come to shortly, and then some of them use a heuristic approach to provide adaptive learning

26
00:01:43,460 --> 00:01:45,120
rates, which are quite cool.

27
00:01:45,170 --> 00:01:47,690
We'll actually see some of the comparisons shortly.

28
00:01:49,760 --> 00:01:54,910
So let's talk a bit about stochastic gradient descent and the parameters Keras allows us to control.

29
00:01:55,130 --> 00:02:00,920
So by default, Keras uses a constant learning rate in its SGD optimizer - that's stochastic gradient descent -

30
00:02:01,590 --> 00:02:08,360
however, we can set the parameters for momentum and decay, and also turn off or on something called

31
00:02:08,450 --> 00:02:10,340
Nesterov momentum.

32
00:02:10,340 --> 00:02:12,980
So let's talk a bit about momentum.

33
00:02:12,980 --> 00:02:19,220
Momentum is a technique that accelerates SGD by pushing the gradient steps along the relevant direction

34
00:02:19,670 --> 00:02:23,680
while reducing the jumps and oscillations away from relevant directions.

35
00:02:23,690 --> 00:02:29,360
So it basically encourages our gradient descent to head in the direction that is

36
00:02:29,360 --> 00:02:30,380
reducing loss.

37
00:02:30,440 --> 00:02:33,340
So it doesn't actually move away from that path.

38
00:02:34,480 --> 00:02:42,200
And as for decay: decay is something that decays the learning rate every batch, by the way, not

39
00:02:42,260 --> 00:02:42,700
every epoch.

40
00:02:42,710 --> 00:02:44,300
So be careful how you set your batch size,

41
00:02:44,300 --> 00:02:44,930
by the way.

42
00:02:45,050 --> 00:02:49,040
This is one time batch size becomes relevant in training.

43
00:02:49,460 --> 00:02:54,230
A good rule of thumb, though, for setting the decay is: decay equals learning rate divided by the number of

44
00:02:54,230 --> 00:02:54,890
epochs.

45
00:02:54,950 --> 00:02:55,920
OK.

46
00:02:56,450 --> 00:03:02,090
So that's how we choose the decay value. Nesterov, basically, is the person who actually developed a method

47
00:03:02,450 --> 00:03:08,480
that solves the problem of oscillating around our minima - oscillating around a minimum basically

48
00:03:08,480 --> 00:03:13,410
means our steps are too big for us to actually converge at a minimum point.

49
00:03:13,910 --> 00:03:18,140
And this happens when momentum is high and unable to slow down.

50
00:03:18,530 --> 00:03:23,370
So this makes a big jump, then a small correction after the gradient is calculated.

51
00:03:23,570 --> 00:03:25,530
That's how Nesterov works.

52
00:03:25,550 --> 00:03:33,160
So I would encourage you to use Nesterov momentum if you actually want to converge faster.

53
00:03:33,170 --> 00:03:38,440
So this is a good illustration here of Nesterov momentum.

54
00:03:38,510 --> 00:03:42,120
It was taken from this source; it seems to be from a Stanford course.

55
00:03:43,980 --> 00:03:48,930
So basically, this actually shows all the other algorithms compared here.

56
00:03:49,530 --> 00:03:53,350
And basically it shows you how momentum actually looks as well.

57
00:03:53,370 --> 00:03:54,230
So take a look.

58
00:03:54,510 --> 00:03:56,250
It's actually quite interesting.

59
00:03:56,250 --> 00:04:04,070
This is SGD with momentum, with Nesterov enabled, by the way, so you can see it's taking a while to get here.

60
00:04:04,350 --> 00:04:08,710
But it will eventually get here, although the others have gotten there more quickly.

61
00:04:11,890 --> 00:04:14,360
I actually was wrong in one thing.

62
00:04:14,360 --> 00:04:16,360
I just checked it again on my second screen.

63
00:04:16,580 --> 00:04:20,090
Actually, momentum actually had Nesterov enabled here;

64
00:04:20,280 --> 00:04:22,260
SGD was just plain vanilla SGD.

65
00:04:23,990 --> 00:04:29,680
So you can see all of these advanced optimizers got there eventually, except for SGD, which took forever.

66
00:04:31,660 --> 00:04:34,600
So let's talk a bit more about those other algorithms here.

67
00:04:34,660 --> 00:04:35,720
Some of these here.

68
00:04:36,100 --> 00:04:39,120
Let's start talking about the ones that are available in Keras.

69
00:04:39,520 --> 00:04:44,870
So we just saw we can set up parameters to control the learning rate schedule, and learning rate schedules are

70
00:04:44,890 --> 00:04:46,050
basically how

71
00:04:46,150 --> 00:04:52,900
our learning rates adapt over the training process, be it based on the number of epochs that have

72
00:04:52,900 --> 00:04:55,290
been completed, or other parameters.

73
00:04:55,320 --> 00:04:56,070
OK.

74
00:04:56,830 --> 00:05:00,430
That's why it's an adaptive learning rate - it treats each epoch differently.

75
00:05:00,430 --> 00:05:03,130
So let's talk quickly about that.

76
00:05:03,360 --> 00:05:09,490
This performs larger updates for the more sparse parameters and smaller updates for the less sparse parameters.

77
00:05:09,530 --> 00:05:12,240
Because of this behavior,

78
00:05:12,340 --> 00:05:19,480
it is well-suited for sparse data. However, because the learning rate is always

79
00:05:19,480 --> 00:05:24,060
decreasing monotonically, after many epochs learning slows down to a crawl.

80
00:05:24,580 --> 00:05:31,640
So Adadelta actually solves this monotonically decreasing learning rate problem that occurs

81
00:05:31,660 --> 00:05:35,730
in Adagrad. RMSprop is actually similar to Adadelta.

82
00:05:35,760 --> 00:05:41,410
Sorry, I couldn't find much information to explain this, but just remember these are similar - probably

83
00:05:41,440 --> 00:05:48,430
discovered separately, but they have similar methods of action. And Adam, which is not really

84
00:05:48,520 --> 00:05:55,360
an acronym despite its name - Adam is similar to Adadelta, but it also keeps momentum and learning rates for each

85
00:05:55,360 --> 00:05:56,070
parameter,

86
00:05:56,400 --> 00:05:57,910
each of the parameters handled separately.

87
00:05:58,240 --> 00:06:05,240
I'll correct these little mistakes here and there before this is delivered to you guys.

88
00:06:05,300 --> 00:06:07,640
So what does a good learning rate look like?

89
00:06:07,640 --> 00:06:10,580
So this is shown on a loss graph here -

90
00:06:10,770 --> 00:06:16,010
loss versus epochs. This is how it would look if we had a very high learning rate - a large rate.

91
00:06:16,130 --> 00:06:20,660
Basically, we would never find a convergence zone, because we'd be bouncing around everywhere, and the loss

92
00:06:20,660 --> 00:06:22,940
can just get worse over time.

93
00:06:23,270 --> 00:06:27,560
A low learning rate will eventually get there; however, it'll take a while.

94
00:06:27,640 --> 00:06:29,930
A high learning rate will eventually get here too.

95
00:06:30,240 --> 00:06:35,780
Well, these two could actually be interchangeable, but basically it will get there too. Good learning rates

96
00:06:35,780 --> 00:06:41,560
have nice, gradual, smooth, decreasing steps and basically converge at lower points over time.

97
00:06:43,080 --> 00:06:49,320
And finally, if you go to keras.io/optimizers, it brings up a list of all the optimizers, and actually

98
00:06:49,320 --> 00:06:56,220
how to use the code here, and what settings are available for the optimizers. So you can see, as

99
00:06:56,220 --> 00:06:59,570
it shows here, basically we have some of these parameters here.

100
00:07:00,400 --> 00:07:03,080
Nesterov and decay I mentioned to you before.

101
00:07:04,070 --> 00:07:10,130
RMSprop - all of these are available here, with some explanations of what they do and what you can

102
00:07:10,130 --> 00:07:13,480
tweak, all available on the Keras site.

103
00:07:13,480 --> 00:07:15,350
So take a look.

104
00:07:15,680 --> 00:07:22,630
Just so you know, in practice I find Adam to be one of the best optimizers.

105
00:07:22,650 --> 00:07:27,270
I don't even actually have to move far from these default values.

106
00:07:27,280 --> 00:07:29,520
I actually just set it to be slightly smaller.

107
00:07:29,780 --> 00:07:38,000
But all these values are usually fine. Also, what's quite good to use as well is SGD with a very low

108
00:07:38,000 --> 00:07:38,890
learning rate.

109
00:07:39,020 --> 00:07:41,830
You can set the momentum and decay according to your parameters,

110
00:07:42,000 --> 00:07:44,900
as I mentioned before, and always use nesterov as true.
12. Optimizers, Learning Rates & Callbacks with Fruit Classification/3. Keras Callbacks and Checkpoint, Early Stopping and Adjust Learning Rates that Pl.srt
ADDED
|
@@ -0,0 +1,379 @@
|
| 1 |
+
1
|
| 2 |
+
00:00:00,450 --> 00:00:02,510
|
| 3 |
+
I look at the chapter twelve point two.
|
| 4 |
+
|
| 5 |
+
2
|
| 6 |
+
00:00:02,560 --> 00:00:08,580
|
| 7 |
+
We talk about clawbacks checkpoints stopping and adjusting learning rates that apply to how we do this
|
| 8 |
+
|
| 9 |
+
3
|
| 10 |
+
00:00:08,640 --> 00:00:11,400
|
| 11 |
+
automatically occurs all made possible by callbacks.
|
| 12 |
+
|
| 13 |
+
4
|
| 14 |
+
00:00:11,400 --> 00:00:13,590
|
| 15 |
+
Let's take a look and see what's done.
|
| 16 |
+
|
| 17 |
+
5
|
| 18 |
+
00:00:13,680 --> 00:00:15,920
|
| 19 |
+
So let's talk about checkpoint service.
|
| 20 |
+
|
| 21 |
+
6
|
| 22 |
+
00:00:16,020 --> 00:00:23,640
|
| 23 |
+
Now as you saw in previous training models when we train to actually the Katsaris dogs gasifier we saw
|
| 24 |
+
|
| 25 |
+
7
|
| 26 |
+
00:00:23,640 --> 00:00:24,820
|
| 27 |
+
we had 25 bucks.
|
| 28 |
+
|
| 29 |
+
8
|
| 30 |
+
00:00:24,840 --> 00:00:29,250
|
| 31 |
+
But our best results were somewhere at maybe 14 or 15.
|
| 32 |
+
|
| 33 |
+
9
|
| 34 |
+
00:00:29,250 --> 00:00:32,680
|
| 35 |
+
We got like 75 76 or 75 percent.
|
| 36 |
+
|
| 37 |
+
10
|
| 38 |
+
00:00:33,120 --> 00:00:38,280
|
| 39 |
+
So what if we wanted to save those we had after e.g. park and maybe keep the best model.
|
| 40 |
+
|
| 41 |
+
11
|
| 42 |
+
00:00:38,280 --> 00:00:45,770
|
| 43 |
+
So after every book what if Kara saved the model and maybe just kept the best model in memory so that
|
| 44 |
+
|
| 45 |
+
12
|
| 46 |
+
00:00:46,110 --> 00:00:51,240
|
| 47 |
+
after every epoch we just keep we keep saving the best and best model.
|
| 48 |
+
|
| 49 |
+
13
|
| 50 |
+
00:00:51,330 --> 00:00:57,570
|
| 51 |
+
So at the end of it even making that Katsaris towards example at the end after 25 bucks the model that
|
| 52 |
+
|
| 53 |
+
14
|
| 54 |
+
00:00:57,570 --> 00:01:03,780
|
| 55 |
+
we save is isn't last model after the 25 bucks but it would be model number 14.
|
| 56 |
+
|
| 57 |
+
15
|
| 58 |
+
00:01:03,780 --> 00:01:09,510
|
| 59 |
+
I mean after IPAC 14 so this is quite useful in actually I guess getting If you want to leave something
|
| 60 |
+
|
| 61 |
+
16
|
| 62 |
+
00:01:09,510 --> 00:01:12,510
|
| 63 |
+
running overnight and you don't want to actually have to sit down and babysit.
|
| 64 |
+
|
| 65 |
+
17
|
| 66 |
+
00:01:12,540 --> 00:01:13,910
|
| 67 |
+
It's training.
|
| 68 |
+
|
| 69 |
+
18
|
| 70 |
+
00:01:13,920 --> 00:01:15,420
|
| 71 |
+
You can just use this.
|
| 72 |
+
|
| 73 |
+
19
|
| 74 |
+
00:01:15,420 --> 00:01:21,680
|
| 75 |
+
So let's see how we use this checkpointing inside of Chris so checkpointing is quite easy.
|
| 76 |
+
|
| 77 |
+
20
|
| 78 |
+
00:01:21,680 --> 00:01:27,910
|
| 79 |
+
First of all we just need to imported from Chris Kodaks And basically this is how much our checkpoint
|
| 80 |
+
|
| 81 |
+
21
|
| 82 |
+
00:01:28,090 --> 00:01:30,850
|
| 83 |
+
checkpoint object is created.
|
| 84 |
+
|
| 85 |
+
22
|
| 86 |
+
00:01:30,850 --> 00:01:36,280
|
| 87 |
+
So we initially sit here with model checkpoint we specify a full tool for where we want our model to
|
| 88 |
+
|
| 89 |
+
23
|
| 90 |
+
00:01:36,280 --> 00:01:40,080
|
| 91 |
+
be saved and we specify what value we want to monitor.
|
| 92 |
+
|
| 93 |
+
24
|
| 94 |
+
00:01:40,270 --> 00:01:46,690
|
| 95 |
+
So after every epoch we check our loss; we just use the loss to decide which
|
| 96 |
+
|
| 97 |
+
25
|
| 98 |
+
00:01:46,690 --> 00:01:47,810
|
| 99 |
+
model is best.
|
| 100 |
+
|
| 101 |
+
26
|
| 102 |
+
00:01:48,190 --> 00:01:49,380
|
| 103 |
+
We use mode 'min'.
|
| 104 |
+
|
| 105 |
+
27
|
| 106 |
+
00:01:49,390 --> 00:01:55,050
|
| 107 |
+
So that means we track the minimum loss, obviously, and save_best_only is set to True.
|
| 108 |
+
|
| 109 |
+
28
|
| 110 |
+
00:01:55,060 --> 00:01:58,900
|
| 111 |
+
The reason I set this to True is: why would you want to save models that aren't that good?
|
| 112 |
+
|
| 113 |
+
29
|
| 114 |
+
00:01:59,430 --> 00:02:05,830
|
| 115 |
+
It just wastes memory and space. And verbose equal to one gives us nice feedback when it's actually doing
|
| 116 |
+
|
| 117 |
+
30
|
| 118 |
+
00:02:05,830 --> 00:02:07,100
|
| 119 |
+
the checkpointing which we'll see.
|
| 120 |
+
|
| 121 |
+
31
|
| 122 |
+
00:02:07,100 --> 00:02:13,830
|
| 123 |
+
We'll jump into the IPython notebook quite soon; basically we just have a variable here called callbacks.
|
| 124 |
+
|
| 125 |
+
32
|
| 126 |
+
00:02:13,900 --> 00:02:16,500
|
| 127 |
+
We put this object here into this array.
|
| 128 |
+
|
| 129 |
+
33
|
| 130 |
+
00:02:16,870 --> 00:02:23,080
|
| 131 |
+
And when we're training we just have this new argument here, callbacks equals callbacks, and it basically takes
|
| 132 |
+
|
| 133 |
+
34
|
| 134 |
+
00:02:23,140 --> 00:02:25,410
|
| 135 |
+
all the callbacks; we just created one here.
|
| 136 |
+
|
| 137 |
+
35
|
| 138 |
+
00:02:25,660 --> 00:02:31,540
|
| 139 |
+
However, if we wanted to create another type of callback, you'd just put it in this array as well, and that's
|
| 140 |
+
|
| 141 |
+
36
|
| 142 |
+
00:02:31,540 --> 00:02:37,420
|
| 143 |
+
it: we look at the lowest value for loss and save only the best model.
|
| 144 |
+
|
| 145 |
+
37
|
| 146 |
+
00:02:37,600 --> 00:02:40,750
|
| 147 |
+
So how do we stop training once our loss stops getting better?
|
| 148 |
+
|
| 149 |
+
38
|
| 150 |
+
00:02:40,750 --> 00:02:44,620
|
| 151 |
+
Now this is another useful feature that can save us valuable computing resources.
|
| 152 |
+
|
| 153 |
+
39
|
| 154 |
+
00:02:44,620 --> 00:02:47,460
|
| 155 |
+
What if after 20 epochs you stop seeing any improvements?
|
| 156 |
+
|
| 157 |
+
40
|
| 158 |
+
00:02:47,460 --> 00:02:53,890
|
| 159 |
+
That is, our validation loss or accuracy was no longer decreasing or increasing respectively.
|
| 160 |
+
|
| 161 |
+
41
|
| 162 |
+
00:02:53,890 --> 00:02:58,140
|
| 163 |
+
So would you want to wait an excessive number of epochs and risk overfitting?
|
| 164 |
+
|
| 165 |
+
42
|
| 166 |
+
00:02:58,480 --> 00:03:00,790
|
| 167 |
+
And waste all that time as well?
|
| 168 |
+
|
| 169 |
+
43
|
| 170 |
+
00:03:00,790 --> 00:03:02,190
|
| 171 |
+
So no you don't.
|
| 172 |
+
|
| 173 |
+
44
|
| 174 |
+
00:03:02,230 --> 00:03:08,200
|
| 175 |
+
So even after executing all 50 epochs, checkpointing ensures that we save only the best model, but we're
|
| 176 |
+
|
| 177 |
+
45
|
| 178 |
+
00:03:08,200 --> 00:03:11,490
|
| 179 |
+
still running pointless epochs, which is wasteful.
|
| 180 |
+
|
| 181 |
+
46
|
| 182 |
+
00:03:11,770 --> 00:03:13,710
|
| 183 |
+
So how do we stop this.
|
| 184 |
+
|
| 185 |
+
47
|
| 186 |
+
00:03:13,980 --> 00:03:20,700
|
| 187 |
+
Now here is a new callback called EarlyStopping. Early stopping basically allows us to monitor one value.
|
| 188 |
+
|
| 189 |
+
48
|
| 190 |
+
00:03:20,710 --> 00:03:24,850
|
| 191 |
+
And if that value basically stops improving we stop training.
|
| 192 |
+
|
| 193 |
+
49
|
| 194 |
+
00:03:24,850 --> 00:03:27,410
|
| 195 |
+
We don't run any more epochs.
|
| 196 |
+
|
| 197 |
+
50
|
| 198 |
+
00:03:27,460 --> 00:03:29,260
|
| 199 |
+
So this is how early stopping is configured.
|
| 200 |
+
|
| 201 |
+
51
|
| 202 |
+
00:03:29,380 --> 00:03:33,720
|
| 203 |
+
Similar to checkpointing, we tell it what to monitor.
|
| 204 |
+
|
| 205 |
+
52
|
| 206 |
+
00:03:33,730 --> 00:03:36,200
|
| 207 |
+
So we monitor validation loss, with mode 'min'.
|
| 208 |
+
|
| 209 |
+
53
|
| 210 |
+
00:03:36,220 --> 00:03:39,540
|
| 211 |
+
We have some parameters like min_delta and patience.
|
| 212 |
+
|
| 213 |
+
54
|
| 214 |
+
00:03:39,550 --> 00:03:40,020
|
| 215 |
+
All right.
|
| 216 |
+
|
| 217 |
+
55
|
| 218 |
+
00:03:40,270 --> 00:03:41,440
|
| 219 |
+
So what is min_delta here?
|
| 220 |
+
|
| 221 |
+
56
|
| 222 |
+
00:03:42,040 --> 00:03:46,810
|
| 223 |
+
It's an absolute value and it's basically the minimum change required to count as an improvement.
|
| 224 |
+
|
| 225 |
+
57
|
| 226 |
+
00:03:46,810 --> 00:03:54,680
|
| 227 |
+
So basically this is the size of change we look for. And patience is actually a really
|
| 228 |
+
|
| 229 |
+
58
|
| 230 |
+
00:03:54,760 --> 00:03:56,330
|
| 231 |
+
nice parameter.
|
| 232 |
+
|
| 233 |
+
59
|
| 234 |
+
00:03:56,350 --> 00:04:00,610
|
| 235 |
+
It tells us how many epochs we wait before we stop.
|
| 236 |
+
|
| 237 |
+
60
|
| 238 |
+
00:04:00,610 --> 00:04:08,050
|
| 239 |
+
So let's say we are looking at epoch number 20 in my last example and our validation loss was basically
|
| 240 |
+
|
| 241 |
+
61
|
| 242 |
+
00:04:08,050 --> 00:04:14,410
|
| 243 |
+
0.1, at epoch 21 it was 0.1 again, and at epoch 22 it was 0.1 again.
|
| 244 |
+
|
| 245 |
+
62
|
| 246 |
+
00:04:14,530 --> 00:04:22,330
|
| 247 |
+
That means we waited three epochs, and now this early stopping callback will basically stop training short,
|
| 248 |
+
|
| 249 |
+
63
|
| 250 |
+
00:04:22,370 --> 00:04:28,090
|
| 251 |
+
prematurely, which would be a good thing here, because of our min_delta, which we set to zero.
|
| 252 |
+
|
| 253 |
+
64
|
| 254 |
+
00:04:28,300 --> 00:04:35,320
|
| 255 |
+
If we set it to a value like, say, 0.1, it would basically only count changes of
|
| 256 |
+
|
| 257 |
+
65
|
| 258 |
+
00:04:35,740 --> 00:04:37,480
|
| 259 |
+
at least 0.1 as being an improvement.
|
| 260 |
+
|
| 261 |
+
66
|
| 262 |
+
00:04:37,480 --> 00:04:45,100
|
| 263 |
+
So if nothing improved by more than 0.1, it would still wait at least patience epochs
|
| 264 |
+
|
| 265 |
+
67
|
| 266 |
+
00:04:45,100 --> 00:04:53,200
|
| 267 |
+
here and then stop. So what about this one here, ReduceLROnPlateau?
|
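
To make that concrete, here is a minimal sketch of an EarlyStopping callback matching the scenario just described; the parameter values are illustrative, not the course's exact settings.

from keras.callbacks import EarlyStopping

# If val_loss fails to improve by at least min_delta for `patience`
# consecutive epochs (e.g. it sits at 0.1 for epochs 20, 21 and 22),
# training stops early.
early_stop = EarlyStopping(monitor="val_loss", min_delta=0.1,
                           patience=3, verbose=1)
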
| 268 |
+
|
| 269 |
+
68
|
| 270 |
+
00:04:53,420 --> 00:04:57,830
|
| 271 |
+
Now remember I told you before that we have a number of optimizers that
|
| 272 |
+
|
| 273 |
+
69
|
| 274 |
+
00:04:57,830 --> 00:05:04,340
|
| 275 |
+
basically do learning rate adjustments on the fly, using things like momentum and decay and some advanced
|
| 276 |
+
|
| 277 |
+
70
|
| 278 |
+
00:05:04,400 --> 00:05:07,470
|
| 279 |
+
algorithms like Adam and those sorts of things.
|
| 280 |
+
|
| 281 |
+
71
|
| 282 |
+
00:05:07,670 --> 00:05:13,700
|
| 283 |
+
But Keras also has something that can actually force a change to the learning rate that has been
|
| 284 |
+
|
| 285 |
+
72
|
| 286 |
+
00:05:13,700 --> 00:05:16,050
|
| 287 |
+
set in those optimizers.
|
| 288 |
+
|
| 289 |
+
73
|
| 290 |
+
00:05:16,130 --> 00:05:21,390
|
| 291 |
+
So what we can do is basically reduce the learning rate on a plateau, which means that we can avoid oscillating.
|
| 292 |
+
|
| 293 |
+
74
|
| 294 |
+
00:05:21,410 --> 00:05:23,880
|
| 295 |
+
Let's explain this quickly.
|
| 296 |
+
|
| 297 |
+
75
|
| 298 |
+
00:05:24,140 --> 00:05:28,880
|
| 299 |
+
We can avoid having our loss oscillate around a global minimum by reducing the learning rate.
|
| 300 |
+
|
| 301 |
+
76
|
| 302 |
+
00:05:28,880 --> 00:05:33,140
|
| 303 |
+
And how does Keras even notice? If no improvement is seen on a monitored metric,
|
| 304 |
+
|
| 305 |
+
77
|
| 306 |
+
00:05:33,170 --> 00:05:41,960
|
| 307 |
+
typically validation loss, then just like early stopping we wait a minimum number of epochs and then we change
|
| 308 |
+
|
| 309 |
+
78
|
| 310 |
+
00:05:42,080 --> 00:05:43,650
|
| 311 |
+
the learning rate.
|
| 312 |
+
|
| 313 |
+
79
|
| 314 |
+
00:05:43,670 --> 00:05:46,000
|
| 315 |
+
And this is the factor that we can change it by.
|
| 316 |
+
|
| 317 |
+
80
|
| 318 |
+
00:05:46,370 --> 00:05:54,020
|
| 319 |
+
And this is the min_delta, the minimum size of change, and the minimum value we can let the learning rate be.
|
| 320 |
+
|
| 321 |
+
81
|
| 322 |
+
00:05:54,260 --> 00:05:54,850
|
| 323 |
+
OK.
|
| 324 |
+
|
| 325 |
+
82
|
| 326 |
+
00:05:55,490 --> 00:05:57,970
|
| 327 |
+
So this is quite a useful feature to use.
|
| 328 |
+
|
| 329 |
+
83
|
| 330 |
+
00:05:58,040 --> 00:06:02,150
|
| 331 |
+
I actually always use these three callbacks in my training.
|
| 332 |
+
|
| 333 |
+
84
|
| 334 |
+
00:06:02,150 --> 00:06:04,560
|
| 335 |
+
Going forward we will be using them as well.
|
| 336 |
+
|
| 337 |
+
85
|
| 338 |
+
00:06:05,000 --> 00:06:09,830
|
| 339 |
+
And it basically saves you a lot of time: you can just leave something running, walk away for a couple of hours, come
|
| 340 |
+
|
| 341 |
+
86
|
| 342 |
+
00:06:09,830 --> 00:06:13,830
|
| 343 |
+
back, and you'll see whether your trained model was good or not.
|
| 344 |
+
|
| 345 |
+
87
|
| 346 |
+
00:06:13,870 --> 00:06:17,190
|
| 347 |
+
I've seen some people actually do some pretty cool things: they hook up
|
| 348 |
+
|
| 349 |
+
88
|
| 350 |
+
00:06:17,210 --> 00:06:22,760
|
| 351 |
+
texting and notification systems to their Keras callbacks so they get
|
| 352 |
+
|
| 353 |
+
89
|
| 354 |
+
00:06:22,760 --> 00:06:24,300
|
| 355 |
+
a notification when training has stopped.
|
| 356 |
+
|
| 357 |
+
90
|
| 358 |
+
00:06:24,310 --> 00:06:25,010
|
| 359 |
+
Really.
|
| 360 |
+
|
| 361 |
+
91
|
| 362 |
+
00:06:25,220 --> 00:06:25,940
|
| 363 |
+
That's quite cool.
|
| 364 |
+
|
| 365 |
+
92
|
| 366 |
+
00:06:25,960 --> 00:06:27,570
|
| 367 |
+
It would be a fun project for you to do.
|
| 368 |
+
|
| 369 |
+
93
|
| 370 |
+
00:06:27,890 --> 00:06:30,290
|
| 371 |
+
So that's it, we're finished here.
|
| 372 |
+
|
| 373 |
+
94
|
| 374 |
+
00:06:30,530 --> 00:06:36,820
|
| 375 |
+
Now we're going to go into the IPython notebook, 12.3, and build our fruit classifier using some of these methods.
|
| 376 |
+
|
| 377 |
+
95
|
| 378 |
+
00:06:36,830 --> 00:06:37,640
|
| 379 |
+
So let's get to it.
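
As a recap of this lecture, here is a minimal sketch of the three callbacks discussed (ModelCheckpoint, EarlyStopping and ReduceLROnPlateau), assuming a compiled Keras model named model and NumPy arrays x_train, y_train, x_test and y_test; the file name and parameter values are illustrative, not the course's exact settings.

from keras.callbacks import ModelCheckpoint, EarlyStopping, ReduceLROnPlateau

# Save the model whenever validation loss reaches a new minimum
checkpoint = ModelCheckpoint("best_model.h5",   # must be a file path, not a directory
                             monitor="val_loss",
                             mode="min",
                             save_best_only=True,
                             verbose=1)

# Stop training once validation loss has not improved for 3 consecutive epochs
early_stop = EarlyStopping(monitor="val_loss",
                           min_delta=0,   # any decrease counts as an improvement
                           patience=3,
                           verbose=1)

# Halve the learning rate when validation loss plateaus
reduce_lr = ReduceLROnPlateau(monitor="val_loss",
                              factor=0.5,
                              patience=3,
                              verbose=1)

callbacks = [checkpoint, early_stop, reduce_lr]

model.fit(x_train, y_train,
          validation_data=(x_test, y_test),
          epochs=25,
          batch_size=32,
          callbacks=callbacks)
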
|
12. Optimizers, Learning Rates & Callbacks with Fruit Classification/4. Build a Fruit Classifier.srt
ADDED
|
@@ -0,0 +1,527 @@
|
|
|
|
|
|
|
|
|
| 1 |
+
1
|
| 2 |
+
00:00:00,390 --> 00:00:06,030
|
| 3 |
+
Hi and welcome to 12.3, where we start building our fruit classifier and we start using some of these
|
| 4 |
+
|
| 5 |
+
2
|
| 6 |
+
00:00:06,030 --> 00:00:08,290
|
| 7 |
+
callbacks we learnt in the previous section.
|
| 8 |
+
|
| 9 |
+
3
|
| 10 |
+
00:00:08,670 --> 00:00:13,110
|
| 11 |
+
So let's talk a bit about the fruit dataset, called Fruits 360.
|
| 12 |
+
|
| 13 |
+
4
|
| 14 |
+
00:00:13,110 --> 00:00:16,440
|
| 15 |
+
Basically it was part of a Kaggle competition.
|
| 16 |
+
|
| 17 |
+
5
|
| 18 |
+
00:00:16,530 --> 00:00:18,530
|
| 19 |
+
This is a link to the actual dataset here.
|
| 20 |
+
|
| 21 |
+
6
|
| 22 |
+
00:00:18,780 --> 00:00:21,460
|
| 23 |
+
It consists of 81 types of fruits.
|
| 24 |
+
|
| 25 |
+
7
|
| 26 |
+
00:00:21,840 --> 00:00:28,770
|
| 27 |
+
That's 81 classes and approximately 45 images per class, and all images are 100 by 100 pixels and
|
| 28 |
+
|
| 29 |
+
8
|
| 30 |
+
00:00:28,770 --> 00:00:30,000
|
| 31 |
+
in color.
|
| 32 |
+
|
| 33 |
+
9
|
| 34 |
+
00:00:30,660 --> 00:00:33,040
|
| 35 |
+
So these are some examples of fruits here.
|
| 36 |
+
|
| 37 |
+
10
|
| 38 |
+
00:00:34,030 --> 00:00:36,780
|
| 39 |
+
Honestly I can't identify some of these myself.
|
| 40 |
+
|
| 41 |
+
11
|
| 42 |
+
00:00:37,420 --> 00:00:40,950
|
| 43 |
+
But we're going to try and get our classifier to do just that.
|
| 44 |
+
|
| 45 |
+
12
|
| 46 |
+
00:00:40,970 --> 00:00:43,480
|
| 47 |
+
So let's go to the IPython notebook.
|
| 48 |
+
|
| 49 |
+
13
|
| 50 |
+
00:00:43,810 --> 00:00:44,080
|
| 51 |
+
OK.
|
| 52 |
+
|
| 53 |
+
14
|
| 54 |
+
00:00:44,080 --> 00:00:47,860
|
| 55 |
+
So here we are at Chapter 12, the building of a fruit classifier.
|
| 56 |
+
|
| 57 |
+
15
|
| 58 |
+
00:00:47,890 --> 00:00:49,040
|
| 59 |
+
Let's bring up this file.
|
| 60 |
+
|
| 61 |
+
16
|
| 62 |
+
00:00:50,100 --> 00:00:54,400
|
| 63 |
+
But before we begin, I hope you downloaded your Fruits 360 dataset.
|
| 64 |
+
|
| 65 |
+
17
|
| 66 |
+
00:00:54,760 --> 00:00:58,290
|
| 67 |
+
And I wanted you to put that file,
|
| 68 |
+
|
| 69 |
+
18
|
| 70 |
+
00:00:58,290 --> 00:00:59,630
|
| 71 |
+
let's go to it here,
|
| 72 |
+
|
| 73 |
+
19
|
| 74 |
+
00:01:00,090 --> 00:01:03,450
|
| 75 |
+
into this folder; it should have extracted into this folder here.
|
| 76 |
+
|
| 77 |
+
20
|
| 78 |
+
00:01:03,930 --> 00:01:08,850
|
| 79 |
+
And basically I want you to make sure that the folders are named train and validation.
|
| 80 |
+
|
| 81 |
+
21
|
| 82 |
+
00:01:08,850 --> 00:01:14,480
|
| 83 |
+
They should be, as I've zipped and compressed it correctly, and you can take a look at the fruits here.
|
| 84 |
+
|
| 85 |
+
22
|
| 86 |
+
00:01:14,520 --> 00:01:20,540
|
| 87 |
+
So you can take a look at some mangoes; the mangoes all look quite similar to each other.
|
| 88 |
+
|
| 89 |
+
23
|
| 90 |
+
00:01:20,940 --> 00:01:21,210
|
| 91 |
+
OK.
|
| 92 |
+
|
| 93 |
+
24
|
| 94 |
+
00:01:21,220 --> 00:01:22,450
|
| 95 |
+
So let's go back to this here.
|
| 96 |
+
|
| 97 |
+
25
|
| 98 |
+
00:01:22,500 --> 00:01:33,610
|
| 99 |
+
So let's go back to the notebook. Firstly, like we have done in our cats vs. dogs CNN, we declare our
|
| 100 |
+
|
| 101 |
+
26
|
| 102 |
+
00:01:33,630 --> 00:01:39,380
|
| 103 |
+
directories here, and we just create some image data generators for training and validation, and our training
|
| 104 |
+
|
| 105 |
+
27
|
| 106 |
+
00:01:39,380 --> 00:01:44,400
|
| 107 |
+
and validation generators; notice it's now categorical, not binary.
|
| 108 |
+
|
| 109 |
+
28
|
| 110 |
+
00:01:44,730 --> 00:01:48,780
|
| 111 |
+
And also notice we have to declare a number of classes here as well.
|
| 112 |
+
|
| 113 |
+
29
|
| 114 |
+
00:01:48,860 --> 00:01:54,830
|
| 115 |
+
Even though the dataset is encoded as 100 by 100 pixels, I'm going to resize
|
| 116 |
+
|
| 117 |
+
30
|
| 118 |
+
00:01:54,840 --> 00:01:58,890
|
| 119 |
+
down to 32 by 32 to basically make our training faster.
|
| 120 |
+
|
| 121 |
+
31
|
| 122 |
+
00:01:59,010 --> 00:02:02,090
|
| 123 |
+
We use a similar CNN to the one we used for CIFAR here as well.
|
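
Here is a minimal sketch of the generator setup just described, assuming the dataset was extracted into train and validation folders; the directory paths, the 32 by 32 target size and the batch size are assumptions, not the notebook's exact values.

from keras.preprocessing.image import ImageDataGenerator

num_classes = 81
img_size = (32, 32)   # resized down from 100x100 to speed up training

train_datagen = ImageDataGenerator(rescale=1.0 / 255)
validation_datagen = ImageDataGenerator(rescale=1.0 / 255)

# class_mode is "categorical" here, not "binary", since we have 81 classes
train_generator = train_datagen.flow_from_directory(
    "./fruits-360/train",
    target_size=img_size,
    batch_size=32,
    class_mode="categorical")

validation_generator = validation_datagen.flow_from_directory(
    "./fruits-360/validation",
    target_size=img_size,
    batch_size=32,
    class_mode="categorical")
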
| 124 |
+
|
| 125 |
+
32
|
| 126 |
+
00:02:04,470 --> 00:02:10,680
|
| 127 |
+
And then we declare, and this is important, the cell that creates our callbacks. So as you saw in the presentation,
|
| 128 |
+
|
| 129 |
+
33
|
| 130 |
+
00:02:11,160 --> 00:02:17,430
|
| 131 |
+
this one is the checkpoint callback, and checkpointing basically ensures we save the best model after every
|
| 132 |
+
|
| 133 |
+
34
|
| 134 |
+
00:02:17,430 --> 00:02:17,810
|
| 135 |
+
epoch.
|
| 136 |
+
|
| 137 |
+
35
|
| 138 |
+
00:02:17,820 --> 00:02:23,550
|
| 139 |
+
If you train for 20 epochs and the best model is at epoch 16, that will be the one we save here.
|
| 140 |
+
|
| 141 |
+
36
|
| 142 |
+
00:02:24,060 --> 00:02:24,840
|
| 143 |
+
We need to specify a file name.
|
| 144 |
+
|
| 145 |
+
37
|
| 146 |
+
00:02:24,840 --> 00:02:28,910
|
| 147 |
+
I actually didn't mention that in our slides, but it's not just the directory, it's actually
|
| 148 |
+
|
| 149 |
+
38
|
| 150 |
+
00:02:28,910 --> 00:02:32,590
|
| 151 |
+
a file name we want to save it as, otherwise it will not work.
|
| 152 |
+
|
| 153 |
+
39
|
| 154 |
+
00:02:32,630 --> 00:02:38,760
|
| 155 |
+
Then early stopping here: early stopping, with the parameters as we have set here, basically
|
| 156 |
+
|
| 157 |
+
40
|
| 158 |
+
00:02:38,760 --> 00:02:45,540
|
| 159 |
+
tells us when this thing has stopped improving, and stops training. ReduceLROnPlateau wasn't actually
|
| 160 |
+
|
| 161 |
+
41
|
| 162 |
+
00:02:45,540 --> 00:02:47,340
|
| 163 |
+
used in this example here.
|
| 164 |
+
|
| 165 |
+
42
|
| 166 |
+
00:02:47,430 --> 00:02:50,640
|
| 167 |
+
However, we could have used it; it rarely kicks in
|
| 168 |
+
|
| 169 |
+
43
|
| 170 |
+
00:02:50,640 --> 00:02:55,820
|
| 171 |
+
unless you train for an exhaustive number of epochs, but it's always good to have it just in case.
|
| 172 |
+
|
| 173 |
+
44
|
| 174 |
+
00:02:55,920 --> 00:02:59,880
|
| 175 |
+
So we create our callbacks array here, and I actually did not add it in here.
|
| 176 |
+
|
| 177 |
+
45
|
| 178 |
+
00:03:00,150 --> 00:03:01,440
|
| 179 |
+
Let me quickly put it in.
|
| 180 |
+
|
| 181 |
+
46
|
| 182 |
+
00:03:01,730 --> 00:03:07,620
|
| 183 |
+
And basically what we do when we compile, sorry, not compile, when we fit the model, is that we point our callbacks argument
|
| 184 |
+
|
| 185 |
+
47
|
| 186 |
+
00:03:07,620 --> 00:03:10,180
|
| 187 |
+
to our callbacks array here.
|
| 188 |
+
|
| 189 |
+
48
|
| 190 |
+
00:03:10,410 --> 00:03:18,870
|
| 191 |
+
So as you can see, we've trained for five epochs, and basically you can see this output here.
|
| 192 |
+
|
| 193 |
+
49
|
| 194 |
+
00:03:19,290 --> 00:03:23,740
|
| 195 |
+
Basically this tells us that a model was saved here after every epoch.
|
| 196 |
+
|
| 197 |
+
50
|
| 198 |
+
00:03:24,090 --> 00:03:29,460
|
| 199 |
+
And basically what happened is that after this point it noted that validation loss didn't
|
| 200 |
+
|
| 201 |
+
51
|
| 202 |
+
00:03:29,460 --> 00:03:30,150
|
| 203 |
+
improve.
|
| 204 |
+
|
| 205 |
+
52
|
| 206 |
+
00:03:30,330 --> 00:03:33,680
|
| 207 |
+
If we had trained for more epochs, which I probably should have done for this example.
|
| 208 |
+
|
| 209 |
+
53
|
| 210 |
+
00:03:33,690 --> 00:03:39,330
|
| 211 |
+
But if we had trained for more epochs we would have triggered the early stopping criterion and this would have stopped
|
| 212 |
+
|
| 213 |
+
54
|
| 214 |
+
00:03:39,330 --> 00:03:40,260
|
| 215 |
+
training.
|
| 216 |
+
|
| 217 |
+
55
|
| 218 |
+
00:03:40,260 --> 00:03:47,760
|
| 219 |
+
So basically I could have set it to train for 10 epochs, which I probably should have done, come to think of
|
| 220 |
+
|
| 221 |
+
56
|
| 222 |
+
00:03:47,760 --> 00:03:47,850
|
| 223 |
+
it.
|
| 224 |
+
|
| 225 |
+
57
|
| 226 |
+
00:03:47,850 --> 00:03:49,020
|
| 227 |
+
I did five epochs here.
|
| 228 |
+
|
| 229 |
+
58
|
| 230 |
+
00:03:49,020 --> 00:03:49,620
|
| 231 |
+
Sorry.
|
| 232 |
+
|
| 233 |
+
59
|
| 234 |
+
00:03:49,980 --> 00:03:51,410
|
| 235 |
+
And you can do it on your own.
|
| 236 |
+
|
| 237 |
+
60
|
| 238 |
+
00:03:51,450 --> 00:03:58,140
|
| 239 |
+
Set it to ten epochs and train it, and you'll see it's going to stop after maybe six epochs.
|
| 240 |
+
|
| 241 |
+
61
|
| 242 |
+
00:03:58,230 --> 00:04:00,640
|
| 243 |
+
So let's take a look at the confusion matrix here.
|
| 244 |
+
|
| 245 |
+
62
|
| 246 |
+
00:04:00,900 --> 00:04:04,250
|
| 247 |
+
It's not printed correctly and I'll show you how we solve this too.
|
| 248 |
+
|
| 249 |
+
63
|
| 250 |
+
00:04:04,470 --> 00:04:08,120
|
| 251 |
+
And that's because we have 81 classes in this dataset.
|
| 252 |
+
|
| 253 |
+
64
|
| 254 |
+
00:04:08,260 --> 00:04:12,560
|
| 255 |
+
Our classification report is well laid out.
|
| 256 |
+
|
| 257 |
+
65
|
| 258 |
+
00:04:12,570 --> 00:04:17,570
|
| 259 |
+
But it's a bit tedious to read; you don't get a ton of information unless you actually drill down and see
|
| 260 |
+
|
| 261 |
+
66
|
| 262 |
+
00:04:17,570 --> 00:04:20,510
|
| 263 |
+
something like: oh, pomegranates are basically classified badly.
|
| 264 |
+
|
| 265 |
+
67
|
| 266 |
+
00:04:20,890 --> 00:04:29,150
|
| 267 |
+
OK, so here's another way to visualize the confusion matrix, which was probably not best visualized at
|
| 268 |
+
|
| 269 |
+
68
|
| 270 |
+
00:04:29,150 --> 00:04:30,330
|
| 271 |
+
all like that.
|
| 272 |
+
|
| 273 |
+
69
|
| 274 |
+
00:04:31,610 --> 00:04:32,270
|
| 275 |
+
So here we go.
|
| 276 |
+
|
| 277 |
+
70
|
| 278 |
+
00:04:32,300 --> 00:04:36,720
|
| 279 |
+
Now there's a problem here with this; we can actually increase the plot size here.
|
| 280 |
+
|
| 281 |
+
71
|
| 282 |
+
00:04:36,710 --> 00:04:40,570
|
| 283 |
+
Let's try 20 by 20.
|
| 284 |
+
|
| 285 |
+
72
|
| 286 |
+
00:04:40,700 --> 00:04:43,280
|
| 287 |
+
It'll probably take about maybe 10 seconds to run.
|
| 288 |
+
|
| 289 |
+
73
|
| 290 |
+
00:04:44,400 --> 00:04:47,200
|
| 291 |
+
Ah, the validation data generator hasn't been run.
|
| 292 |
+
|
| 293 |
+
74
|
| 294 |
+
00:04:47,200 --> 00:04:51,280
|
| 295 |
+
So let's quickly go back here and run this in the beginning.
|
| 296 |
+
|
| 297 |
+
75
|
| 298 |
+
00:04:51,430 --> 00:04:54,980
|
| 299 |
+
It's good that you actually see these errors and see how I solve them.
|
| 300 |
+
|
| 301 |
+
76
|
| 302 |
+
00:04:55,510 --> 00:04:55,960
|
| 303 |
+
OK.
|
| 304 |
+
|
| 305 |
+
77
|
| 306 |
+
00:04:56,290 --> 00:04:59,790
|
| 307 |
+
That's because when the notebook was saved, that cell wasn't actually run.
|
| 308 |
+
|
| 309 |
+
78
|
| 310 |
+
00:05:00,040 --> 00:05:02,200
|
| 311 |
+
And let's run this; this should work now.
|
| 312 |
+
|
| 313 |
+
79
|
| 314 |
+
00:05:04,170 --> 00:05:06,500
|
| 315 |
+
It's loading our model; that's a model I saved
|
| 316 |
+
|
| 317 |
+
80
|
| 318 |
+
00:05:06,520 --> 00:05:12,120
|
| 319 |
+
prior to this training run. I think it was a fairly good model, if I'm not mistaken.
|
| 320 |
+
|
| 321 |
+
81
|
| 322 |
+
00:05:23,260 --> 00:05:23,710
|
| 323 |
+
There we go.
|
| 324 |
+
|
| 325 |
+
82
|
| 326 |
+
00:05:23,740 --> 00:05:27,320
|
| 327 |
+
So this 20 by 20 did make a nice difference.
|
| 328 |
+
|
| 329 |
+
83
|
| 330 |
+
00:05:27,380 --> 00:05:29,360
|
| 331 |
+
We have a nice legend here as well.
|
| 332 |
+
|
| 333 |
+
84
|
| 334 |
+
00:05:29,530 --> 00:05:33,690
|
| 335 |
+
So you can see there's a nice diagonal in the middle here.
|
| 336 |
+
|
| 337 |
+
85
|
| 338 |
+
00:05:33,940 --> 00:05:35,570
|
| 339 |
+
So that is good.
|
| 340 |
+
|
| 341 |
+
86
|
| 342 |
+
00:05:35,590 --> 00:05:38,110
|
| 343 |
+
Now we can see little spots here and there.
|
| 344 |
+
|
| 345 |
+
87
|
| 346 |
+
00:05:38,170 --> 00:05:39,490
|
| 347 |
+
This one is here.
|
| 348 |
+
|
| 349 |
+
88
|
| 350 |
+
00:05:39,500 --> 00:05:41,600
|
| 351 |
+
I'm not sure what it corresponds to.
|
| 352 |
+
|
| 353 |
+
89
|
| 354 |
+
00:05:41,920 --> 00:05:42,700
|
| 355 |
+
Let's see.
|
| 356 |
+
|
| 357 |
+
90
|
| 358 |
+
00:05:42,700 --> 00:05:44,280
|
| 359 |
+
Looks like kumquats.
|
| 360 |
+
|
| 361 |
+
91
|
| 362 |
+
00:05:44,320 --> 00:05:50,300
|
| 363 |
+
And it's been mixed up with, I think, mandarins; I'm not entirely sure.
|
| 364 |
+
|
| 365 |
+
92
|
| 366 |
+
00:05:50,330 --> 00:05:55,720
|
| 367 |
+
But you can analyze this on your own and take a look and see what's being confused as what.
|
| 368 |
+
|
| 369 |
+
93
|
| 370 |
+
00:05:55,720 --> 00:05:59,960
|
| 371 |
+
So this is a nice visual representation of our confusion matrix here.
|
| 372 |
+
|
| 373 |
+
94
|
| 374 |
+
00:06:00,690 --> 00:06:01,610
|
| 375 |
+
OK.
|
| 376 |
+
|
| 377 |
+
95
|
| 378 |
+
00:06:01,990 --> 00:06:04,550
|
| 379 |
+
So let's test this here.
|
| 380 |
+
|
| 381 |
+
96
|
| 382 |
+
00:06:04,570 --> 00:06:09,270
|
| 383 |
+
Now I've created this OpenCV routine that actually brings up our fruits and shows us the predicted
|
| 384 |
+
|
| 385 |
+
97
|
| 386 |
+
00:06:09,280 --> 00:06:11,250
|
| 387 |
+
value and what it actually was.
|
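
A rough sketch of what such an OpenCV test loop might look like, assuming a trained model named classifier, a list of validation file paths image_paths and an index-to-name mapping class_labels; all of these names are hypothetical, not the notebook's exact variables.

import cv2
import numpy as np

for path in np.random.choice(image_paths, 10):
    image = cv2.imread(path)
    # Preprocess the same way as training: resize and rescale
    x = cv2.resize(image, (32, 32)).astype("float32") / 255.0
    pred = class_labels[np.argmax(classifier.predict(x[np.newaxis, ...]))]
    actual = path.split("/")[-2]  # the folder name holds the true class
    shown = cv2.resize(image, None, fx=3, fy=3, interpolation=cv2.INTER_CUBIC)
    cv2.putText(shown, "Predicted: " + pred, (10, 25),
                cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 255, 0), 2)
    cv2.putText(shown, "Actual: " + actual, (10, 55),
                cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 0, 255), 2)
    cv2.imshow("Fruit classifier", shown)
    cv2.waitKey(0)
cv2.destroyAllWindows()
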
| 388 |
+
|
| 389 |
+
98
|
| 390 |
+
00:06:11,470 --> 00:06:12,580
|
| 391 |
+
So let's run this.
|
| 392 |
+
|
| 393 |
+
99
|
| 394 |
+
00:06:12,590 --> 00:06:13,200
|
| 395 |
+
Here we are.
|
| 396 |
+
|
| 397 |
+
100
|
| 398 |
+
00:06:13,450 --> 00:06:16,890
|
| 399 |
+
So it predicted a passion fruit and it actually was a passion fruit.
|
| 400 |
+
|
| 401 |
+
101
|
| 402 |
+
00:06:16,930 --> 00:06:19,090
|
| 403 |
+
And that is not how the passion fruits
|
| 404 |
+
|
| 405 |
+
102
|
| 406 |
+
00:06:19,120 --> 00:06:22,220
|
| 407 |
+
I've seen look, but fair enough.
|
| 408 |
+
|
| 409 |
+
103
|
| 410 |
+
00:06:22,220 --> 00:06:26,760
|
| 411 |
+
This one is a red banana and it got it correctly as a red banana. Tomato tree,
|
| 412 |
+
|
| 413 |
+
104
|
| 414 |
+
00:06:26,760 --> 00:06:29,860
|
| 415 |
+
I think that's a tree tomato; good, correct here.
|
| 416 |
+
|
| 417 |
+
105
|
| 418 |
+
00:06:30,830 --> 00:06:31,470
|
| 419 |
+
Grape.
|
| 420 |
+
|
| 421 |
+
106
|
| 422 |
+
00:06:31,530 --> 00:06:32,640
|
| 423 |
+
White or green.
|
| 424 |
+
|
| 425 |
+
107
|
| 426 |
+
00:06:32,640 --> 00:06:34,390
|
| 427 |
+
Looks like a light green to me.
|
| 428 |
+
|
| 429 |
+
108
|
| 430 |
+
00:06:34,440 --> 00:06:35,520
|
| 431 |
+
Avocado ripe.
|
| 432 |
+
|
| 433 |
+
109
|
| 434 |
+
00:06:35,520 --> 00:06:35,910
|
| 435 |
+
Good to know.
|
| 436 |
+
|
| 437 |
+
110
|
| 438 |
+
00:06:35,920 --> 00:06:37,410
|
| 439 |
+
That's right.
|
| 440 |
+
|
| 441 |
+
111
|
| 442 |
+
00:06:37,950 --> 00:06:40,910
|
| 443 |
+
Golden berry; so it's actually getting everything right.
|
| 444 |
+
|
| 445 |
+
112
|
| 446 |
+
00:06:40,980 --> 00:06:43,460
|
| 447 |
+
This is quite a good classifier.
|
| 448 |
+
|
| 449 |
+
113
|
| 450 |
+
00:06:44,190 --> 00:06:48,330
|
| 451 |
+
And basically we can take a look at our classifier's final accuracy, which I probably should have mentioned before:
|
| 452 |
+
|
| 453 |
+
114
|
| 454 |
+
00:06:48,750 --> 00:06:50,530
|
| 455 |
+
it is 93 percent.
|
| 456 |
+
|
| 457 |
+
115
|
| 458 |
+
00:06:50,730 --> 00:06:52,410
|
| 459 |
+
But is that the one we actually use?
|
| 460 |
+
|
| 461 |
+
116
|
| 462 |
+
00:06:52,410 --> 00:06:53,190
|
| 463 |
+
No.
|
| 464 |
+
|
| 465 |
+
117
|
| 466 |
+
00:06:53,190 --> 00:06:57,600
|
| 467 |
+
The one we actually use would be the one saved with checkpointing, which is very useful.
|
| 468 |
+
|
| 469 |
+
118
|
| 470 |
+
00:06:57,610 --> 00:07:03,060
|
| 471 |
+
The one we actually use was probably the one at about 92.9 percent, almost 93 percent, after
|
| 472 |
+
|
| 473 |
+
119
|
| 474 |
+
00:07:03,070 --> 00:07:04,050
|
| 475 |
+
the first three epochs.
|
| 476 |
+
|
| 477 |
+
120
|
| 478 |
+
00:07:04,140 --> 00:07:08,280
|
| 479 |
+
And each epoch took around five minutes to run.
|
| 480 |
+
|
| 481 |
+
121
|
| 482 |
+
00:07:08,280 --> 00:07:12,250
|
| 483 |
+
So this is quite good for training such a complicated dataset using a CPU.
|
| 484 |
+
|
| 485 |
+
122
|
| 486 |
+
00:07:12,750 --> 00:07:14,950
|
| 487 |
+
So let's just run it one more time.
|
| 488 |
+
|
| 489 |
+
123
|
| 490 |
+
00:07:14,970 --> 00:07:17,780
|
| 491 |
+
Let's see if we get, surely we'll get 1 in 10 wrong.
|
| 492 |
+
|
| 493 |
+
124
|
| 494 |
+
00:07:17,850 --> 00:07:19,080
|
| 495 |
+
It's a good one.
|
| 496 |
+
|
| 497 |
+
125
|
| 498 |
+
00:07:19,080 --> 00:07:20,460
|
| 499 |
+
These are all very good.
|
| 500 |
+
|
| 501 |
+
126
|
| 502 |
+
00:07:20,520 --> 00:07:23,220
|
| 503 |
+
So yes pomegranates are a problem.
|
| 504 |
+
|
| 505 |
+
127
|
| 506 |
+
00:07:23,220 --> 00:07:25,790
|
| 507 |
+
Although, to be fair, that one looks a bit like an apple to me.
|
| 508 |
+
|
| 509 |
+
128
|
| 510 |
+
00:07:25,830 --> 00:07:29,060
|
| 511 |
+
However, this one was predicted as something like an apple; fair enough.
|
| 512 |
+
|
| 513 |
+
129
|
| 514 |
+
00:07:29,220 --> 00:07:32,610
|
| 515 |
+
So everything else is correct.
|
| 516 |
+
|
| 517 |
+
130
|
| 518 |
+
00:07:32,670 --> 00:07:36,630
|
| 519 |
+
So that's it for basically using Keras's callback feature.
|
| 520 |
+
|
| 521 |
+
131
|
| 522 |
+
00:07:36,860 --> 00:07:40,970
|
| 523 |
+
We've seen checkpointing, early stopping, and learning rate adjustments on plateau too.
|
| 524 |
+
|
| 525 |
+
132
|
| 526 |
+
00:07:41,340 --> 00:07:42,340
|
| 527 |
+
That's it for the chapter.
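
A rough sketch of the enlarged confusion-matrix plot described above, assuming integer label arrays y_true and y_pred for the 81 classes and a class_names list; these variable names are assumptions, not the notebook's exact ones.

import matplotlib.pyplot as plt
from sklearn.metrics import confusion_matrix

cm = confusion_matrix(y_true, y_pred)

# Enlarging the figure (e.g. 20 x 20 inches) makes the 81 x 81 matrix readable
fig, ax = plt.subplots(figsize=(20, 20))
im = ax.imshow(cm, interpolation="nearest", cmap=plt.cm.Blues)
fig.colorbar(im)  # the "legend" mentioned in the video
ax.set_xticks(range(len(class_names)))
ax.set_yticks(range(len(class_names)))
ax.set_xticklabels(class_names, rotation=90, fontsize=6)
ax.set_yticklabels(class_names, fontsize=6)
ax.set_xlabel("Predicted class")
ax.set_ylabel("True class")
plt.show()  # a strong diagonal means most classes are predicted correctly
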
|
12. Optimizers, Learning Rates & Callbacks with Fruit Classification/4.1 Download Fruits Dataset.html
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
<script type="text/javascript">window.location = "https://drive.google.com/file/d/1EtqEF0-JA-jWSsXDyeAmSgKBNqoOsLhj/view?usp=sharing";</script>
|
13. Batch Normalization & LeNet, AlexNet Clothing Classifier/1. Intro to Building LeNet, AlexNet in Keras & Understand Batch Normalization.srt
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
| 1 |
+
1
|
| 2 |
+
00:00:00,540 --> 00:00:06,100
|
| 3 |
+
Hi and welcome to Chapter 13. We're actually going to build some famous CNNs, one called LeNet and one
|
| 4 |
+
|
| 5 |
+
2
|
| 6 |
+
00:00:06,100 --> 00:00:07,850
|
| 7 |
+
called AlexNet, in Keras.
|
| 8 |
+
|
| 9 |
+
3
|
| 10 |
+
00:00:07,870 --> 00:00:14,460
|
| 11 |
+
And then we're going to take a look at batch normalization and see how it improves our results easily.
|
| 12 |
+
|
| 13 |
+
4
|
| 14 |
+
00:00:14,520 --> 00:00:16,140
|
| 15 |
+
So let's look at the contents of the chapter.
|
| 16 |
+
|
| 17 |
+
5
|
| 18 |
+
00:00:16,160 --> 00:00:18,870
|
| 19 |
+
We're going to build LeNet and test it on MNIST.
|
| 20 |
+
|
| 21 |
+
6
|
| 22 |
+
00:00:18,900 --> 00:00:24,980
|
| 23 |
+
Similarly, we're going to build AlexNet and test it on CIFAR10, then learn about batch normalization.
|
| 24 |
+
|
| 25 |
+
7
|
| 26 |
+
00:00:25,050 --> 00:00:30,000
|
| 27 |
+
And then we're going to use batch normalization in building a clothing and apparel classifier which
|
| 28 |
+
|
| 29 |
+
8
|
| 30 |
+
00:00:30,000 --> 00:00:32,100
|
| 31 |
+
is based on the Fashion MNIST dataset.
|
13. Batch Normalization & LeNet, AlexNet Clothing Classifier/2. Build LeNet and test on MNIST.srt
ADDED
|
@@ -0,0 +1,187 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
1
|
| 2 |
+
00:00:00,510 --> 00:00:05,620
|
| 3 |
+
Hi and welcome to Chapter 13.1, where we actually start building LeNet in Keras.
|
| 4 |
+
|
| 5 |
+
2
|
| 6 |
+
00:00:05,670 --> 00:00:08,880
|
| 7 |
+
So before we jump into Keras, let's actually talk a bit about LeNet.
|
| 8 |
+
|
| 9 |
+
3
|
| 10 |
+
00:00:08,970 --> 00:00:13,980
|
| 11 |
+
LeNet is quite an old CNN; it was actually developed by Yann LeCun.
|
| 12 |
+
|
| 13 |
+
4
|
| 14 |
+
00:00:13,980 --> 00:00:14,930
|
| 15 |
+
That's how it gets its name:
|
| 16 |
+
|
| 17 |
+
5
|
| 18 |
+
00:00:14,940 --> 00:00:15,930
|
| 19 |
+
Le,
|
| 20 |
+
|
| 21 |
+
6
|
| 22 |
+
00:00:16,090 --> 00:00:16,470
|
| 23 |
+
LeCun.
|
| 24 |
+
|
| 25 |
+
7
|
| 26 |
+
00:00:16,470 --> 00:00:17,150
|
| 27 |
+
Sorry.
|
| 28 |
+
|
| 29 |
+
8
|
| 30 |
+
00:00:17,280 --> 00:00:18,830
|
| 31 |
+
And it was built in 1998.
|
| 32 |
+
|
| 33 |
+
9
|
| 34 |
+
00:00:18,870 --> 00:00:23,290
|
| 35 |
+
That's over 20 years ago and it was actually very very effective.
|
| 36 |
+
|
| 37 |
+
10
|
| 38 |
+
00:00:23,330 --> 00:00:27,540
|
| 39 |
+
On the MNIST dataset, for handwritten digit recognition.
|
| 40 |
+
|
| 41 |
+
11
|
| 42 |
+
00:00:27,930 --> 00:00:31,110
|
| 43 |
+
And you can visit this website to learn more about LeNet.
|
| 44 |
+
|
| 45 |
+
12
|
| 46 |
+
00:00:31,230 --> 00:00:35,490
|
| 47 |
+
This is basically an example of how LeNet was constructed.
|
| 48 |
+
|
| 49 |
+
13
|
| 50 |
+
00:00:35,610 --> 00:00:42,900
|
| 51 |
+
This was the input, a convolutional layer, the feature maps it generated, another convolutional layer, some more maps, and
|
| 52 |
+
|
| 53 |
+
14
|
| 54 |
+
00:00:43,040 --> 00:00:44,790
|
| 55 |
+
subsampling layers here as well.
|
| 56 |
+
|
| 57 |
+
15
|
| 58 |
+
00:00:44,830 --> 00:00:47,360
|
| 59 |
+
This here is max pooling, here and here.
|
| 60 |
+
|
| 61 |
+
16
|
| 62 |
+
00:00:47,700 --> 00:00:51,110
|
| 63 |
+
So let's look at LeNet here in action.
|
| 64 |
+
|
| 65 |
+
17
|
| 66 |
+
00:00:51,120 --> 00:00:52,520
|
| 67 |
+
This was a demo of it:
|
| 68 |
+
|
| 69 |
+
18
|
| 70 |
+
00:00:52,590 --> 00:00:58,250
|
| 71 |
+
it's like a 20-year-old program where we're actually using this early CNN and classifying these digits.
|
| 72 |
+
|
| 73 |
+
19
|
| 74 |
+
00:00:58,620 --> 00:01:00,780
|
| 75 |
+
So it is quite cool to see.
|
| 76 |
+
|
| 77 |
+
20
|
| 78 |
+
00:01:00,800 --> 00:01:07,480
|
| 79 |
+
So now let's jump into Keras and actually start building LeNet. Hi and welcome to
|
| 80 |
+
|
| 81 |
+
21
|
| 82 |
+
00:01:07,540 --> 00:01:08,690
|
| 83 |
+
the IPython notebook.
|
| 84 |
+
|
| 85 |
+
22
|
| 86 |
+
00:01:08,740 --> 00:01:12,480
|
| 87 |
+
We're actually going to build LeNet and test it on MNIST.
|
| 88 |
+
|
| 89 |
+
23
|
| 90 |
+
00:01:12,700 --> 00:01:17,070
|
| 91 |
+
So let's bring this up, and I'm already seeing a small typo right here.
|
| 92 |
+
|
| 93 |
+
24
|
| 94 |
+
00:01:17,410 --> 00:01:19,490
|
| 95 |
+
This should be MNIST.
|
| 96 |
+
|
| 97 |
+
25
|
| 98 |
+
00:01:19,510 --> 00:01:27,100
|
| 99 |
+
So basically we do our usual imports, loading images, transforming all the data, one-hot categorical encoding, and
|
| 100 |
+
|
| 101 |
+
26
|
| 102 |
+
00:01:27,100 --> 00:01:31,010
|
| 103 |
+
all these things: defining the number of classes, number of pixels.
|
| 104 |
+
|
| 105 |
+
27
|
| 106 |
+
00:01:31,330 --> 00:01:34,010
|
| 107 |
+
And now this here is LeNet.
|
| 108 |
+
|
| 109 |
+
28
|
| 110 |
+
00:01:34,160 --> 00:01:35,680
|
| 111 |
+
It doesn't look like much does it.
|
| 112 |
+
|
| 113 |
+
29
|
| 114 |
+
00:01:35,680 --> 00:01:39,200
|
| 115 |
+
However, this is the actual model that I just showed you in those slides.
|
| 116 |
+
|
| 117 |
+
30
|
| 118 |
+
00:01:39,370 --> 00:01:45,670
|
| 119 |
+
Basically it has two sets of conv-ReLU-pool, which is basically convolution, ReLU and pooling, then it has
|
| 120 |
+
|
| 121 |
+
31
|
| 122 |
+
00:01:45,670 --> 00:01:52,930
|
| 123 |
+
fully connected layers, and then basically we have our softmax and the usual optimizer.
|
| 124 |
+
|
| 125 |
+
32
|
| 126 |
+
00:01:53,170 --> 00:02:01,270
|
| 127 |
+
However, we are using Adadelta, and the created model has only 1.2 million parameters. Again, we train
|
| 128 |
+
|
| 129 |
+
33
|
| 130 |
+
00:02:01,270 --> 00:02:03,480
|
| 131 |
+
it on MNIST and see how it does.
|
| 132 |
+
|
| 133 |
+
34
|
| 134 |
+
00:02:03,540 --> 00:02:06,590
|
| 135 |
+
And we save the trained model, and these are the results.
|
| 136 |
+
|
| 137 |
+
35
|
| 138 |
+
00:02:06,610 --> 00:02:12,670
|
| 139 |
+
After 10 epochs we can see it actually got quite good, at 99.21 percent; that
|
| 140 |
+
|
| 141 |
+
36
|
| 142 |
+
00:02:12,670 --> 00:02:14,190
|
| 143 |
+
is actually really impressive.
|
| 144 |
+
|
| 145 |
+
37
|
| 146 |
+
00:02:14,440 --> 00:02:20,960
|
| 147 |
+
And if we go back to our presentation, this was, let's bring this up right here.
|
| 148 |
+
|
| 149 |
+
38
|
| 150 |
+
00:02:21,320 --> 00:02:22,860
|
| 151 |
+
This was LeNet's performance here:
|
| 152 |
+
|
| 153 |
+
39
|
| 154 |
+
00:02:22,860 --> 00:02:29,320
|
| 155 |
+
99.21 on MNIST. Now, the CNN we used before, what did it get?
|
| 156 |
+
|
| 157 |
+
40
|
| 158 |
+
00:02:29,330 --> 00:02:31,460
|
| 159 |
+
It got very close to that as well:
|
| 160 |
+
|
| 161 |
+
41
|
| 162 |
+
00:02:31,470 --> 00:02:32,920
|
| 163 |
+
99.15.
|
| 164 |
+
|
| 165 |
+
42
|
| 166 |
+
00:02:33,230 --> 00:02:41,790
|
| 167 |
+
But you can see this CNN was designed 20 years ago, and this simple CNN here actually got such good results.
|
| 168 |
+
|
| 169 |
+
43
|
| 170 |
+
00:02:42,170 --> 00:02:46,790
|
| 171 |
+
I can only imagine, on hardware from 20 years ago, how long this would have taken to train.
|
| 172 |
+
|
| 173 |
+
44
|
| 174 |
+
00:02:47,060 --> 00:02:52,060
|
| 175 |
+
And now we're running this on a tiny laptop, with just a CPU of course, no GPU, and it trains within
|
| 176 |
+
|
| 177 |
+
45
|
| 178 |
+
00:02:52,550 --> 00:02:54,080
|
| 179 |
+
maybe half an hour.
|
| 180 |
+
|
| 181 |
+
46
|
| 182 |
+
00:02:54,740 --> 00:02:55,710
|
| 183 |
+
So that's quite impressive.
|
| 184 |
+
|
| 185 |
+
47
|
| 186 |
+
00:02:55,800 --> 00:03:00,680
|
| 187 |
+
Now let's move on to AlexNet, which is a lot more complicated than LeNet.
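
Here is a minimal LeNet-style Keras sketch of the model just described: two conv-ReLU-pool blocks, fully connected layers, a softmax, and the Adadelta optimizer. The filter counts are an approximation of the lecture's notebook, chosen to land near the quoted 1.2 million parameters.

from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D, Flatten, Dense

model = Sequential()
# Block 1: convolution -> ReLU -> pooling
model.add(Conv2D(20, (5, 5), padding="same", activation="relu",
                 input_shape=(28, 28, 1)))
model.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2)))
# Block 2: convolution -> ReLU -> pooling
model.add(Conv2D(50, (5, 5), padding="same", activation="relu"))
model.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2)))
# Fully connected layers and the softmax classifier
model.add(Flatten())
model.add(Dense(500, activation="relu"))
model.add(Dense(10, activation="softmax"))

model.compile(loss="categorical_crossentropy",
              optimizer="adadelta",
              metrics=["accuracy"])
model.summary()  # roughly 1.2 million parameters with these sizes
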
|
13. Batch Normalization & LeNet, AlexNet Clothing Classifier/3. Build AlexNet and test on CIFAR10.srt
ADDED
|
@@ -0,0 +1,283 @@
|
|
|
|
|
|
|
|
|
| 1 |
+
1
|
| 2 |
+
00:00:00,270 --> 00:00:05,870
|
| 3 |
+
OK, hi and welcome to Chapter 13.2. We're actually going to build AlexNet now in Keras.
|
| 4 |
+
|
| 5 |
+
2
|
| 6 |
+
00:00:06,750 --> 00:00:10,550
|
| 7 |
+
So let's learn a bit about AlexNet. AlexNet was developed by Alex.
|
| 8 |
+
|
| 9 |
+
3
|
| 10 |
+
00:00:10,710 --> 00:00:11,460
|
| 11 |
+
Was it a coincidence.
|
| 12 |
+
|
| 13 |
+
4
|
| 14 |
+
00:00:11,460 --> 00:00:12,580
|
| 15 |
+
I don't know.
|
| 16 |
+
|
| 17 |
+
5
|
| 18 |
+
00:00:12,580 --> 00:00:13,640
|
| 19 |
+
It was not.
|
| 20 |
+
|
| 21 |
+
6
|
| 22 |
+
00:00:13,680 --> 00:00:16,580
|
| 23 |
+
Alex Krizhevsky, together with Ilya Sutskever,
|
| 24 |
+
|
| 25 |
+
7
|
| 26 |
+
00:00:16,890 --> 00:00:19,850
|
| 27 |
+
this person, personally I'm not even sure if it's a man or a woman,
|
| 28 |
+
|
| 29 |
+
8
|
| 30 |
+
00:00:19,890 --> 00:00:21,280
|
| 31 |
+
and Geoffrey Hinton from Toronto,
|
| 32 |
+
|
| 33 |
+
9
|
| 34 |
+
00:00:21,300 --> 00:00:26,650
|
| 35 |
+
a famous guy. We know it because it was the winner of the ImageNet competition in 2012.
|
| 36 |
+
|
| 37 |
+
10
|
| 38 |
+
00:00:26,640 --> 00:00:32,590
|
| 39 |
+
So AlexNet contains eight layers, the first five being convolutional and the last three being fully connected.
|
| 40 |
+
|
| 41 |
+
11
|
| 42 |
+
00:00:32,610 --> 00:00:34,880
|
| 43 |
+
So that's a bit of a different architecture.
|
| 44 |
+
|
| 45 |
+
12
|
| 46 |
+
00:00:34,920 --> 00:00:40,710
|
| 47 |
+
We have more layers than what we've used before, and it has 60 million parameters.
|
| 48 |
+
|
| 49 |
+
13
|
| 50 |
+
00:00:40,920 --> 00:00:46,970
|
| 51 |
+
As you can see, LeNet and the CNNs we've used before had roughly just over a million parameters.
|
| 52 |
+
|
| 53 |
+
14
|
| 54 |
+
00:00:47,280 --> 00:00:49,310
|
| 55 |
+
This one has quite a bit as I.
|
| 56 |
+
|
| 57 |
+
15
|
| 58 |
+
00:00:49,560 --> 00:00:52,300
|
| 59 |
+
It had to be trained on two GPUs for over a week.
|
| 60 |
+
|
| 61 |
+
16
|
| 62 |
+
00:00:52,610 --> 00:00:56,240
|
| 63 |
+
And that was on 2012 technology, which wasn't that bad,
|
| 64 |
+
|
| 65 |
+
17
|
| 66 |
+
00:00:56,250 --> 00:01:00,190
|
| 67 |
+
to be fair. This is the architecture diagram of it here.
|
| 68 |
+
|
| 69 |
+
18
|
| 70 |
+
00:01:00,300 --> 00:01:04,140
|
| 71 |
+
This is from the actual paper, and I'm not sure why a piece of it is actually cut off here.
|
| 72 |
+
|
| 73 |
+
19
|
| 74 |
+
00:01:04,170 --> 00:01:07,430
|
| 75 |
+
Maybe it was a mistake they made in LaTeX.
|
| 76 |
+
|
| 77 |
+
20
|
| 78 |
+
00:01:07,440 --> 00:01:08,270
|
| 79 |
+
I don't know.
|
| 80 |
+
|
| 81 |
+
21
|
| 82 |
+
00:01:08,550 --> 00:01:10,810
|
| 83 |
+
But this looks kind of strange doesn't it.
|
| 84 |
+
|
| 85 |
+
22
|
| 86 |
+
00:01:10,830 --> 00:01:14,480
|
| 87 |
+
Five convolutional layers here and then three fully connected layers here.
|
| 88 |
+
|
| 89 |
+
23
|
| 90 |
+
00:01:14,970 --> 00:01:18,470
|
| 91 |
+
So we can recreate this structure quite easily in Keras.
|
| 92 |
+
|
| 93 |
+
24
|
| 94 |
+
00:01:18,630 --> 00:01:20,620
|
| 95 |
+
And let's get into it now.
|
| 96 |
+
|
| 97 |
+
25
|
| 98 |
+
00:01:21,240 --> 00:01:27,480
|
| 99 |
+
This is actually another visualization someone did of this network that is a bit easier to read,
|
| 100 |
+
|
| 101 |
+
26
|
| 102 |
+
00:01:27,480 --> 00:01:31,500
|
| 103 |
+
with the numbers being clearer and the top half of it being visible.
|
| 104 |
+
|
| 105 |
+
27
|
| 106 |
+
00:01:34,890 --> 00:01:35,350
|
| 107 |
+
OK.
|
| 108 |
+
|
| 109 |
+
28
|
| 110 |
+
00:01:35,350 --> 00:01:38,940
|
| 111 |
+
So let's move on to building AlexNet in Keras.
|
| 112 |
+
|
| 113 |
+
29
|
| 114 |
+
00:01:38,970 --> 00:01:39,570
|
| 115 |
+
OK.
|
| 116 |
+
|
| 117 |
+
30
|
| 118 |
+
00:01:39,760 --> 00:01:41,990
|
| 119 |
+
So I have it open in this window here.
|
| 120 |
+
|
| 121 |
+
31
|
| 122 |
+
00:01:42,460 --> 00:01:44,650
|
| 123 |
+
So we're going to test it on CIFAR10.
|
| 124 |
+
|
| 125 |
+
32
|
| 126 |
+
00:01:44,780 --> 00:01:46,870
|
| 127 |
+
Now let's see how it goes.
|
| 128 |
+
|
| 129 |
+
33
|
| 130 |
+
00:01:46,900 --> 00:01:49,290
|
| 131 |
+
So let's load CIFAR10 as usual.
|
| 132 |
+
|
| 133 |
+
34
|
| 134 |
+
00:01:49,430 --> 00:01:54,730
|
| 135 |
+
So we load the dataset and check that it's in the correct format.
|
| 136 |
+
|
| 137 |
+
35
|
| 138 |
+
00:01:54,750 --> 00:01:56,030
|
| 139 |
+
It has one-hot encoding.
|
| 140 |
+
|
| 141 |
+
36
|
| 142 |
+
00:01:56,260 --> 00:01:57,180
|
| 143 |
+
That's good.
|
| 144 |
+
|
| 145 |
+
37
|
| 146 |
+
00:01:57,190 --> 00:01:59,030
|
| 147 |
+
Now this is the interesting part.
|
| 148 |
+
|
| 149 |
+
38
|
| 150 |
+
00:01:59,170 --> 00:02:06,470
|
| 151 |
+
Let's replicate our layers here to basically be in the same format as AlexNet, as you saw before.
|
| 152 |
+
|
| 153 |
+
39
|
| 154 |
+
00:02:06,780 --> 00:02:08,490
|
| 155 |
+
So let's get to it here.
|
| 156 |
+
|
| 157 |
+
40
|
| 158 |
+
00:02:09,130 --> 00:02:13,320
|
| 159 |
+
So we have all of these layers here: we have the five convolutional layers here,
|
| 160 |
+
|
| 161 |
+
41
|
| 162 |
+
00:02:13,720 --> 00:02:17,760
|
| 163 |
+
each with its number of filters, appropriately sized,
|
| 164 |
+
|
| 165 |
+
42
|
| 166 |
+
00:02:17,890 --> 00:02:19,880
|
| 167 |
+
and we have these three fully connected layers here,
|
| 168 |
+
|
| 169 |
+
43
|
| 170 |
+
00:02:19,930 --> 00:02:21,650
|
| 171 |
+
appropriately sized as well.
|
| 172 |
+
|
| 173 |
+
44
|
| 174 |
+
00:02:22,000 --> 00:02:24,710
|
| 175 |
+
And let's print out a summary of how this looks.
|
| 176 |
+
|
| 177 |
+
45
|
| 178 |
+
00:02:24,730 --> 00:02:29,380
|
| 179 |
+
And this is giving us 78 million parameters; wow, that is quite a bit.
|
| 180 |
+
|
| 181 |
+
46
|
| 182 |
+
00:02:29,380 --> 00:02:36,190
|
| 183 |
+
And let's see if we can actually start training this, and I'm pretty sure it's going to take
|
| 184 |
+
|
| 185 |
+
47
|
| 186 |
+
00:02:36,280 --> 00:02:37,280
|
| 187 |
+
a while.
|
| 188 |
+
|
| 189 |
+
48
|
| 190 |
+
00:02:37,300 --> 00:02:38,750
|
| 191 |
+
Let's see what happens.
|
| 192 |
+
|
| 193 |
+
49
|
| 194 |
+
00:02:42,850 --> 00:02:45,680
|
| 195 |
+
I've actually seen a problem here already.
|
| 196 |
+
|
| 197 |
+
50
|
| 198 |
+
00:02:45,890 --> 00:02:48,370
|
| 199 |
+
So I'm going to actually, no, it's fine.
|
| 200 |
+
|
| 201 |
+
51
|
| 202 |
+
00:02:50,630 --> 00:02:56,330
|
| 203 |
+
I initially thought that if I didn't have verbose=1 here it wouldn't actually start and show
|
| 204 |
+
|
| 205 |
+
52
|
| 206 |
+
00:02:56,440 --> 00:02:57,770
|
| 207 |
+
our training progress here.
|
| 208 |
+
|
| 209 |
+
53
|
| 210 |
+
00:02:57,890 --> 00:03:02,800
|
| 211 |
+
But apparently verbose=1 is the default in model.fit.
|
| 212 |
+
|
| 213 |
+
54
|
| 214 |
+
00:03:02,820 --> 00:03:04,860
|
| 215 |
+
So that's good to know.
|
| 216 |
+
|
| 217 |
+
55
|
| 218 |
+
00:03:04,880 --> 00:03:10,460
|
| 219 |
+
So as you can see, let's take a pause and look at this for a minute, or less than a minute.
|
| 220 |
+
|
| 221 |
+
56
|
| 222 |
+
00:03:10,460 --> 00:03:14,820
|
| 223 |
+
You can see we're slowly going through this.
|
| 224 |
+
|
| 225 |
+
57
|
| 226 |
+
00:03:14,900 --> 00:03:23,160
|
| 227 |
+
Each batch is taking forever to process using this model, and you can't fault the system.
|
| 228 |
+
|
| 229 |
+
58
|
| 230 |
+
00:03:23,180 --> 00:03:26,090
|
| 231 |
+
This is a huge model.
|
| 232 |
+
|
| 233 |
+
59
|
| 234 |
+
00:03:26,150 --> 00:03:31,520
|
| 235 |
+
So basically if you were to train it overnight, if you want to get a good sleep, you can probably
|
| 236 |
+
|
| 237 |
+
60
|
| 238 |
+
00:03:31,520 --> 00:03:38,270
|
| 239 |
+
train for maybe three epochs at least, maybe four pushing it, and you probably would get some very
|
| 240 |
+
|
| 241 |
+
61
|
| 242 |
+
00:03:38,270 --> 00:03:39,560
|
| 243 |
+
very good accuracy.
|
| 244 |
+
|
| 245 |
+
62
|
| 246 |
+
00:03:39,560 --> 00:03:43,610
|
| 247 |
+
I'm actually going to run this experiment at night, leave this training overnight, and see what happens.
|
| 248 |
+
|
| 249 |
+
63
|
| 250 |
+
00:03:44,840 --> 00:03:46,940
|
| 251 |
+
Then, in my presentation slides,
|
| 252 |
+
|
| 253 |
+
64
|
| 254 |
+
00:03:47,210 --> 00:03:52,640
|
| 255 |
+
I will add in a screenshot of my results after five epochs.
|
| 256 |
+
|
| 257 |
+
65
|
| 258 |
+
00:03:52,700 --> 00:03:57,090
|
| 259 |
+
So this concludes our chapter on building this famous CNN.
|
| 260 |
+
|
| 261 |
+
66
|
| 262 |
+
00:03:57,140 --> 00:04:02,840
|
| 263 |
+
The point of this lesson was that I wanted to show you that from diagrams like this we can easily start recreating
|
| 264 |
+
|
| 265 |
+
67
|
| 266 |
+
00:04:03,380 --> 00:04:04,520
|
| 267 |
+
stuff like this here.
|
| 268 |
+
|
| 269 |
+
68
|
| 270 |
+
00:04:05,740 --> 00:04:09,830
|
| 271 |
+
It's quite simple to use Keras's modular model building,
|
| 272 |
+
|
| 273 |
+
69
|
| 274 |
+
00:04:10,050 --> 00:04:17,950
|
| 275 |
+
I should say its modular system of adding layers, and it's so easy to just build these CNNs. I hope this
|
| 276 |
+
|
| 277 |
+
70
|
| 278 |
+
00:04:18,010 --> 00:04:20,320
|
| 279 |
+
encourages you to actually experiment and build your own
|
| 280 |
+
|
| 281 |
+
71
|
| 282 |
+
00:04:20,350 --> 00:04:20,980
|
| 283 |
+
CNNs.
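
Since the lecture builds the network from the paper's diagram, here is a hedged, condensed sketch of that eight-layer structure adapted to CIFAR10. The filter counts follow the original paper, but the exact kernel and dense sizes in the course notebook may differ (it reports 78 million parameters; this smaller-input variant has fewer), so treat this as an approximation, not the notebook's exact code.

from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D, Flatten, Dense, Dropout

model = Sequential()
# Five convolutional layers (pooling after layers 1, 2 and 5)
model.add(Conv2D(96, (3, 3), padding="same", activation="relu",
                 input_shape=(32, 32, 3)))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Conv2D(256, (3, 3), padding="same", activation="relu"))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Conv2D(384, (3, 3), padding="same", activation="relu"))
model.add(Conv2D(384, (3, 3), padding="same", activation="relu"))
model.add(Conv2D(256, (3, 3), padding="same", activation="relu"))
model.add(MaxPooling2D(pool_size=(2, 2)))
# Three fully connected layers
model.add(Flatten())
model.add(Dense(4096, activation="relu"))
model.add(Dropout(0.5))
model.add(Dense(4096, activation="relu"))
model.add(Dropout(0.5))
model.add(Dense(10, activation="softmax"))

model.compile(loss="categorical_crossentropy",
              optimizer="adam", metrics=["accuracy"])
model.summary()  # tens of millions of parameters, hence the slow CPU training
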
|
13. Batch Normalization & LeNet, AlexNet Clothing Classifier/4. Batch Normalization.srt
ADDED
|
@@ -0,0 +1,155 @@
|
|
|
|
|
|
|
|
| 1 |
+
1
|
| 2 |
+
00:00:00,570 --> 00:00:02,590
|
| 3 |
+
Hi and welcome to Chapter 13.3,
|
| 4 |
+
|
| 5 |
+
2
|
| 6 |
+
00:00:02,640 --> 00:00:10,010
|
| 7 |
+
where we talk about batch normalization. So batch norm was proposed by these guys in 2015, in their paper
|
| 8 |
+
|
| 9 |
+
3
|
| 10 |
+
00:00:10,070 --> 00:00:16,160
|
| 11 |
+
titled Batch Normalization: Accelerating Deep Network Training by Reducing Internal Covariate Shift, and
|
| 12 |
+
|
| 13 |
+
4
|
| 14 |
+
00:00:16,160 --> 00:00:20,830
|
| 15 |
+
we're going to see very shortly what shift actually is.
|
| 16 |
+
|
| 17 |
+
5
|
| 18 |
+
00:00:20,830 --> 00:00:23,430
|
| 19 |
+
So you already know a bit about normalization.
|
| 20 |
+
|
| 21 |
+
6
|
| 22 |
+
00:00:23,450 --> 00:00:25,930
|
| 23 |
+
Basically it's a way of standardizing our values.
|
| 24 |
+
|
| 25 |
+
7
|
| 26 |
+
00:00:26,120 --> 00:00:33,620
|
| 27 |
+
So we know image values range from 0 to 255, but we apply a normalization factor by dividing by 255 to make
|
| 28 |
+
|
| 29 |
+
8
|
| 30 |
+
00:00:33,650 --> 00:00:36,560
|
| 31 |
+
all values fall between 0 and 1.
|
| 32 |
+
|
| 33 |
+
9
|
| 34 |
+
00:00:36,860 --> 00:00:44,440
|
| 35 |
+
And we do this just so that one particular feature of the data, or row of data you could say, doesn't have
|
| 36 |
+
|
| 37 |
+
10
|
| 38 |
+
00:00:44,470 --> 00:00:47,870
|
| 39 |
+
a larger influence on the training.
|
| 40 |
+
|
| 41 |
+
11
|
| 42 |
+
00:00:48,350 --> 00:00:52,850
|
| 43 |
+
An example of this being a problem: in images you may not see it happening because everything
|
| 44 |
+
|
| 45 |
+
12
|
| 46 |
+
00:00:52,850 --> 00:00:58,940
|
| 47 |
+
is basically pixels, but imagine you have a neural net where one of the inputs is weight and another
|
| 48 |
+
|
| 49 |
+
13
|
| 50 |
+
00:00:58,940 --> 00:01:04,720
|
| 51 |
+
is height; with height in feet and weight in pounds, weight is going to have a much larger value as
|
| 52 |
+
|
| 53 |
+
14
|
| 54 |
+
00:01:04,730 --> 00:01:05,780
|
| 55 |
+
an input.
|
| 56 |
+
|
| 57 |
+
15
|
| 58 |
+
00:01:05,780 --> 00:01:07,850
|
| 59 |
+
So you have to standardize those values.
|
| 60 |
+
|
| 61 |
+
16
|
| 62 |
+
00:01:10,300 --> 00:01:16,670
|
| 63 |
+
Batch norm, however, is used to normalize the activations of an input tensor before passing it into the next
|
| 64 |
+
|
| 65 |
+
17
|
| 66 |
+
00:01:16,700 --> 00:01:17,940
|
| 67 |
+
layer of the network.
|
| 68 |
+
|
| 69 |
+
18
|
| 70 |
+
00:01:17,950 --> 00:01:24,880
|
| 71 |
+
You'll see what this means shortly. So remember, before we begin training we have to initialize
|
| 72 |
+
|
| 73 |
+
19
|
| 74 |
+
00:01:24,880 --> 00:01:32,320
|
| 75 |
+
random weights here; but what if one of the weights randomly became very large, just out of random chance,
|
| 76 |
+
|
| 77 |
+
20
|
| 78 |
+
00:01:32,350 --> 00:01:34,550
|
| 79 |
+
or what if during training
|
| 80 |
+
|
| 81 |
+
21
|
| 82 |
+
00:01:34,580 --> 00:01:39,810
|
| 83 |
+
it coincidentally just ended up having an update that made it extremely large?
|
| 84 |
+
|
| 85 |
+
22
|
| 86 |
+
00:01:40,000 --> 00:01:45,670
|
| 87 |
+
What happens now is that this cascades through the network, causing large weights and imbalances, and it
|
| 88 |
+
|
| 89 |
+
23
|
| 90 |
+
00:01:45,670 --> 00:01:47,650
|
| 91 |
+
basically sort of messes up our training.
|
| 92 |
+
|
| 93 |
+
24
|
| 94 |
+
00:01:47,740 --> 00:01:50,170
|
| 95 |
+
It causes what we call instability in our training.
|
| 96 |
+
|
| 97 |
+
25
|
| 98 |
+
00:01:52,020 --> 00:01:54,850
|
| 99 |
+
So batch norm is applied to a layer here.
|
| 100 |
+
|
| 101 |
+
26
|
| 102 |
+
00:01:55,110 --> 00:02:00,660
|
| 103 |
+
So if we apply batch norm to this layer, what happens is that it normalizes the output from the activation
|
| 104 |
+
|
| 105 |
+
27
|
| 106 |
+
00:02:00,660 --> 00:02:07,110
|
| 107 |
+
functions of this layer and it does this by multiplying it by a parameter and then adding another parameter
|
| 108 |
+
|
| 109 |
+
28
|
| 110 |
+
00:02:07,230 --> 00:02:12,150
|
| 111 |
+
to the result, and the outcome is that all the activations leaving the batch normalization layer
|
| 112 |
+
|
| 113 |
+
29
|
| 114 |
+
00:02:12,600 --> 00:02:15,910
|
| 115 |
+
will have approximately zero mean which is good.
|
| 116 |
+
|
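The "multiply by a parameter and then add another parameter" step is batch normalization's learnable scale and shift. A minimal NumPy sketch of the transform (a simplification: real batch norm tracks running statistics and learns gamma and beta during training):

import numpy as np

def batch_norm(x, gamma=1.0, beta=0.0, eps=1e-5):
    # normalize to zero mean and unit variance, then scale and shift
    x_hat = (x - x.mean(axis=0)) / np.sqrt(x.var(axis=0) + eps)
    return gamma * x_hat + beta

acts = np.random.randn(32, 10) * 50 + 7  # large, off-center activations
print(batch_norm(acts).mean(axis=0))     # approximately zero mean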
| 117 |
+
30
|
| 118 |
+
00:02:16,060 --> 00:02:21,360
|
| 119 |
+
This way the weights don't become imbalanced with extreme values, since the normalization is included in
|
| 120 |
+
|
| 121 |
+
31
|
| 122 |
+
00:02:21,360 --> 00:02:27,660
|
| 123 |
+
the gradient process. And this is how easy it is to add a batch normalization layer.
|
| 124 |
+
|
| 125 |
+
32
|
| 126 |
+
00:02:27,940 --> 00:02:33,410
|
| 127 |
+
So by adding it in here it's added to the layer right before this.
|
| 128 |
+
|
| 129 |
+
33
|
| 130 |
+
00:02:33,430 --> 00:02:40,720
|
| 131 |
+
So for this convolution layer, the activation outputs will now be passed to a batch normalization layer that
|
| 132 |
+
|
| 133 |
+
34
|
| 134 |
+
00:02:40,720 --> 00:02:42,640
|
| 135 |
+
will normalize it.
|
| 136 |
+
|
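As a sketch of the placement just described, in Keras a BatchNormalization layer simply goes between the convolution and the next layer (this mirrors the pattern used in the AlexNet notebook included in this commit):

from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Conv2D, BatchNormalization, Activation

model = Sequential()
model.add(Conv2D(32, (3, 3), padding='same', input_shape=(28, 28, 1)))
model.add(BatchNormalization())  # normalizes the conv outputs
model.add(Activation('relu'))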
| 137 |
+
35
|
| 138 |
+
00:02:42,970 --> 00:02:49,720
|
| 139 |
+
It reduces the number of epochs it takes for the network to converge, which is a great thing, and it actually
|
| 140 |
+
|
| 141 |
+
36
|
| 142 |
+
00:02:49,930 --> 00:02:58,150
|
| 143 |
+
adds in regularization, which reduces overfitting. However, one problem is that it slows training down significantly,
|
| 144 |
+
|
| 145 |
+
37
|
| 146 |
+
00:02:59,080 --> 00:03:01,410
|
| 147 |
+
though it does improve the stability of our training.
|
| 148 |
+
|
| 149 |
+
38
|
| 150 |
+
00:03:01,630 --> 00:03:06,700
|
| 151 |
+
It allows us to use larger learning rates as well, so we can sort of compensate for the speed
|
| 152 |
+
|
| 153 |
+
39
|
| 154 |
+
00:03:07,210 --> 00:03:09,460
|
| 155 |
+
by using a slightly larger learning rate.
|
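As a rough sketch of that compensation, assuming the model from the sketch above, one could pass a somewhat larger learning rate to the optimizer (the value here is illustrative, not from the course):

import tensorflow as tf

# batch norm stabilizes training, so a larger learning rate is often safe
model.compile(loss='categorical_crossentropy',
              optimizer=tf.keras.optimizers.Adam(learning_rate=3e-3),
              metrics=['accuracy'])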
13. Batch Normalization & LeNet, AlexNet Clothing Classifier/5. Build a Clothing & Apparel Classifier (Fashion MNIST).srt
ADDED
|
@@ -0,0 +1,351 @@
| 1 |
+
1
|
| 2 |
+
00:00:00,570 --> 00:00:06,540
|
| 3 |
+
Hi and welcome to section point four. We're going to create a clothing apparel recognition system using the
|
| 4 |
+
|
| 5 |
+
2
|
| 6 |
+
00:00:06,540 --> 00:00:08,010
|
| 7 |
+
Fashion MNIST dataset.
|
| 8 |
+
|
| 9 |
+
3
|
| 10 |
+
00:00:08,010 --> 00:00:09,300
|
| 11 |
+
So let's take a look at this data set.
|
| 12 |
+
|
| 13 |
+
4
|
| 14 |
+
00:00:09,300 --> 00:00:14,240
|
| 15 |
+
Now it has 10 categories: t-shirts, trousers, bags, ankle boots and so on.
|
| 16 |
+
|
| 17 |
+
5
|
| 18 |
+
00:00:14,610 --> 00:00:22,050
|
| 19 |
+
And basically it's a direct copy of MNIST in terms of images: exactly the same size, 28 by 28 grayscale,
|
| 20 |
+
|
| 21 |
+
6
|
| 22 |
+
00:00:22,110 --> 00:00:26,750
|
| 23 |
+
with 60,000 training examples and 10,000 test examples.
|
| 24 |
+
|
| 25 |
+
7
|
| 26 |
+
00:00:26,790 --> 00:00:29,650
|
| 27 |
+
So it serves as a direct replacement for MNIST.
|
| 28 |
+
|
| 29 |
+
8
|
| 30 |
+
00:00:29,670 --> 00:00:36,120
|
| 31 |
+
And the reason for this is because maybe about 10 to 15 years ago MNIST sort of became almost
|
| 32 |
+
|
| 33 |
+
9
|
| 34 |
+
00:00:36,120 --> 00:00:43,140
|
| 35 |
+
too easy. The top accuracies were basically ninety-nine point something, and it stopped being fun
|
| 36 |
+
|
| 37 |
+
10
|
| 38 |
+
00:00:43,140 --> 00:00:48,240
|
| 39 |
+
to compete on MNIST. We needed something more challenging, and Fashion MNIST was developed for this
|
| 40 |
+
|
| 41 |
+
11
|
| 42 |
+
00:00:48,240 --> 00:00:48,830
|
| 43 |
+
instead.
|
| 44 |
+
|
| 45 |
+
12
|
| 46 |
+
00:00:49,080 --> 00:00:51,670
|
| 47 |
+
So let's take a look at some of the images in Fashion MNIST.
|
| 48 |
+
|
| 49 |
+
13
|
| 50 |
+
00:00:51,810 --> 00:00:57,930
|
| 51 |
+
You can definitely see this is a pullover, a t-shirt, a shoe, a purse; some would definitely be easy
|
| 52 |
+
|
| 53 |
+
14
|
| 54 |
+
00:00:57,930 --> 00:01:03,210
|
| 55 |
+
for a CNN to make out. Purses and shoes definitely have a distinct shape, though a purse could maybe be mistaken
|
| 56 |
+
|
| 57 |
+
15
|
| 58 |
+
00:01:03,210 --> 00:01:09,110
|
| 59 |
+
for a shoe. Sometimes a dress could look like a T-shirt, depending on the shape.
|
| 60 |
+
|
| 61 |
+
16
|
| 62 |
+
00:01:09,300 --> 00:01:11,090
|
| 63 |
+
Basically these images look like X-rays.
|
| 64 |
+
|
| 65 |
+
17
|
| 66 |
+
00:01:11,100 --> 00:01:17,720
|
| 67 |
+
However, basically I think they were just thresholded and maybe brightened a little bit as well.
|
| 68 |
+
|
| 69 |
+
18
|
| 70 |
+
00:01:17,790 --> 00:01:21,200
|
| 71 |
+
I can probably figure out how to preprocess clothes like this.
|
| 72 |
+
|
| 73 |
+
19
|
| 74 |
+
00:01:21,440 --> 00:01:21,680
|
| 75 |
+
OK.
|
| 76 |
+
|
| 77 |
+
20
|
| 78 |
+
00:01:21,690 --> 00:01:22,010
|
| 79 |
+
Hi.
|
| 80 |
+
|
| 81 |
+
21
|
| 82 |
+
00:01:22,050 --> 00:01:22,740
|
| 83 |
+
Welcome to all.
|
| 84 |
+
|
| 85 |
+
22
|
| 86 |
+
00:01:22,770 --> 00:01:28,520
|
| 87 |
+
Welcome back to section point four, which uses the Fashion MNIST dataset, and here's what we're going to do.
|
| 88 |
+
|
| 89 |
+
23
|
| 90 |
+
00:01:28,530 --> 00:01:34,260
|
| 91 |
+
Two things I wanted to make sure of: when you download the Fashion MNIST dataset resources, please
|
| 92 |
+
|
| 93 |
+
24
|
| 94 |
+
00:01:34,260 --> 00:01:36,990
|
| 95 |
+
ensure that it's in the directory here, and it looks like this.
|
| 96 |
+
|
| 97 |
+
25
|
| 98 |
+
00:01:36,990 --> 00:01:42,690
|
| 99 |
+
We have a fashion_mnist folder, with an underscore, for Fashion MNIST, and you see these four files here.
|
| 100 |
+
|
| 101 |
+
26
|
| 102 |
+
00:01:42,930 --> 00:01:46,130
|
| 103 |
+
These are ubyte files, which we'll take a look at shortly.
|
| 104 |
+
|
| 105 |
+
27
|
| 106 |
+
00:01:46,260 --> 00:01:52,120
|
| 107 |
+
So to unpack ubyte files we use this function I found on GitHub from Tyler Neylon; credit to him.
|
| 108 |
+
|
| 109 |
+
28
|
| 110 |
+
00:01:52,440 --> 00:01:57,850
|
| 111 |
+
And basically what this does is take the file and get it into the x_train and y_train
|
| 112 |
+
|
| 113 |
+
29
|
| 114 |
+
00:01:57,890 --> 00:02:00,710
|
| 115 |
+
NumPy array format directly.
|
| 116 |
+
|
| 117 |
+
30
|
| 118 |
+
00:02:01,050 --> 00:02:06,870
|
| 119 |
+
And it's just like the format from Keras. It's a very quick and easy function to use.
|
| 120 |
+
|
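The unpacking function being described is the read_idx helper (credited to Tyler Neylon) that appears in full in the 13.4 Fashion MNIST notebook in this commit; usage is one call per ubyte file:

x_train = read_idx("./fashion_mnist/train-images-idx3-ubyte")  # (60000, 28, 28) uint8 images
y_train = read_idx("./fashion_mnist/train-labels-idx1-ubyte")  # (60000,) labels 0-9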
| 121 |
+
31
|
| 122 |
+
00:02:06,960 --> 00:02:08,140
|
| 123 |
+
It's super quick.
|
| 124 |
+
|
| 125 |
+
32
|
| 126 |
+
00:02:08,250 --> 00:02:11,150
|
| 127 |
+
I should say. And look at this.
|
| 128 |
+
|
| 129 |
+
33
|
| 130 |
+
00:02:11,190 --> 00:02:14,740
|
| 131 |
+
This looks exactly like this data set as it's supposed to be.
|
| 132 |
+
|
| 133 |
+
34
|
| 134 |
+
00:02:14,930 --> 00:02:16,640
|
| 135 |
+
So it'll be a direct drop in.
|
| 136 |
+
|
| 137 |
+
35
|
| 138 |
+
00:02:16,650 --> 00:02:18,910
|
| 139 |
+
So now let's take a look at some sample images here.
|
| 140 |
+
|
| 141 |
+
36
|
| 142 |
+
00:02:18,930 --> 00:02:20,060
|
| 143 |
+
Let's run this plot.
|
| 144 |
+
|
| 145 |
+
37
|
| 146 |
+
00:02:20,310 --> 00:02:21,040
|
| 147 |
+
This is good.
|
| 148 |
+
|
| 149 |
+
38
|
| 150 |
+
00:02:21,090 --> 00:02:25,800
|
| 151 |
+
A lot of times when you run matplotlib plotting functions for the first time it doesn't actually show the plot;
|
| 152 |
+
|
| 153 |
+
39
|
| 154 |
+
00:02:25,800 --> 00:02:27,770
|
| 155 |
+
it just tells you the figure size here.
|
| 156 |
+
|
| 157 |
+
40
|
| 158 |
+
00:02:27,840 --> 00:02:30,610
|
| 159 |
+
So you have to do this again to generate the plots.
|
| 160 |
+
|
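A minimal sketch of the usual fix for plots not appearing (assuming you are in a Jupyter notebook; the image index is arbitrary):

# enable inline rendering once per session
%matplotlib inline

import matplotlib.pyplot as plt
plt.imshow(x_train[0], cmap='gray')
plt.show()  # explicitly force the figure to render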
| 161 |
+
41
|
| 162 |
+
00:02:31,020 --> 00:02:36,680
|
| 163 |
+
So we see our shoes, sandals, purses, pants and coats.
|
| 164 |
+
|
| 165 |
+
42
|
| 166 |
+
00:02:37,210 --> 00:02:40,350
|
| 167 |
+
And now let's look at our approach.
|
| 168 |
+
|
| 169 |
+
43
|
| 170 |
+
00:02:40,560 --> 00:02:43,790
|
| 171 |
+
So you're pretty familiar with this by now.
|
| 172 |
+
|
| 173 |
+
44
|
| 174 |
+
00:02:44,100 --> 00:02:47,710
|
| 175 |
+
Basically we're just getting our data in shape and in the right format.
|
| 176 |
+
|
| 177 |
+
45
|
| 178 |
+
00:02:48,010 --> 00:02:49,640
|
| 179 |
+
One thing worth noting here as well:
|
| 180 |
+
|
| 181 |
+
46
|
| 182 |
+
00:02:49,770 --> 00:02:54,670
|
| 183 |
+
We print the number of classes just for information, and get the number of classes and number of pixels.
|
| 184 |
+
|
| 185 |
+
47
|
| 186 |
+
00:02:54,750 --> 00:02:58,270
|
| 187 |
+
We use it here to create our CNN.
|
| 188 |
+
|
| 189 |
+
48
|
| 190 |
+
00:02:58,290 --> 00:03:05,250
|
| 191 |
+
Now the difference with this CNN is that we've simply added batch normalization, and let's see how it
|
| 192 |
+
|
| 193 |
+
49
|
| 194 |
+
00:03:05,250 --> 00:03:06,340
|
| 195 |
+
affects training.
|
| 196 |
+
|
| 197 |
+
50
|
| 198 |
+
00:03:06,550 --> 00:03:11,340
|
| 199 |
+
It should have essentially zero effect on the CNN's parameter size.
|
| 200 |
+
|
| 201 |
+
51
|
| 202 |
+
00:03:11,340 --> 00:03:13,390
|
| 203 |
+
And as you can see it doesn't.
|
| 204 |
+
|
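A quick back-of-the-envelope check of why the parameter count barely moves: each BatchNormalization layer adds only 4 parameters per channel (gamma, beta, and a non-trainable moving mean and variance), tiny next to the conv weights. For example:

# a conv layer with 32 output channels gains from batch norm:
bn_params = 4 * 32  # = 128 parameters, only 64 of them trainable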
| 205 |
+
52
|
| 206 |
+
00:03:13,410 --> 00:03:19,450
|
| 207 |
+
So now let's train our model. It takes a while, so let's see after three epochs.
|
| 208 |
+
|
| 209 |
+
53
|
| 210 |
+
00:03:19,460 --> 00:03:22,110
|
| 211 |
+
You can see how long it takes to train an epoch here.
|
| 212 |
+
|
| 213 |
+
54
|
| 214 |
+
00:03:22,470 --> 00:03:25,660
|
| 215 |
+
And you can see that it is much slower than it was before.
|
| 216 |
+
|
| 217 |
+
55
|
| 218 |
+
00:03:25,680 --> 00:03:28,890
|
| 219 |
+
And the accuracy is pretty good after the first epoch.
|
| 220 |
+
|
| 221 |
+
56
|
| 222 |
+
00:03:28,890 --> 00:03:31,260
|
| 223 |
+
Let's see the results we get.
|
| 224 |
+
|
| 225 |
+
57
|
| 226 |
+
00:03:31,260 --> 00:03:31,580
|
| 227 |
+
All right.
|
| 228 |
+
|
| 229 |
+
58
|
| 230 |
+
00:03:31,590 --> 00:03:36,210
|
| 231 |
+
So after three epochs, we've seen that we have actually gotten pretty good accuracy.
|
| 232 |
+
|
| 233 |
+
59
|
| 234 |
+
00:03:36,210 --> 00:03:38,170
|
| 235 |
+
Ninety one point one percent.
|
| 236 |
+
|
| 237 |
+
60
|
| 238 |
+
00:03:38,250 --> 00:03:39,240
|
| 239 |
+
So that's quite good.
|
| 240 |
+
|
| 241 |
+
61
|
| 242 |
+
00:03:39,230 --> 00:03:44,250
|
| 243 |
+
After just three epochs on the Fashion MNIST dataset, which is supposed to be much more challenging than
|
| 244 |
+
|
| 245 |
+
62
|
| 246 |
+
00:03:44,250 --> 00:03:51,990
|
| 247 |
+
the MNIST dataset. Batch normalization was used, and I'm pretty sure it accounted for this improvement in accuracy.
|
| 248 |
+
|
| 249 |
+
63
|
| 250 |
+
00:03:51,990 --> 00:03:57,060
|
| 251 |
+
I actually have another notebook running right now where we removed batch normalization, and I'm going to show
|
| 252 |
+
|
| 253 |
+
64
|
| 254 |
+
00:03:57,060 --> 00:04:03,530
|
| 255 |
+
you the difference after three epochs: without batch normalization you get worse results.
|
| 256 |
+
|
| 257 |
+
65
|
| 258 |
+
00:04:03,540 --> 00:04:05,890
|
| 259 |
+
So for now let's just test this model.
|
| 260 |
+
|
| 261 |
+
66
|
| 262 |
+
00:04:06,090 --> 00:04:11,730
|
| 263 |
+
Let's test this 91 percent accurate model... oh, 'display' is not defined.
|
| 264 |
+
|
| 265 |
+
67
|
| 266 |
+
00:04:11,740 --> 00:04:16,340
|
| 267 |
+
Actually we do need to define 'display'; this was an old bit of code used before.
|
| 268 |
+
|
| 269 |
+
68
|
| 270 |
+
00:04:17,170 --> 00:04:17,920
|
| 271 |
+
So here we go.
|
| 272 |
+
|
| 273 |
+
69
|
| 274 |
+
00:04:17,930 --> 00:04:18,830
|
| 275 |
+
It's pretty cool.
|
| 276 |
+
|
| 277 |
+
70
|
| 278 |
+
00:04:19,030 --> 00:04:25,870
|
| 279 |
+
So it's a coat and it predicts coat. It actually got the shirts, the shoe and the dress right.
|
| 280 |
+
|
| 281 |
+
71
|
| 282 |
+
00:04:27,880 --> 00:04:28,500
|
| 283 |
+
All right.
|
| 284 |
+
|
| 285 |
+
72
|
| 286 |
+
00:04:28,510 --> 00:04:36,150
|
| 287 |
+
For now, with a 91 percent accurate model you would expect roughly one in 10 to be wrong.
|
| 288 |
+
|
| 289 |
+
73
|
| 290 |
+
00:04:36,160 --> 00:04:38,360
|
| 291 |
+
So let's see if we get any wrong, running it again.
|
| 292 |
+
|
| 293 |
+
74
|
| 294 |
+
00:04:38,380 --> 00:04:39,820
|
| 295 |
+
So far it's all good.
|
| 296 |
+
|
| 297 |
+
75
|
| 298 |
+
00:04:40,240 --> 00:04:40,500
|
| 299 |
+
OK.
|
| 300 |
+
|
| 301 |
+
76
|
| 302 |
+
00:04:40,500 --> 00:04:42,200
|
| 303 |
+
So that's actually quite good.
|
| 304 |
+
|
| 305 |
+
77
|
| 306 |
+
00:04:42,220 --> 00:04:44,810
|
| 307 |
+
Now let's see how this one is progressing.
|
| 308 |
+
|
| 309 |
+
78
|
| 310 |
+
00:04:45,050 --> 00:04:47,660
|
| 311 |
+
This one is almost at the end of one epoch.
|
| 312 |
+
|
| 313 |
+
79
|
| 314 |
+
00:04:47,830 --> 00:04:53,970
|
| 315 |
+
What I'll do is wait for this, and I'll show you guys the results after three epochs.
|
| 316 |
+
|
| 317 |
+
80
|
| 318 |
+
00:04:54,060 --> 00:04:59,190
|
| 319 |
+
OK, so here are the results of training our model without batch normalization.
|
| 320 |
+
|
| 321 |
+
81
|
| 322 |
+
00:04:59,430 --> 00:05:05,620
|
| 323 |
+
And as you can see, after three epochs we only got 90 percent accuracy, whereas with batch normalization we
|
| 324 |
+
|
| 325 |
+
82
|
| 326 |
+
00:05:05,620 --> 00:05:07,730
|
| 327 |
+
got 91 percent accuracy.
|
| 328 |
+
|
| 329 |
+
83
|
| 330 |
+
00:05:07,740 --> 00:05:13,380
|
| 331 |
+
Now it seems like a small difference, but batch normalization does make a big difference overall when
|
| 332 |
+
|
| 333 |
+
84
|
| 334 |
+
00:05:13,380 --> 00:05:17,730
|
| 335 |
+
you're training these networks. That one to two percent actually could mean the difference between a good model
|
| 336 |
+
|
| 337 |
+
85
|
| 338 |
+
00:05:17,730 --> 00:05:19,180
|
| 339 |
+
and a great model.
|
| 340 |
+
|
| 341 |
+
86
|
| 342 |
+
00:05:19,710 --> 00:05:26,760
|
| 343 |
+
So just one thing you can observe: look at the time it took to train one epoch, just over two minutes
|
| 344 |
+
|
| 345 |
+
87
|
| 346 |
+
00:05:26,760 --> 00:05:30,890
|
| 347 |
+
here, whereas with batch normalization it took just over three minutes.
|
| 348 |
+
|
| 349 |
+
88
|
| 350 |
+
00:05:31,020 --> 00:05:34,110
|
| 351 |
+
So you can definitely see how using batch normalization has a price.
|
13. Batch Normalization & LeNet, AlexNet Clothing Classifier/5.1 Fashion MNIST dataset.html
ADDED
|
@@ -0,0 +1 @@
|
| 1 |
+
<script type="text/javascript">window.location = "https://drive.google.com/file/d/1s_qF6Ael_-4-Ni7YpPTA72D-W9mB1bFu/view?usp=sharing";</script>
|
13. Building LeNet and AlexNet in Keras/13.1 Built LeNet and test on MNIST.ipynb
ADDED
|
@@ -0,0 +1,209 @@
| 1 |
+
{
|
| 2 |
+
"cells": [
|
| 3 |
+
{
|
| 4 |
+
"cell_type": "markdown",
|
| 5 |
+
"metadata": {},
|
| 6 |
+
"source": [
|
| 7 |
+
"### Let's construct LeNet in Keras!\n",
|
| 8 |
+
"\n",
|
| 9 |
+
"#### First let's load and prep our MNIST data"
|
| 10 |
+
]
|
| 11 |
+
},
|
| 12 |
+
{
|
| 13 |
+
"cell_type": "code",
|
| 14 |
+
"execution_count": 2,
|
| 15 |
+
"metadata": {},
|
| 16 |
+
"outputs": [],
|
| 17 |
+
"source": [
|
| 18 |
+
"from tensorflow.keras.preprocessing.image import ImageDataGenerator\n",
|
| 19 |
+
"from tensorflow.keras.models import Sequential\n",
|
| 20 |
+
"from tensorflow.keras.layers import Dense, Dropout, Activation, Flatten\n",
|
| 21 |
+
"from tensorflow.keras.layers import Conv2D, MaxPooling2D, ZeroPadding2D\n",
|
| 22 |
+
"from tensorflow.keras.layers import BatchNormalization\n",
|
| 23 |
+
"from tensorflow.keras.regularizers import l2\n",
|
| 24 |
+
"from tensorflow.keras.datasets import mnist\n",
|
| 25 |
+
"from tensorflow.keras.utils import to_categorical\n",
|
| 26 |
+
"import tensorflow as tf\n",
|
| 27 |
+
"\n",
|
| 28 |
+
"# loads the MNIST dataset\n",
|
| 29 |
+
"(x_train, y_train), (x_test, y_test) = mnist.load_data()\n",
|
| 30 |
+
"\n",
|
| 31 |
+
"# Lets store the number of rows and columns\n",
|
| 32 |
+
"img_rows = x_train[0].shape[0]\n",
|
| 33 |
+
"img_cols = x_train[1].shape[0]\n",
|
| 34 |
+
"\n",
|
| 35 |
+
"# Getting our date in the right 'shape' needed for Keras\n",
|
| 36 |
+
"# We need to add a 4th dimenion to our date thereby changing our\n",
|
| 37 |
+
"# Our original image shape of (60000,28,28) to (60000,28,28,1)\n",
|
| 38 |
+
"x_train = x_train.reshape(x_train.shape[0], img_rows, img_cols, 1)\n",
|
| 39 |
+
"x_test = x_test.reshape(x_test.shape[0], img_rows, img_cols, 1)\n",
|
| 40 |
+
"\n",
|
| 41 |
+
"# store the shape of a single image \n",
|
| 42 |
+
"input_shape = (img_rows, img_cols, 1)\n",
|
| 43 |
+
"\n",
|
| 44 |
+
"# change our image type to float32 data type\n",
|
| 45 |
+
"x_train = x_train.astype('float32')\n",
|
| 46 |
+
"x_test = x_test.astype('float32')\n",
|
| 47 |
+
"\n",
|
| 48 |
+
"# Normalize our data by changing the range from (0 to 255) to (0 to 1)\n",
|
| 49 |
+
"x_train /= 255\n",
|
| 50 |
+
"x_test /= 255\n",
|
| 51 |
+
"\n",
|
| 52 |
+
"# Now we one hot encode outputs\n",
|
| 53 |
+
"y_train = to_categorical(y_train)\n",
|
| 54 |
+
"y_test = to_categorical(y_test)\n",
|
| 55 |
+
"\n",
|
| 56 |
+
"num_classes = y_test.shape[1]\n",
|
| 57 |
+
"num_pixels = x_train.shape[1] * x_train.shape[2]"
|
| 58 |
+
]
|
| 59 |
+
},
|
| 60 |
+
{
|
| 61 |
+
"cell_type": "markdown",
|
| 62 |
+
"metadata": {},
|
| 63 |
+
"source": [
|
| 64 |
+
"### Now let's create our layers to replicate LeNet"
|
| 65 |
+
]
|
| 66 |
+
},
|
| 67 |
+
{
|
| 68 |
+
"cell_type": "code",
|
| 69 |
+
"execution_count": 4,
|
| 70 |
+
"metadata": {},
|
| 71 |
+
"outputs": [
|
| 72 |
+
{
|
| 73 |
+
"name": "stdout",
|
| 74 |
+
"output_type": "stream",
|
| 75 |
+
"text": [
|
| 76 |
+
"Model: \"sequential\"\n",
|
| 77 |
+
"_________________________________________________________________\n",
|
| 78 |
+
"Layer (type) Output Shape Param # \n",
|
| 79 |
+
"=================================================================\n",
|
| 80 |
+
"conv2d (Conv2D) (None, 28, 28, 20) 520 \n",
|
| 81 |
+
"_________________________________________________________________\n",
|
| 82 |
+
"activation (Activation) (None, 28, 28, 20) 0 \n",
|
| 83 |
+
"_________________________________________________________________\n",
|
| 84 |
+
"max_pooling2d (MaxPooling2D) (None, 14, 14, 20) 0 \n",
|
| 85 |
+
"_________________________________________________________________\n",
|
| 86 |
+
"conv2d_1 (Conv2D) (None, 14, 14, 50) 25050 \n",
|
| 87 |
+
"_________________________________________________________________\n",
|
| 88 |
+
"activation_1 (Activation) (None, 14, 14, 50) 0 \n",
|
| 89 |
+
"_________________________________________________________________\n",
|
| 90 |
+
"max_pooling2d_1 (MaxPooling2 (None, 7, 7, 50) 0 \n",
|
| 91 |
+
"_________________________________________________________________\n",
|
| 92 |
+
"flatten (Flatten) (None, 2450) 0 \n",
|
| 93 |
+
"_________________________________________________________________\n",
|
| 94 |
+
"dense (Dense) (None, 500) 1225500 \n",
|
| 95 |
+
"_________________________________________________________________\n",
|
| 96 |
+
"activation_2 (Activation) (None, 500) 0 \n",
|
| 97 |
+
"_________________________________________________________________\n",
|
| 98 |
+
"dense_1 (Dense) (None, 10) 5010 \n",
|
| 99 |
+
"_________________________________________________________________\n",
|
| 100 |
+
"activation_3 (Activation) (None, 10) 0 \n",
|
| 101 |
+
"=================================================================\n",
|
| 102 |
+
"Total params: 1,256,080\n",
|
| 103 |
+
"Trainable params: 1,256,080\n",
|
| 104 |
+
"Non-trainable params: 0\n",
|
| 105 |
+
"_________________________________________________________________\n",
|
| 106 |
+
"None\n"
|
| 107 |
+
]
|
| 108 |
+
}
|
| 109 |
+
],
|
| 110 |
+
"source": [
|
| 111 |
+
"# create model\n",
|
| 112 |
+
"model = Sequential()\n",
|
| 113 |
+
"\n",
|
| 114 |
+
"# 2 sets of CRP (Convolution, RELU, Pooling)\n",
|
| 115 |
+
"model.add(Conv2D(20, (5, 5),\n",
|
| 116 |
+
" padding = \"same\", \n",
|
| 117 |
+
" input_shape = input_shape))\n",
|
| 118 |
+
"model.add(Activation(\"relu\"))\n",
|
| 119 |
+
"model.add(MaxPooling2D(pool_size = (2, 2), strides = (2, 2)))\n",
|
| 120 |
+
"\n",
|
| 121 |
+
"model.add(Conv2D(50, (5, 5),\n",
|
| 122 |
+
" padding = \"same\"))\n",
|
| 123 |
+
"model.add(Activation(\"relu\"))\n",
|
| 124 |
+
"model.add(MaxPooling2D(pool_size = (2, 2), strides = (2, 2)))\n",
|
| 125 |
+
"\n",
|
| 126 |
+
"# Fully connected layers (w/ RELU)\n",
|
| 127 |
+
"model.add(Flatten())\n",
|
| 128 |
+
"model.add(Dense(500))\n",
|
| 129 |
+
"model.add(Activation(\"relu\"))\n",
|
| 130 |
+
"\n",
|
| 131 |
+
"# Softmax (for classification)\n",
|
| 132 |
+
"model.add(Dense(num_classes))\n",
|
| 133 |
+
"model.add(Activation(\"softmax\"))\n",
|
| 134 |
+
" \n",
|
| 135 |
+
"model.compile(loss = 'categorical_crossentropy',\n",
|
| 136 |
+
" optimizer = tf.keras.optimizers.Adadelta(),\n",
|
| 137 |
+
" metrics = ['accuracy'])\n",
|
| 138 |
+
" \n",
|
| 139 |
+
"print(model.summary())"
|
| 140 |
+
]
|
| 141 |
+
},
|
| 142 |
+
{
|
| 143 |
+
"cell_type": "markdown",
|
| 144 |
+
"metadata": {},
|
| 145 |
+
"source": [
|
| 146 |
+
"### Now let us train LeNet on our MNIST Dataset"
|
| 147 |
+
]
|
| 148 |
+
},
|
| 149 |
+
{
|
| 150 |
+
"cell_type": "code",
|
| 151 |
+
"execution_count": 5,
|
| 152 |
+
"metadata": {
|
| 153 |
+
"scrolled": true
|
| 154 |
+
},
|
| 155 |
+
"outputs": [
|
| 156 |
+
{
|
| 157 |
+
"name": "stdout",
|
| 158 |
+
"output_type": "stream",
|
| 159 |
+
"text": [
|
| 160 |
+
"Train on 60000 samples, validate on 10000 samples\n",
|
| 161 |
+
"60000/60000 [==============================] - 82s 1ms/sample - loss: 2.2876 - accuracy: 0.1511 - val_loss: 2.2647 - val_accuracy: 0.2432\n",
|
| 162 |
+
"10000/10000 [==============================] - 4s 436us/sample - loss: 2.2647 - accuracy: 0.2432\n",
|
| 163 |
+
"Test loss: 2.264678302001953\n",
|
| 164 |
+
"Test accuracy: 0.2432\n"
|
| 165 |
+
]
|
| 166 |
+
}
|
| 167 |
+
],
|
| 168 |
+
"source": [
|
| 169 |
+
"# Training Parameters\n",
|
| 170 |
+
"batch_size = 128\n",
|
| 171 |
+
"epochs = 1\n",
|
| 172 |
+
"\n",
|
| 173 |
+
"history = model.fit(x_train, y_train,\n",
|
| 174 |
+
" batch_size=batch_size,\n",
|
| 175 |
+
" epochs=epochs,\n",
|
| 176 |
+
" validation_data=(x_test, y_test),\n",
|
| 177 |
+
" shuffle=True)\n",
|
| 178 |
+
"\n",
|
| 179 |
+
"model.save(\"mnist_LeNet.h5\")\n",
|
| 180 |
+
"\n",
|
| 181 |
+
"# Evaluate the performance of our trained model\n",
|
| 182 |
+
"scores = model.evaluate(x_test, y_test, verbose=1)\n",
|
| 183 |
+
"print('Test loss:', scores[0])\n",
|
| 184 |
+
"print('Test accuracy:', scores[1])"
|
| 185 |
+
]
|
| 186 |
+
}
|
| 187 |
+
],
|
| 188 |
+
"metadata": {
|
| 189 |
+
"kernelspec": {
|
| 190 |
+
"display_name": "Python 3",
|
| 191 |
+
"language": "python",
|
| 192 |
+
"name": "python3"
|
| 193 |
+
},
|
| 194 |
+
"language_info": {
|
| 195 |
+
"codemirror_mode": {
|
| 196 |
+
"name": "ipython",
|
| 197 |
+
"version": 3
|
| 198 |
+
},
|
| 199 |
+
"file_extension": ".py",
|
| 200 |
+
"mimetype": "text/x-python",
|
| 201 |
+
"name": "python",
|
| 202 |
+
"nbconvert_exporter": "python",
|
| 203 |
+
"pygments_lexer": "ipython3",
|
| 204 |
+
"version": "3.7.4"
|
| 205 |
+
}
|
| 206 |
+
},
|
| 207 |
+
"nbformat": 4,
|
| 208 |
+
"nbformat_minor": 2
|
| 209 |
+
}
|
13. Building LeNet and AlexNet in Keras/13.2 Build AlexNet and test on CIFAR10.ipynb
ADDED
|
@@ -0,0 +1,266 @@
| 1 |
+
{
|
| 2 |
+
"cells": [
|
| 3 |
+
{
|
| 4 |
+
"cell_type": "markdown",
|
| 5 |
+
"metadata": {},
|
| 6 |
+
"source": [
|
| 7 |
+
"### Let's construct AlexNet in Keras!\n",
|
| 8 |
+
"\n",
|
| 9 |
+
"#### First let's load and prep our CIFAR10 data"
|
| 10 |
+
]
|
| 11 |
+
},
|
| 12 |
+
{
|
| 13 |
+
"cell_type": "code",
|
| 14 |
+
"execution_count": 2,
|
| 15 |
+
"metadata": {},
|
| 16 |
+
"outputs": [
|
| 17 |
+
{
|
| 18 |
+
"name": "stdout",
|
| 19 |
+
"output_type": "stream",
|
| 20 |
+
"text": [
|
| 21 |
+
"x_train shape: (50000, 32, 32, 3)\n",
|
| 22 |
+
"50000 train samples\n",
|
| 23 |
+
"10000 test samples\n"
|
| 24 |
+
]
|
| 25 |
+
}
|
| 26 |
+
],
|
| 27 |
+
"source": [
|
| 28 |
+
"from __future__ import print_function\n",
|
| 29 |
+
"import tensorflow as tf\n",
|
| 30 |
+
"from tensorflow.keras.datasets import cifar10\n",
|
| 31 |
+
"from tensorflow.keras.preprocessing.image import ImageDataGenerator\n",
|
| 32 |
+
"from tensorflow.keras.models import Sequential\n",
|
| 33 |
+
"from tensorflow.keras.layers import Dense, Dropout, Activation, Flatten\n",
|
| 34 |
+
"from tensorflow.keras.layers import Conv2D, MaxPooling2D, ZeroPadding2D\n",
|
| 35 |
+
"from tensorflow.keras.layers import BatchNormalization\n",
|
| 36 |
+
"from tensorflow.keras.regularizers import l2\n",
|
| 37 |
+
"from tensorflow.keras.utils import to_categorical\n",
|
| 38 |
+
"\n",
|
| 39 |
+
"# Loads the CIFAR dataset\n",
|
| 40 |
+
"(x_train, y_train), (x_test, y_test) = cifar10.load_data()\n",
|
| 41 |
+
"\n",
|
| 42 |
+
"# Display our data shape/dimensions\n",
|
| 43 |
+
"print('x_train shape:', x_train.shape)\n",
|
| 44 |
+
"print(x_train.shape[0], 'train samples')\n",
|
| 45 |
+
"print(x_test.shape[0], 'test samples')\n",
|
| 46 |
+
"\n",
|
| 47 |
+
"# Now we one hot encode outputs\n",
|
| 48 |
+
"num_classes = 10\n",
|
| 49 |
+
"y_train = to_categorical(y_train)\n",
|
| 50 |
+
"y_test = to_categorical(y_test)"
|
| 51 |
+
]
|
| 52 |
+
},
|
| 53 |
+
{
|
| 54 |
+
"cell_type": "markdown",
|
| 55 |
+
"metadata": {},
|
| 56 |
+
"source": [
|
| 57 |
+
"### Now let's create our layers to replicate AlexNet"
|
| 58 |
+
]
|
| 59 |
+
},
|
| 60 |
+
{
|
| 61 |
+
"cell_type": "code",
|
| 62 |
+
"execution_count": 3,
|
| 63 |
+
"metadata": {},
|
| 64 |
+
"outputs": [
|
| 65 |
+
{
|
| 66 |
+
"name": "stdout",
|
| 67 |
+
"output_type": "stream",
|
| 68 |
+
"text": [
|
| 69 |
+
"Model: \"sequential\"\n",
|
| 70 |
+
"_________________________________________________________________\n",
|
| 71 |
+
"Layer (type) Output Shape Param # \n",
|
| 72 |
+
"=================================================================\n",
|
| 73 |
+
"conv2d (Conv2D) (None, 32, 32, 96) 34944 \n",
|
| 74 |
+
"_________________________________________________________________\n",
|
| 75 |
+
"batch_normalization (BatchNo (None, 32, 32, 96) 384 \n",
|
| 76 |
+
"_________________________________________________________________\n",
|
| 77 |
+
"activation (Activation) (None, 32, 32, 96) 0 \n",
|
| 78 |
+
"_________________________________________________________________\n",
|
| 79 |
+
"max_pooling2d (MaxPooling2D) (None, 16, 16, 96) 0 \n",
|
| 80 |
+
"_________________________________________________________________\n",
|
| 81 |
+
"conv2d_1 (Conv2D) (None, 16, 16, 256) 614656 \n",
|
| 82 |
+
"_________________________________________________________________\n",
|
| 83 |
+
"batch_normalization_1 (Batch (None, 16, 16, 256) 1024 \n",
|
| 84 |
+
"_________________________________________________________________\n",
|
| 85 |
+
"activation_1 (Activation) (None, 16, 16, 256) 0 \n",
|
| 86 |
+
"_________________________________________________________________\n",
|
| 87 |
+
"max_pooling2d_1 (MaxPooling2 (None, 8, 8, 256) 0 \n",
|
| 88 |
+
"_________________________________________________________________\n",
|
| 89 |
+
"zero_padding2d (ZeroPadding2 (None, 10, 10, 256) 0 \n",
|
| 90 |
+
"_________________________________________________________________\n",
|
| 91 |
+
"conv2d_2 (Conv2D) (None, 10, 10, 512) 1180160 \n",
|
| 92 |
+
"_________________________________________________________________\n",
|
| 93 |
+
"batch_normalization_2 (Batch (None, 10, 10, 512) 2048 \n",
|
| 94 |
+
"_________________________________________________________________\n",
|
| 95 |
+
"activation_2 (Activation) (None, 10, 10, 512) 0 \n",
|
| 96 |
+
"_________________________________________________________________\n",
|
| 97 |
+
"max_pooling2d_2 (MaxPooling2 (None, 5, 5, 512) 0 \n",
|
| 98 |
+
"_________________________________________________________________\n",
|
| 99 |
+
"zero_padding2d_1 (ZeroPaddin (None, 7, 7, 512) 0 \n",
|
| 100 |
+
"_________________________________________________________________\n",
|
| 101 |
+
"conv2d_3 (Conv2D) (None, 7, 7, 1024) 4719616 \n",
|
| 102 |
+
"_________________________________________________________________\n",
|
| 103 |
+
"batch_normalization_3 (Batch (None, 7, 7, 1024) 4096 \n",
|
| 104 |
+
"_________________________________________________________________\n",
|
| 105 |
+
"activation_3 (Activation) (None, 7, 7, 1024) 0 \n",
|
| 106 |
+
"_________________________________________________________________\n",
|
| 107 |
+
"zero_padding2d_2 (ZeroPaddin (None, 9, 9, 1024) 0 \n",
|
| 108 |
+
"_________________________________________________________________\n",
|
| 109 |
+
"conv2d_4 (Conv2D) (None, 9, 9, 1024) 9438208 \n",
|
| 110 |
+
"_________________________________________________________________\n",
|
| 111 |
+
"batch_normalization_4 (Batch (None, 9, 9, 1024) 4096 \n",
|
| 112 |
+
"_________________________________________________________________\n",
|
| 113 |
+
"activation_4 (Activation) (None, 9, 9, 1024) 0 \n",
|
| 114 |
+
"_________________________________________________________________\n",
|
| 115 |
+
"max_pooling2d_3 (MaxPooling2 (None, 4, 4, 1024) 0 \n",
|
| 116 |
+
"_________________________________________________________________\n",
|
| 117 |
+
"flatten (Flatten) (None, 16384) 0 \n",
|
| 118 |
+
"_________________________________________________________________\n",
|
| 119 |
+
"dense (Dense) (None, 3072) 50334720 \n",
|
| 120 |
+
"_________________________________________________________________\n",
|
| 121 |
+
"batch_normalization_5 (Batch (None, 3072) 12288 \n",
|
| 122 |
+
"_________________________________________________________________\n",
|
| 123 |
+
"activation_5 (Activation) (None, 3072) 0 \n",
|
| 124 |
+
"_________________________________________________________________\n",
|
| 125 |
+
"dropout (Dropout) (None, 3072) 0 \n",
|
| 126 |
+
"_________________________________________________________________\n",
|
| 127 |
+
"dense_1 (Dense) (None, 4096) 12587008 \n",
|
| 128 |
+
"_________________________________________________________________\n",
|
| 129 |
+
"batch_normalization_6 (Batch (None, 4096) 16384 \n",
|
| 130 |
+
"_________________________________________________________________\n",
|
| 131 |
+
"activation_6 (Activation) (None, 4096) 0 \n",
|
| 132 |
+
"_________________________________________________________________\n",
|
| 133 |
+
"dropout_1 (Dropout) (None, 4096) 0 \n",
|
| 134 |
+
"_________________________________________________________________\n",
|
| 135 |
+
"dense_2 (Dense) (None, 10) 40970 \n",
|
| 136 |
+
"_________________________________________________________________\n",
|
| 137 |
+
"batch_normalization_7 (Batch (None, 10) 40 \n",
|
| 138 |
+
"_________________________________________________________________\n",
|
| 139 |
+
"activation_7 (Activation) (None, 10) 0 \n",
|
| 140 |
+
"=================================================================\n",
|
| 141 |
+
"Total params: 78,990,642\n",
|
| 142 |
+
"Trainable params: 78,970,462\n",
|
| 143 |
+
"Non-trainable params: 20,180\n",
|
| 144 |
+
"_________________________________________________________________\n",
|
| 145 |
+
"None\n"
|
| 146 |
+
]
|
| 147 |
+
}
|
| 148 |
+
],
|
| 149 |
+
"source": [
|
| 150 |
+
"l2_reg = 0\n",
|
| 151 |
+
"\n",
|
| 152 |
+
"# Initialize model\n",
|
| 153 |
+
"model = Sequential()\n",
|
| 154 |
+
"\n",
|
| 155 |
+
"# 1st Conv Layer \n",
|
| 156 |
+
"model.add(Conv2D(96, (11, 11), input_shape=x_train.shape[1:],\n",
|
| 157 |
+
" padding='same', kernel_regularizer=l2(l2_reg)))\n",
|
| 158 |
+
"model.add(BatchNormalization())\n",
|
| 159 |
+
"model.add(Activation('relu'))\n",
|
| 160 |
+
"model.add(MaxPooling2D(pool_size=(2, 2)))\n",
|
| 161 |
+
"\n",
|
| 162 |
+
"# 2nd Conv Layer \n",
|
| 163 |
+
"model.add(Conv2D(256, (5, 5), padding='same'))\n",
|
| 164 |
+
"model.add(BatchNormalization())\n",
|
| 165 |
+
"model.add(Activation('relu'))\n",
|
| 166 |
+
"model.add(MaxPooling2D(pool_size=(2, 2)))\n",
|
| 167 |
+
"\n",
|
| 168 |
+
"# 3rd Conv Layer \n",
|
| 169 |
+
"model.add(ZeroPadding2D((1, 1)))\n",
|
| 170 |
+
"model.add(Conv2D(512, (3, 3), padding='same'))\n",
|
| 171 |
+
"model.add(BatchNormalization())\n",
|
| 172 |
+
"model.add(Activation('relu'))\n",
|
| 173 |
+
"model.add(MaxPooling2D(pool_size=(2, 2)))\n",
|
| 174 |
+
"\n",
|
| 175 |
+
"# 4th Conv Layer \n",
|
| 176 |
+
"model.add(ZeroPadding2D((1, 1)))\n",
|
| 177 |
+
"model.add(Conv2D(1024, (3, 3), padding='same'))\n",
|
| 178 |
+
"model.add(BatchNormalization())\n",
|
| 179 |
+
"model.add(Activation('relu'))\n",
|
| 180 |
+
"\n",
|
| 181 |
+
"# 5th Conv Layer \n",
|
| 182 |
+
"model.add(ZeroPadding2D((1, 1)))\n",
|
| 183 |
+
"model.add(Conv2D(1024, (3, 3), padding='same'))\n",
|
| 184 |
+
"model.add(BatchNormalization())\n",
|
| 185 |
+
"model.add(Activation('relu'))\n",
|
| 186 |
+
"model.add(MaxPooling2D(pool_size=(2, 2)))\n",
|
| 187 |
+
"\n",
|
| 188 |
+
"# 1st FC Layer\n",
|
| 189 |
+
"model.add(Flatten())\n",
|
| 190 |
+
"model.add(Dense(3072))\n",
|
| 191 |
+
"model.add(BatchNormalization())\n",
|
| 192 |
+
"model.add(Activation('relu'))\n",
|
| 193 |
+
"model.add(Dropout(0.5))\n",
|
| 194 |
+
"\n",
|
| 195 |
+
"# 2nd FC Layer\n",
|
| 196 |
+
"model.add(Dense(4096))\n",
|
| 197 |
+
"model.add(BatchNormalization())\n",
|
| 198 |
+
"model.add(Activation('relu'))\n",
|
| 199 |
+
"model.add(Dropout(0.5))\n",
|
| 200 |
+
"\n",
|
| 201 |
+
"# 3rd FC Layer\n",
|
| 202 |
+
"model.add(Dense(num_classes))\n",
|
| 203 |
+
"model.add(BatchNormalization())\n",
|
| 204 |
+
"model.add(Activation('softmax'))\n",
|
| 205 |
+
"\n",
|
| 206 |
+
"print(model.summary())\n",
|
| 207 |
+
"\n",
|
| 208 |
+
"model.compile(loss = 'categorical_crossentropy',\n",
|
| 209 |
+
" optimizer = tf.keras.optimizers.Adadelta(),\n",
|
| 210 |
+
" metrics = ['accuracy'])\n"
|
| 211 |
+
]
|
| 212 |
+
},
|
| 213 |
+
{
|
| 214 |
+
"cell_type": "markdown",
|
| 215 |
+
"metadata": {},
|
| 216 |
+
"source": [
|
| 217 |
+
"### Now let us train AlexNet on our CIFAR10 Dataset"
|
| 218 |
+
]
|
| 219 |
+
},
|
| 220 |
+
{
|
| 221 |
+
"cell_type": "code",
|
| 222 |
+
"execution_count": null,
|
| 223 |
+
"metadata": {},
|
| 224 |
+
"outputs": [],
|
| 225 |
+
"source": [
|
| 226 |
+
"# Training Parameters\n",
|
| 227 |
+
"batch_size = 32\n",
|
| 228 |
+
"epochs = 1\n",
|
| 229 |
+
"\n",
|
| 230 |
+
"history = model.fit(x_train, y_train,\n",
|
| 231 |
+
" batch_size=batch_size,\n",
|
| 232 |
+
" epochs=epochs,\n",
|
| 233 |
+
" validation_data=(x_test, y_test),\n",
|
| 234 |
+
" shuffle=True)\n",
|
| 235 |
+
"\n",
|
| 236 |
+
"model.save(\"CIFAR10_AlexNet_1_Epoch.h5\")\n",
|
| 237 |
+
"\n",
|
| 238 |
+
"# Evaluate the performance of our trained model\n",
|
| 239 |
+
"scores = model.evaluate(x_test, y_test, verbose=1)\n",
|
| 240 |
+
"print('Test loss:', scores[0])\n",
|
| 241 |
+
"print('Test accuracy:', scores[1])"
|
| 242 |
+
]
|
| 243 |
+
}
|
| 244 |
+
],
|
| 245 |
+
"metadata": {
|
| 246 |
+
"kernelspec": {
|
| 247 |
+
"display_name": "Python 3",
|
| 248 |
+
"language": "python",
|
| 249 |
+
"name": "python3"
|
| 250 |
+
},
|
| 251 |
+
"language_info": {
|
| 252 |
+
"codemirror_mode": {
|
| 253 |
+
"name": "ipython",
|
| 254 |
+
"version": 3
|
| 255 |
+
},
|
| 256 |
+
"file_extension": ".py",
|
| 257 |
+
"mimetype": "text/x-python",
|
| 258 |
+
"name": "python",
|
| 259 |
+
"nbconvert_exporter": "python",
|
| 260 |
+
"pygments_lexer": "ipython3",
|
| 261 |
+
"version": "3.7.4"
|
| 262 |
+
}
|
| 263 |
+
},
|
| 264 |
+
"nbformat": 4,
|
| 265 |
+
"nbformat_minor": 2
|
| 266 |
+
}
|
13. Building LeNet and AlexNet in Keras/13.4 Fashion MNIST.ipynb
ADDED
|
@@ -0,0 +1,445 @@
| 1 |
+
{
|
| 2 |
+
"cells": [
|
| 3 |
+
{
|
| 4 |
+
"cell_type": "markdown",
|
| 5 |
+
"metadata": {},
|
| 6 |
+
"source": [
|
| 7 |
+
"### Fashion MNIST"
|
| 8 |
+
]
|
| 9 |
+
},
|
| 10 |
+
{
|
| 11 |
+
"cell_type": "markdown",
|
| 12 |
+
"metadata": {},
|
| 13 |
+
"source": [
|
| 14 |
+
"### After downloading our dataset we see it's coded in the ubyte form\n",
|
| 15 |
+
"- We then use the following function to read the data and return it as a numpy array"
|
| 16 |
+
]
|
| 17 |
+
},
|
| 18 |
+
{
|
| 19 |
+
"cell_type": "code",
|
| 20 |
+
"execution_count": 1,
|
| 21 |
+
"metadata": {},
|
| 22 |
+
"outputs": [],
|
| 23 |
+
"source": [
|
| 24 |
+
"import struct\n",
|
| 25 |
+
"import numpy as np\n",
|
| 26 |
+
"\n",
|
| 27 |
+
"def read_idx(filename):\n",
|
| 28 |
+
" \"\"\"Credit: https://gist.github.com/tylerneylon\"\"\"\n",
|
| 29 |
+
" with open(filename, 'rb') as f:\n",
|
| 30 |
+
" zero, data_type, dims = struct.unpack('>HBB', f.read(4))\n",
|
| 31 |
+
" shape = tuple(struct.unpack('>I', f.read(4))[0] for d in range(dims))\n",
|
| 32 |
+
" return np.frombuffer(f.read(), dtype=np.uint8).reshape(shape)"
|
| 33 |
+
]
|
| 34 |
+
},
|
| 35 |
+
{
|
| 36 |
+
"cell_type": "markdown",
|
| 37 |
+
"metadata": {},
|
| 38 |
+
"source": [
|
| 39 |
+
"### We use the function to extact our training and test datasets"
|
| 40 |
+
]
|
| 41 |
+
},
|
| 42 |
+
{
|
| 43 |
+
"cell_type": "code",
|
| 44 |
+
"execution_count": 2,
|
| 45 |
+
"metadata": {},
|
| 46 |
+
"outputs": [],
|
| 47 |
+
"source": [
|
| 48 |
+
"x_train = read_idx(\"./fashion_mnist/train-images-idx3-ubyte\")\n",
|
| 49 |
+
"y_train = read_idx(\"./fashion_mnist/train-labels-idx1-ubyte\")\n",
|
| 50 |
+
"x_test = read_idx(\"./fashion_mnist/t10k-images-idx3-ubyte\")\n",
|
| 51 |
+
"y_test = read_idx(\"./fashion_mnist/t10k-labels-idx1-ubyte\")"
|
| 52 |
+
]
|
| 53 |
+
},
|
| 54 |
+
{
|
| 55 |
+
"cell_type": "markdown",
|
| 56 |
+
"metadata": {},
|
| 57 |
+
"source": [
|
| 58 |
+
"### Let's inspect our dataset"
|
| 59 |
+
]
|
| 60 |
+
},
|
| 61 |
+
{
|
| 62 |
+
"cell_type": "code",
|
| 63 |
+
"execution_count": 3,
|
| 64 |
+
"metadata": {},
|
| 65 |
+
"outputs": [
|
| 66 |
+
{
|
| 67 |
+
"name": "stdout",
|
| 68 |
+
"output_type": "stream",
|
| 69 |
+
"text": [
|
| 70 |
+
"Initial shape or dimensions of x_train (60000, 28, 28)\n",
|
| 71 |
+
"Number of samples in our training data: 60000\n",
|
| 72 |
+
"Number of labels in our training data: 60000\n",
|
| 73 |
+
"Number of samples in our test data: 10000\n",
|
| 74 |
+
"Number of labels in our test data: 10000\n",
|
| 75 |
+
"\n",
|
| 76 |
+
"Dimensions of x_train:(28, 28)\n",
|
| 77 |
+
"Labels in x_train:(60000,)\n",
|
| 78 |
+
"\n",
|
| 79 |
+
"Dimensions of x_test:(28, 28)\n",
|
| 80 |
+
"Labels in y_test:(10000,)\n"
|
| 81 |
+
]
|
| 82 |
+
}
|
| 83 |
+
],
|
| 84 |
+
"source": [
|
| 85 |
+
"# printing the number of samples in x_train, x_test, y_train, y_test\n",
|
| 86 |
+
"print(\"Initial shape or dimensions of x_train\", str(x_train.shape))\n",
|
| 87 |
+
"\n",
|
| 88 |
+
"print (\"Number of samples in our training data: \" + str(len(x_train)))\n",
|
| 89 |
+
"print (\"Number of labels in our training data: \" + str(len(y_train)))\n",
|
| 90 |
+
"print (\"Number of samples in our test data: \" + str(len(x_test)))\n",
|
| 91 |
+
"print (\"Number of labels in our test data: \" + str(len(y_test)))\n",
|
| 92 |
+
"print()\n",
|
| 93 |
+
"print (\"Dimensions of x_train:\" + str(x_train[0].shape))\n",
|
| 94 |
+
"print (\"Labels in x_train:\" + str(y_train.shape))\n",
|
| 95 |
+
"print()\n",
|
| 96 |
+
"print (\"Dimensions of x_test:\" + str(x_test[0].shape))\n",
|
| 97 |
+
"print (\"Labels in y_test:\" + str(y_test.shape))"
|
| 98 |
+
]
|
| 99 |
+
},
|
| 100 |
+
{
|
| 101 |
+
"cell_type": "markdown",
|
| 102 |
+
"metadata": {},
|
| 103 |
+
"source": [
|
| 104 |
+
"### Let's view some sample images"
|
| 105 |
+
]
|
| 106 |
+
},
|
| 107 |
+
{
|
| 108 |
+
"cell_type": "code",
|
| 109 |
+
"execution_count": 5,
|
| 110 |
+
"metadata": {},
|
| 111 |
+
"outputs": [
|
| 112 |
+
{
|
| 113 |
+
"data": {
|
| 114 |
+
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAU4AAACuCAYAAABZYORfAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO2de7CVVf3/XwtTTCEFEUQEASUVrxiJ4j0HhW/e706Dfk2zKS0rHTOdkV+3+aUzlY6/asIySQlNM6HyRgqpgyFKeEHkGuIxFFRUtJLM9ftj7/ez1t7nOefs5+zL2c8+n9fMmbPPs9bezzr7s/fzfG7r83HeewzDMIzK6dPTCzAMw8gbduE0DMPIiF04DcMwMmIXTsMwjIzYhdMwDCMjduE0DMPISFUXTufcZOfccufcKufc1bValNGzmFxbF5NtbXDdzeN0zm0FrAAmAW3AIuA87/2LtVue0WhMrq2LybZ2fKyK5x4CrPLerwFwzt0JnAJ0KATnXI9k22+99dYA7LDDDgD885//TMbixw3gDe/9zo08YTfIjVybiDzIFTLKtlZyHTFiRPL4vffeA2CbbbYB4KOPPgJgy5YtyZw+ffqU/I7Httpqq5LfmtO3b99kjl579erV1S69Q7lWc+EcBrwS/d0GTCif5Jy7BLikivNkQm/of//73+TYkCFDAJgyZQoAzz77bDL21FNPAUEAEmSdeLmeL14jmlKuTU4e5AoVyLYecv3Wt76VPF6wYAEAu+66KxAuimvXrk3m9OvXDwgXw1dffTUZ+8QnPgFA//79S36PHj06maPXPuOMM6pdeodyrebC6VKOtbtDee+nA9OhvpqJc4Xl6IK5yy67JGOHH344ALfccgsA3/72t5OxDRs2AEFwDbqANjNNJVejpnQp23rIddSoUcnj4447DgiWnrTRDz/8MJmj794HH3wAwLx585IxXUQHDhwIwODBg9udb86cObVYdqdUExxqA4ZHf+8G/KO65RhNgMm1dTHZ1ohqLpyLgDHOuVHOuW2Ac4H6X+qNemNybV1MtjWi26a69/5D59xlwEPAVsCt3vulNVtZ9vWU/H3uuecmj2+88caSsWnTpiWPr7rqKgBuuOEGoFeb6EDzydWoHY2W7RFHHAHAdtttlxxra2sD4P333y/5HQdyFNxZuXIlAB//+MeTsRdfLMSxXn654H48/fTTAdh+++2TORMnTgTg6aefBmDp0tr/i9X4OPHe3w/cX6O1GE2CybV1MdnWhqounM1AeRT92GOPBUoj50J3Jd3lINzVDjnkECBE2T/2sfDWxI5rwzAqY/ny5QC89tprybEJEwpB/N122w0IgZ84ZUmR9sMOO6xkLoTv8P33F679jz/+OFD6fV+xYgUA//hH/dy3tuXSMAwjI7nUOJV6BKX5mhDSkGbNmtXuef/617/aHfv9738PwJe+9CUgaJymZRpGdSif8t///ndyTN9L+ShPO+00AJYsWdLu+crjjP2fsgQPOOAAAPbff38AfvrTnyZz3nzzzZK59cA0TsMwjIzkQuOUhqnIuZLUIWicJ554IgDPPfdcu+eX+0Hj7VlKspWGqUjgE088kczRncu0UMOonLFjxwJhAwrAxRdfDMDChQsBuP766wF4/fXXkzn6nv/nP/8BSjNdBg0aBMBFF10EwPnnnw+EbdUAO+20ExAS7zdu3FiT/yfGNE7DMIyM2IXTMAwjI7kw1aW6l+9Hh5Acqzlpya7lASSZADEPPfQQEIJEsale/nzDMLpGKUZvvfVWcizedw4hcBR/x8orJ8UoAf7tt98uOT5jxozk8ZlnngnAu+++2+21d4VpnIZhGBnJhcYpyoNEAGeddRYAjz76aOrc8vlQeifT3W3dunVAcFKPHz8+maOtWxYkai60lU+Vdi699NJk7KijjgJg/vz5APzsZz+ryTnjwGRv357bETvvXChhOXLkSCC9LqZq4ypFUN9DCBtU9P7GWy7FG2+8UfL3zJkzk8cXXnghAFOnTgXg2muvzf5PdIFpnIZhGBnJhcZZXiNz7733TsY2b94MhOIBmltpS5By7fHee+8F4LOf/WxyTBqnaRjNhVLJRLztTtqOCj6ccMIJQGnBCaWpKG1l06ZNydiPf/xjAP785z+XPK/BHQNyiVL6pE0OGDCg3RzJRzKMq7wLWXhxTEKFjOOiHgDbbrtt8ljf0x133BGAo48+Ohn7y1/+kuE/6RjTOA3DMDJiF07DMIyMdGmqO+duBU4ENnjv9yseGwjcBYwE1gJne+83dfQa1VJuIk+ePDl5XF5rM6s5rfky8RUkik2BPffcE4BVq1Zleu1mphnkWi3laWJxCln8GODUU08FYMyYMckxyVOVsYYNG5aMKcAgUz0tha1Z6WnZqv6DfscBNaEqSe+88w6QnvKn3UDa1w7w0ksvAaHXkIh3B6mqUj2pROO8DZhcduxq4BHv/RjgkeLfRr64DZNrq3IbJtu6UlFfdefcSOCP0d1rOXCM9369c24oMN97v1cFr1NV8yc5nWNHsqoZ1Ro1gwI455xzgJqktDzjvR/f9bTG0CxyrRWqSRA/Tgs6VILquip5W4GneE90pIU2lVyhNrKtp1xnz54NhOBQHOhTcrsCPnFdzeeffx5oXwVNFZFqTIdy7W5UfYj3fj1AURDtW80VsTayucLk2rpUJFuTa2XUPR2plu1Ghw8vNOhLq7VZa+JtYq+8UmhFLS00Huut1FKu2qwQb1qQX0y+r0oso9hPVr5NN63tc/k5YuTjlPbz1a9+FWj9zQ+1lGtn77lqdS5btqzd88q3XMbxBvmhlUAvX2escTaixXd3o+qvF9V9ir831G5JRg9icm1dTLY1pLsa5xzgAuAHxd+za7aiFHSXue+++wCYMmVKMnbJJQWrYu3atQBs2FD4PMRRUPmllCwf+1OUUCukfcTHtQVMYw888EA1/04zUze5Kpk51hylDUqLi8fKtYW07badUf78NK2y/Ng999yTPD7ooIOA9j70tFqwOaGh31lI1/jKt2FKrrHvWM/T+5umcQ4dOhSA3XffHQjf/47OW2u61Didc7OAJ4G9nHNtzrmLKLz5k5xzK4FJxb+NHGFybV1MtvWnS43Te39eB0PH1XgtRgMxubYuJtv6k4u96pdddhkAo0ePBoI5DkFFV+BGqnuchiJzQOZivN9YZrtMMKU5xOa85itpt4VN9bpRSVAlrkGgdCClgMlEj1OOZJJVar53xM033wzAgQcemBxTUOimm26q6rV7M2lBGgVz1L4mLV1MJrrG4u+iXlNjcevgRmJbLg3DMDKSC41TgRo1f9I2LQjN7nVMFVGUrgAhvSGtso3uZnJOK9k21pB0l1SQyEinsxqoeg/jSjlq4iW5xg27rrjiCiAE9O644452r1utpqkgo7bwxut/+OGHgdBUTOQsINSjdKZxyvrTWKx5xlYFlAaOhD4Xgwd3mGpcV0zjNAzDyEguNE4lvKuydIx8JdIWpHnGdzBVkNbzY62hfP57770HBF8nh
BSIH/7wh9X+Ky1NrBno/bzgggsAuPrqwtboG264IZmjwgxq+bpmzZpkbP369QB88YtfBILGWctUk6uuugoI1kVcKOKaa64pmduIpOreQHlRHX0X0wqBSCuN0TZM9SqKt0Y3EtM4DcMwMmIXTsMwjIzkwlTfd999gdCgKQ4KaN+4nMQy3dMCB6q1GQd+5IjWsbRy/WqtUKcKLC1Bnz59SoIrYsiQIUB4X1euXJmMHX/88UCQQVxHccWKFUBoxtfdmqh6bZmEcUM3uQgk+7TqV+WtWKxZW+WkvT9xYDcmrU6BXD+xa03HZKr3FKZxGoZhZCQXGucBBxwAhMCN9qkC7LTTTkDQEHV3ilMaNKZUpTihVqlKusvpjhg39ZKmWs8G93nno48+atc8DWD69OkAfOUrXwFKtX2lHymJ+fHHH0/Gxo0bBwQN48QTTwRKK/5rrLPq7OXpQ6p6BOFzoNSWtKpb5VpTtSlQvR2lD44YMQIIVlxaMLezTROyYOJanY3ENE7DMIyM5ELjlF/q5z//OVCqYejuJN+mSKvNqN/SMiHc6XRMc9KSbvPUd6aR9O/fn/Hjx5dsWVy8eDEAjz32GBBqmqr9MrS3IA4++OBkTHKQhrJ8+fJ2580ij5NOOgkIn5eYs88+u+LXMapDGqZkX157E4L1F39PheZJjtoa22hM4zQMw8hILjROoQrwcZEPNb2XT1N3qzhK15mfqnzLV3mjewi+lvKtYEaBLVu2sG7duqSTJAQN74UXXgDC+xonNUujkLUQa6zKltD8733vewAcddRRyRwVdJE84xqqqjCuz4Pqa8YairSeRx99tN3/pO258nWr18348aEFzXXXXQfALbfc0u75RudZB/oOS/bxXFl7aUnx8kvr8yRLJqYpKsA754Y75+Y555Y555Y65y4vHh/onJvrnFtZ/D2gq9cymgeTa2ticm0MlZjqHwJXeO/3AQ4FLnXOjcXajeYdk2trYnJtAJUUMl4PqDveZufcMmAYcApwTHHaDGA+8M26rLKIzLY4TUF7V8v3tcZqukzsNFNbz1dKiqq3xGafTMq0gFFeqaVcP/jgA1avXs3ll1/ebmyPPfYA4Ne//jVQGsRTBSvJIJaPTGSZZJqjpHWAJUuWAEF2mzZtSsZkhsuVIxM9/pwoxUnriFNiFHjS82W633777cmcNBO/p2mm72tnKLVP9SPiVLa0PeqiPCleQchGk8nHWezVPA5YiLUbbRlMrq2JybV+VHzhdM71A34HfM17/27a9ro0atluVBVz0pLbhe5I8RxpOToW39G0dUsaitIc4tqd0nZaSeMU9ZarmnI1I+W1NluJZvi+doY2PyjgG6eJ6fsqrTK2HvXdVQAorcZuI6goHck5tzUFIcz03isRz9qN5hyTa2ticq0/XWqcrnCr+iWwzHv/o2io4e1GlXYS+0PK28bK/5mWHC1fZVold/nU9NqxL06aqjTPVqCZ5GrUjmaUa1phFKURHXnkkUD69sq0tCIde/nll+uz2AqpxFQ/HJgKPO+cW1I8dg0FAfy22Hp0HXBWfZZo1AmTa2ticm0AlUTVnwA6cpBYu9GcYnJtTUyujSFXO4dkMiuQA8Eklxkt0z0ODimlRekmac7m8j3qcXUkjaXtKjIMIzvPPfccAOeffz6QXkNAbrMsdT07ml9rbK+6YRhGRnKhcUqLVHAnTifSnUpjSliOtcNyJ3N8d5PGWp7k3tbWlsxR47a0u6JhGNnR5gVZhvH3tTywq+90TNxKuicwjdMwDCMjudA4VbldmmeciC4/iOr8KSFWvW4APve5z5U8P+5Xcs899wAh2VZpEfE50qq0GIbRfbQ9Nq3mprRQpQ3Gc/TdTdNCG4ldEQzDMDKSC41TRRjko4wj3uXaqOowxsydOxcIhTviiLv8l9IwVQwirtuosX79+lX7rxiGEaHvdlrXSmml8Zh8oT0dbzCN0zAMIyN24TQMw8hILkz1tWvXAqGG38aNG5MxmeiDBxeqZKnFQZwEqwR2mfHxXneZ33JAp+1LVwtSrcMwjMrpLCH9j3/8IxD2rEOoEzFq1CgAnnrqqWRs0aJFAMycObPm68yCaZyGYRgZcXHjsrqfzLmNwPvAGw07ae0YRPXr3t17v3MtFtNMmFxNrk1IXeXa0AsngHPuae/9+K5nNhd5XXejyOv7k9d1N4q8vj/1XreZ6oZhGBmxC6dhGEZGeuLCOb0HzlkL8rruRpHX9yev624UeX1/6rruhvs4DcMw8o6Z6oZhGBmxC6dhGEZGGnbhdM5Nds4td86tcs5d3ajzZsU5N9w5N885t8w5t9Q5d3nx+EDn3Fzn3Mri7wE9vdZmIQ+yNblmx+TayXkb4eN0zm0FrAAmAW3AIuA87/2LdT95Roo9p4d67xc75/oDzwCnAv8LvOW9/0HxQzTAe//NHlxqU5AX2Zpcs2Fy7ZxGaZyHAKu892u891uAO4FTGnTuTHjv13vvFxcfbwaWAcMorHdGcdoMCsIxciJbk2tmTK6dUNWFM4MqPwx4Jfq7rXisqXHOjQTGAQuBId779VAQFjC451ZWXzKaaLmTbW+VK7T2d7aRcu32hbOoyv8EmAKMBc5zzo3taHrKsabOg3LO9QN+B3zNe/9uT6+nUWSUK+RMtr1VrtDa39lGy7XbPk7n3GHA//Hen1D8+1sA3vv/29Fc4Phur7QLyiu5x1XeVdZK/2vcQ0jH1GtIJeTqxBvNXgwii1yj+QvqtZ64oykEOVWKPg/lnRNrTNPLFbr1na2bXHNCh3Ktph5nmio/oXySc+4S4BJg/yrO1SUXXnghEGpuqk0GhKZPasgW19rUMbUbnTZtWj2X+XI9X7xGZJVrXRkwYIDOB8CGDRsqep5unKrT+uqrr3Y4V69dRaA0D3KFCmTbKLnmhA7lWs2FsyJV3ns/HZjunPsf4E9VnC8VaRRjxxYsDl0UpYFC6HypsfjCuXr16pLXUy+Tnu6i14NkkiuAc64mJlxsJUydOhWAk08+GQjFqx988MFkjiwJySruTKob4DvvvAPAo48+CsBdd92VzFmzZo3+l1osPw90Kdt6yLUVqSY41AYMj/7eDejQzvXe31/FuYzGkUmuRq4w2daIai6ci4AxzrlRzrltgHOBObVZltGDmFxbF5Ntjei2qe69/9A5dxnwELAVcKv3fmnNVlYhEyYUXDQ771zw4b722msAvPXWW8kc+bAUaNiyZUsyJtN86NChAIwePRqApUsb/q80BT0h16OPPhqAiy++ODmmFtBqDSsfdMynPvUpIJjjscyXL18OhKDQbrvtBsDdd9+dzLnvvvsA+O53v9vutWvg92w6muU72wpU1aytaH6bCd5imFxbF5NtbchFl8vO6N+/PwCDBg0qOS6NBULwQBpJHDiSxrnjjjsCobNeb9U464UCP8piABg4cCAAV155JQDr169PxtR9VPKVPN9+++1kjrTKcePGAaVa6bPPPltyvjFjxgAhIARw1llnAfCrX/0KgLa2tu7+e0Yvw6ojGYZhZCT3Gme5D0wpKnHCs3L51Ds9TqJWmov8
nuUJ10ZtSOutLX+y0oik/UOQleQorTL2T69cuRKAY445BoDtt98+Gdu8eXPJ8/faa6+S4wAvv1xI05P/0zROo1JM4zQMw8hI7tUraRDSHuQ3i5PctY1SPk75zQDWrVtXckzJ8kb9kV9Zsttnn32SMWmoH3zwAQDz588HSjXG008/HQi7guTzBJg8eTIAw4YV6lLIzx0/X0ib/etf/1rNv2P0IkzjNAzDyIhdOA3DMDKSe1NdgR4V8ujbty8AQ4YMaTdHgZ933w1Vp2S6yTSM02WM2pGWSD5x4kQAnnvuOSAEcCBsSHjyyScB+OQnPwmUpiOp4Idk/8YbbyRjCvqNHDkSCIGjl156KZmzYsUKoNSt09l6DUOYxmkYhpGR3Guc0gyUliRtMg4AKTCwadMmIGzPjInTXIzGILkceuihQKm2L1nJWpAGefzxoaTrnDmFbdYHHXQQADvttFMypkCg0tSUchSfQ9t107ZzqmZrWhqV0X0qeV9lNSowGDN+/HggbGLRFmulpEEIDCro+MILLyRjChbqHNdffz1QmopWyRpN4zQMw8hI7jVOJU3LfymNQr4tgBkzCj2btI1v//1DTWUlSCsJO+0uZ9QHaQLyTWoLJAQNUXf//fbbD4DZs2cnc6Qpylep7bIQZC3NQlZGvJVWRUXirZ7CNM36UP6+pml3nX0HJdfDDjsMCN9b+cQhxDdeeaVQszn2i0uz1Dm6W3fXNE7DMIyM2IXTMAwjI12a6s65W4ETgQ3e+/2KxwYCdwEjgbXA2d77TfVbZse89957QPumXDLRIASM1IdIO1YgmPhKSdHrtTrNINcDDjgACOZamtkkE/vvf/87ULqfXUEl7Vm/+eabkzG5alQ165lnngFKdw7ddNNNQNgX3yo0g2y7orMAzKWXXgqEFLRVq1YlY6ecUmjtLlNb39c777wzmaPvsj4zqn5VKZW4aSrROG8DJpcduxp4xHs/Bnik+LeRL27D5Nqq3IbJtq50qXF67x8rNnqPOQU4pvh4BjAf+GYN11UxSkMSSl+JNU6lpqhSeNzUS+lMqhcZO5JbmWaQqywAaZ6qlgQhHUnyVT3NtWvXJnOkdeh1tHcdQhBIY0pZmjVrVjJHmolSnKSB5p1mkG1HyMLT91RBwOuuuy6Z89BDDwHwpz8VejuqngSUWhUdoeCvUpcq4ZBDDkkey0KNN0uU092o+hDv/XoA7/1659zgjiZau9FcYXJtXSqSrcm1MuqejlTvdqNKZVECu3wecUqD7ljSLmMfp3yi+p2WmmK0p7ty3WGHHZLHe++9NwDPP/98u3naIimZLViwAAiaKARfmKq9xzIfMWIEEBKl9TmJU9FUNUufh3333TcZ660dAOr9fS33Hx555JEA/OY3v0mOqRJWGtJYZSHqexu/7j333FMylsaBBx4IwKc//WkAzjjjjGTsxhtvBDrXOLsbVX/dOTcUoPh7Qzdfx2guTK6ti8m2hnRX45wDXAD8oPh7dufT64d8koqWSvOMt1DKt5lWuEEJtUqc7+UJ8HWXq3xaEGqf7rnnngDsscceyZg0f1kLmnPHHXe0m6N6nEp4huDblK9bGok6Y0IoDiIUxYWW1DhrKtuKtiX26Vovk8b59a9/vaJzyDdaXrgnnqPtuY899hgAY8eOTcYUldfnQlbH/feH/nXysXZGl/+Zc24W8CSwl3OuzTl3EYU3f5JzbiUwqfi3kSNMrq2Lybb+VBJVP6+DoeNqvBajgZhcWxeTbf3J/V51obQVtZGNVXclVkv1T6u52VvSkHqak046KXksc1r1ONX2BEJKiII6kufVV4f0QwV1FPCJA08KOKmdhtJNVDEHQuK7KuzEFbWM6qkkkVzv+dSpU5Njv/jFLyp+vkz2XXfdNTmmIN+kSZOAEISE4LbTpgl9HvQZqBTbcmkYhpGRltE45ehXsCdOcnfOAeEOFifHS0O1NKTGoG2SELRBWQLSBiCkkihtRBZFnAy9ePFiIAQIVIcRYPXq1UCQuQKDb775ZjJn+PDhQNA8464BRoHyAI/ez4q2JVYQQFKDvBNOOCE5Jo2zkteWtXH22WcnY9I+ZWl++ctfTsZUHUnnO+200wCYNm1al+csOX+m2YZhGEbraJxKmBbSJCHcnaRVpmmc1mOmMSgxHYJ1IC1AGiCEBOcXX3wRCP7PuAWwtmiqLufMmTOTMaWnSZscMGAAUJqmdvvttwNBI4m1YaNANXVJK3nuH/7wBwBOPvnk5JjS0mQ1xKjoizRM+TGXLVuWzPnCF74ABP9nGkphU0vqrDEO0zgNwzAyYhdOwzCMjLSMqS6TWykqcXBIuwNE7PDW8+I6j0b9UEAHwi4t7SCKAzcyyeRWkaket0RRczYFjAYPDnUrFi5cCMCUKVOA4A6QiQZw9NFHA8HUj9sTG9mIXWWqDyDz+xvf+EYydvDBBwNhd49M5Hh3z3HHFdJNZarHLpQzzzwTCClHV1xxBRBcOjFKd9PnC4L5rnQ3fU6y7hg0jdMwDCMjLaNxKtAgzSSuolNOXAVcGmqcPG3UniOOOAIofZ+lKUq7lKYCoeq3EtllEcStfHfffXcgBAjiwI/krwCSnq/UNAifA2kvqhgOoX7nkiVLsv2jLUafPn1KLDQFfMqbG1522WXJHCWVq/J6vLHg+9//PgCXXFKoXCfNL65hoEZsDz74IACf//znkzEF+WRJCKWkxahSWmd75rtraZrGaRiGkZGW0TjlY5EGGWsm5cTJ7rob6k5m1AdVJZJlACGpXSlGw4YNS8a0TU6tf/W82JclLUN+T1X6h6AZqcq7/KexZiPNSNs6Y81IWk9v1zg/+uij1LSicp9g3Nr57rvvBkJK2Xe+851kLPZlxmguwMSJE4GgacZWSnweCD7weD1pFZPKkRYax0KyYBqnYRhGRlpG45SfTJHW2N9VTpwYq8ib6j0a9UH9fO67777k2Gc+8xkgRE3/9re/JWPqFyMt8sknnyx5DoREaVkLcXL8mjVrgODrVGQ11kKkqcq3qcrfEHxvvZltt92W0aNHJz2ZIFhykovqlko+EN5rae0q4gJw7LHHAiFyrsrv8ldD+Dzo+eecc067tck3mdYZtbzvWFrEXM+LMzmyUEk9zuHOuXnOuWXOuaXOucuLxwc65+Y651YWf5utmyNMrq2JybUxVGKqfwhc4b3fBzgUuNQ5NxZrN5p3TK6ticm1Abise7Sdc7OB/1f8OabYMW8oMN9732kGcT2aP4m5c+cCwWSPnc1xgy4IAQcIAQpV5lEp/zrxjPe+8p6lDaQZ5Bqbx5Kn2gGrrUU8R6lGCu7EJuWVV14JhNbDSnaPzfm4NmiVtKRc+/bt63fZZZeS90nBFLnEFJSN3V8Kzui9j81wmfQK0j399NNAaQNFVbk6//zzgdLmbTK/05q0ifKqTGnpVHLzPPzwwyV/l9GhXDP5OIu9mscBC7F2oy2DybU1qVausfJhlFLxhdM51w/4HfA17/27cSJxZ9S73ajQXUXpJ5059+O16w7WW6sjNVqucaKytBR9QV944YVkTPVVFbRTsG/UqFH
JHAUm9JpPPPFEMrbffvsBQTNSEEBaEIQUp7RE6WqqAjUDtZLrunXr+MlPfpLp3ArcqNpVnGYmOSooO2HCBKA0uKTkeBHLpZKtkeWyS5OltnNee+21Xb5eGhWlIznntqYghJne+3uLh63daM4xubYmJtf606XG6Qq3ql8Cy7z3P4qGmqZFMIQ7kXwusS+rnDgFQYUhpHX0FnpKrml3fyW7x1aC5Kf0I22lfemll5I50lqkcS5atCgZU6tgaZyyLOJtldpqqUIPlWplzUwzfF+V6qNYQhxTKOeBBx7o8vXqqf3feeed3XpeJab64cBU4HnnnLZRXENBAL8tth5dB5zVwfON5sTk2pqYXBtAJe2BnwA6uhVbu9GcYnJtTUyujaFldg4pJUWOaQUO0ojL7CtVors7CIxspAXhlBKmgBCEakqq3ymzPA7uqKWrgg9qgwAhzUVmvIKGcaBhzJgxQHwVIDwAAAO5SURBVDDVe2uA0MiO7VU3DMPISMtonHJIS6PoLB1JVXEgNPOyvcmNIU5HUhJznLgupH0qfUUJ07EloU0LqrjzyCOPJGM6psCEan1Kc4XQeviOO+4ATOM0Ksc0TsMwjIy0jMapGptKKZE2ksbKlSuTx9JQ46rwRv1IS/nRNlelDkHoE6WtcNp6Gfe2UWqR+gjFVoPSzPQ5kEUSz5GPU8T+T2mfeU+EN+qDaZyGYRgZaRmNUxqGigXERQPKkW8Ngs9t0KBBdVydIeL3Xlx88cVAaTEWVf1W3UbJN36+OhzK/xnLUAnw0iJVe/Ooo45K5sQV48G0S6NyTOM0DMPIiF04DcMwMtIyprpSSpR2ooZRacT72BV0ePbZZ+u3OCMhLeVHrRaUiB6z3XbbAXDyyScDsHHjxmRs3rx5AMyaNQuABQsWJGNKcdI++Ouvvx4IGyUqXZthpGEap2EYRkYyV4Cv6mTObQTeBzq+7Tcvg6h+3bt773euxWKaCZOrybUJqatcG3rhBHDOPd2sbQY6I6/rbhR5fX/yuu5Gkdf3p97rNlPdMAwjI3bhNAzDyEhPXDin98A5a0Fe190o8vr+5HXdjSKv709d191wH6dhGEbeMVPdMAwjI3bhNAzDyEjDLpzOucnOueXOuVXOuasbdd6sOOeGO+fmOeeWOeeWOucuLx4f6Jyb65xbWfw9oKfX2izkQbYm1+yYXDs5byN8nM65rYAVwCSgDVgEnOe9f7HuJ89Isef0UO/9Yudcf+AZ4FTgf4G3vPc/KH6IBnjvv9mDS20K8iJbk2s2TK6d0yiN8xBglfd+jfd+C3AncEqDzp0J7/167/3i4uPNwDJgGIX1zihOm0FBOEZOZGtyzYzJtRMadeEcBrwS/d1WPNbUOOdGAuOAhcAQ7/16KAgLGNxzK2sqcidbk2tFmFw7oVEXzrQ+z02dB+Wc6wf8Dvia9946uXVMrmRrcq0Yk2snNOrC2QYMj/7eDfhHg86dGefc1hSEMNN7f2/x8OtFf4r8Kht6an1NRm5ka3LNhMm1Exp14VwEjHHOjXLObQOcC8xp0Lkz4QrdxH4JLPPe/ygamgNcUHx8ATC70WtrUnIhW5NrZkyunZ23UTuHnHP/A9wIbAXc6r3/fkNOnBHn3BHA48DzgJrQXEPBb/JbYASwDjjLe/9W6ov0MvIgW5NrdkyunZzXtlwahmFkw3YOGYZhZMQunIZhGBmxC6dhGEZG7MJpGIaREbtwGoZhZMQunIZhGBmxC6dhGEZG/j/Y4FGAF4Wd6QAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<Figure size 432x288 with 6 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "# Let's do the same thing but using matplotlib to plot 6 images \n",
    "import matplotlib.pyplot as plt\n",
    "\n",
    "# Plots 6 images, note subplot's arguments are nrows,ncols,index\n",
    "# we set the color map to gray since our image dataset is grayscale\n",
    "plt.subplot(331)\n",
    "random_num = np.random.randint(0,len(x_train))\n",
    "plt.imshow(x_train[random_num], cmap=plt.get_cmap('gray'))\n",
    "\n",
    "plt.subplot(332)\n",
    "random_num = np.random.randint(0,len(x_train))\n",
    "plt.imshow(x_train[random_num], cmap=plt.get_cmap('gray'))\n",
    "\n",
    "plt.subplot(333)\n",
    "random_num = np.random.randint(0,len(x_train))\n",
    "plt.imshow(x_train[random_num], cmap=plt.get_cmap('gray'))\n",
    "\n",
    "plt.subplot(334)\n",
    "random_num = np.random.randint(0,len(x_train))\n",
    "plt.imshow(x_train[random_num], cmap=plt.get_cmap('gray'))\n",
    "\n",
    "plt.subplot(335)\n",
    "random_num = np.random.randint(0,len(x_train))\n",
    "plt.imshow(x_train[random_num], cmap=plt.get_cmap('gray'))\n",
    "\n",
    "plt.subplot(336)\n",
    "random_num = np.random.randint(0,len(x_train))\n",
    "plt.imshow(x_train[random_num], cmap=plt.get_cmap('gray'))\n",
    "\n",
    "# Display our plots\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Let's create our model"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 32,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "10"
      ]
     },
     "execution_count": 32,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "num_classes"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "x_train shape: (60000, 28, 28, 1)\n",
      "60000 train samples\n",
      "10000 test samples\n",
      "Number of Classes: 10\n",
      "Model: \"sequential\"\n",
      "_________________________________________________________________\n",
      "Layer (type)                 Output Shape              Param #   \n",
      "=================================================================\n",
      "conv2d (Conv2D)              (None, 26, 26, 32)        320       \n",
      "_________________________________________________________________\n",
      "batch_normalization (BatchNo (None, 26, 26, 32)        128       \n",
      "_________________________________________________________________\n",
      "conv2d_1 (Conv2D)            (None, 24, 24, 64)        18496     \n",
      "_________________________________________________________________\n",
      "batch_normalization_1 (Batch (None, 24, 24, 64)        256       \n",
      "_________________________________________________________________\n",
      "max_pooling2d (MaxPooling2D) (None, 12, 12, 64)        0         \n",
      "_________________________________________________________________\n",
      "dropout (Dropout)            (None, 12, 12, 64)        0         \n",
      "_________________________________________________________________\n",
      "flatten (Flatten)            (None, 9216)              0         \n",
      "_________________________________________________________________\n",
      "dense (Dense)                (None, 128)               1179776   \n",
      "_________________________________________________________________\n",
      "batch_normalization_2 (Batch (None, 128)               512       \n",
      "_________________________________________________________________\n",
      "dropout_1 (Dropout)          (None, 128)               0         \n",
      "_________________________________________________________________\n",
      "dense_1 (Dense)              (None, 10)                1290      \n",
      "_________________________________________________________________\n",
      "activation (Activation)      (None, 10)                0         \n",
      "=================================================================\n",
      "Total params: 1,200,778\n",
      "Trainable params: 1,200,330\n",
      "Non-trainable params: 448\n",
      "_________________________________________________________________\n",
      "None\n"
     ]
    }
   ],
   "source": [
    "from tensorflow.keras.utils import to_categorical\n",
    "import tensorflow as tf\n",
    "from tensorflow.keras.datasets import mnist\n",
    "from tensorflow.keras.models import Sequential\n",
    "from tensorflow.keras.layers import Dense, Dropout, Flatten, Activation\n",
    "from tensorflow.keras.layers import Conv2D, MaxPooling2D, BatchNormalization\n",
    "from tensorflow.keras.optimizers import SGD \n",
    "\n",
    "# Training Parameters\n",
    "batch_size = 128\n",
    "epochs = 1\n",
    "\n",
    "# Let's store the number of rows and columns\n",
    "img_rows = x_train[0].shape[0]\n",
    "img_cols = x_train[0].shape[1]\n",
    "\n",
    "# Getting our data in the right 'shape' needed for Keras\n",
    "# We need to add a 4th dimension to our data, thereby changing our\n",
    "# original image shape of (60000,28,28) to (60000,28,28,1)\n",
    "x_train = x_train.reshape(x_train.shape[0], img_rows, img_cols, 1)\n",
    "x_test = x_test.reshape(x_test.shape[0], img_rows, img_cols, 1)\n",
    "\n",
    "# store the shape of a single image \n",
    "input_shape = (img_rows, img_cols, 1)\n",
    "\n",
    "# change our image type to float32 data type\n",
    "x_train = x_train.astype('float32')\n",
    "x_test = x_test.astype('float32')\n",
    "\n",
    "# Normalize our data by changing the range from (0 to 255) to (0 to 1)\n",
    "x_train /= 255.0\n",
    "x_test /= 255.0\n",
    "\n",
    "print('x_train shape:', x_train.shape)\n",
    "print(x_train.shape[0], 'train samples')\n",
    "print(x_test.shape[0], 'test samples')\n",
    "\n",
    "# Now we one hot encode outputs\n",
    "y_train = to_categorical(y_train)\n",
    "y_test = to_categorical(y_test)\n",
    "\n",
    "num_classes = y_test.shape[1]\n",
    "# Let's count the number of columns in our one hot encoded matrix \n",
    "print (\"Number of Classes: \" + str(num_classes))\n",
    "\n",
    "num_pixels = x_train.shape[1] * x_train.shape[2]\n",
    "\n",
    "# create model\n",
    "model = Sequential()\n",
    "\n",
    "model.add(Conv2D(32, kernel_size=(3, 3),\n",
    "                 activation='relu',\n",
    "                 input_shape=input_shape))\n",
    "model.add(BatchNormalization())\n",
    "\n",
    "model.add(Conv2D(64, (3, 3), activation='relu'))\n",
    "model.add(BatchNormalization())\n",
    "\n",
    "model.add(MaxPooling2D(pool_size=(2, 2)))\n",
    "model.add(Dropout(0.25))\n",
    "\n",
    "model.add(Flatten())\n",
    "model.add(Dense(128, activation='relu'))\n",
    "model.add(BatchNormalization())\n",
    "\n",
    "model.add(Dropout(0.5))\n",
    "model.add(Dense(num_classes))\n",
    "model.add(Activation('softmax'))\n",
    "\n",
    "model.compile(loss = 'categorical_crossentropy',\n",
    "              optimizer = SGD(0.01),\n",
    "              metrics = ['accuracy'])\n",
    "\n",
    "print(model.summary())"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Let's train our model"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Train on 60000 samples, validate on 10000 samples\n",
      "60000/60000 [==============================] - 150s 3ms/sample - loss: 0.6229 - accuracy: 0.7877 - val_loss: 0.5973 - val_accuracy: 0.8136\n",
      "Test loss: 0.5973387075424195\n",
      "Test accuracy: 0.8136\n"
     ]
    }
   ],
   "source": [
    "history = model.fit(x_train, y_train,\n",
    "                    batch_size=batch_size,\n",
    "                    epochs=epochs,\n",
    "                    verbose=1,\n",
    "                    validation_data=(x_test, y_test))\n",
    "\n",
    "score = model.evaluate(x_test, y_test, verbose=0)\n",
    "print('Test loss:', score[0])\n",
    "print('Test accuracy:', score[1])"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Let's test out our model"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [],
   "source": [
    "import cv2\n",
    "import numpy as np\n",
    "\n",
    "def getLabel(input_class):\n",
    "    number = int(input_class)\n",
    "    if number == 0:\n",
    "        return \"T-shirt/top\"\n",
    "    if number == 1:\n",
    "        return \"Trouser\"\n",
    "    if number == 2:\n",
    "        return \"Pullover\"\n",
    "    if number == 3:\n",
    "        return \"Dress\"\n",
    "    if number == 4:\n",
    "        return \"Coat\"\n",
    "    if number == 5:\n",
    "        return \"Sandal\"\n",
    "    if number == 6:\n",
    "        return \"Shirt\"\n",
    "    if number == 7:\n",
    "        return \"Sneaker\"\n",
    "    if number == 8:\n",
    "        return \"Bag\"\n",
    "    if number == 9:\n",
    "        return \"Ankle boot\"\n",
    "\n",
    "# note: relies on the global imageL created in the loop below\n",
    "def draw_test(name, pred, actual, input_im):\n",
    "    BLACK = [0,0,0]\n",
    "\n",
    "    res = getLabel(pred)\n",
    "    actual = getLabel(actual) \n",
    "    expanded_image = cv2.copyMakeBorder(input_im, 0, 0, 0, 4*imageL.shape[0], cv2.BORDER_CONSTANT, value=BLACK)\n",
    "    expanded_image = cv2.cvtColor(expanded_image, cv2.COLOR_GRAY2BGR)\n",
    "    cv2.putText(expanded_image, \"Predicted - \" + str(res), (152, 70), cv2.FONT_HERSHEY_COMPLEX_SMALL, 1, (0,255,0), 1)\n",
    "    cv2.putText(expanded_image, \" Actual - \" + str(actual), (152, 90), cv2.FONT_HERSHEY_COMPLEX_SMALL, 1, (0,0,255), 1)\n",
    "    cv2.imshow(name, expanded_image)\n",
    "\n",
    "\n",
    "for i in range(0,10):\n",
    "    rand = np.random.randint(0,len(x_test))\n",
    "    input_im = x_test[rand]\n",
    "    actual = y_test[rand].argmax(axis=0)\n",
    "    imageL = cv2.resize(input_im, None, fx=4, fy=4, interpolation = cv2.INTER_CUBIC)\n",
    "    input_im = input_im.reshape(1,28,28,1) \n",
    "    \n",
    "    ## Get Prediction\n",
    "    res = str(model.predict_classes(input_im, 1, verbose = 0)[0])\n",
    "\n",
    "    draw_test(\"Prediction\", res, actual, imageL) \n",
    "    cv2.waitKey(0)\n",
    "\n",
    "cv2.destroyAllWindows()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "\n"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.4"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
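A note on the final test cell above: Sequential.predict_classes exists in the TensorFlow version this notebook was written against, but it was removed in later TensorFlow releases (2.6 onward). If you run the notebook on a newer TensorFlow, a minimal equivalent, assuming the same trained model and the preprocessed input_im from the loop above, is:

import numpy as np

# predict() returns one softmax row of shape (num_classes,) per input image;
# argmax over the last axis recovers the integer class label predict_classes used to return
def predict_classes(model, x):
    probs = model.predict(x, verbose=0)
    return np.argmax(probs, axis=-1)

res = str(predict_classes(model, input_im)[0])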
14. Advanced Image Classiers - ImageNet in Keras (VGG1619, InceptionV3, ResNet50)/1. Chapter Introduction.srt
ADDED
@@ -0,0 +1,35 @@
1
00:00:00,510 --> 00:00:06,140
Hi and welcome to Chapter 14 where we take a look at ImageNet and then we start using some pre-trained

2
00:00:06,140 --> 00:00:07,010
models in Keras.

3
00:00:07,020 --> 00:00:09,970
And those models include VGG16 and 19,

4
00:00:09,990 --> 00:00:15,750
InceptionV3, ResNet50 and MobileNet. So the contents of this chapter:

5
00:00:15,960 --> 00:00:20,070
basically we're going to experiment with those models I just mentioned, then we're going to understand a

6
00:00:20,070 --> 00:00:22,670
bit about the VGG family of models,

7
00:00:22,760 --> 00:00:27,060
then take a look at ResNet50 and then understand the Inception family as well.

8
00:00:27,060 --> 00:00:29,040
So it's going to be a very interesting chapter.

9
00:00:29,050 --> 00:00:29,860
Stay tuned.
14. Advanced Image Classiers - ImageNet in Keras (VGG1619, InceptionV3, ResNet50)/2. ImageNet - Experimenting with pre-trained Models in Keras (VGG16, ResNet50, Mobi.srt
ADDED
@@ -0,0 +1,575 @@
1
00:00:01,040 --> 00:00:05,960
Hi and welcome to chapter fourteen point one, where we start experimenting with some pre-trained models,

2
00:00:06,170 --> 00:00:12,140
and these models include VGG, Inception, ResNet and MobileNet, and I'm sure as a computer vision enthusiast

3
00:00:12,180 --> 00:00:14,250
you've come across some of these models before.

4
00:00:14,570 --> 00:00:19,580
And what we're going to do in Keras: we're actually going to use these models, these already trained models.

5
00:00:19,850 --> 00:00:22,630
But before we begin, let's talk about ImageNet.

6
00:00:22,740 --> 00:00:29,240
So what made these models famous was their performance on ImageNet. ImageNet is part of the

7
00:00:29,290 --> 00:00:36,440
ILSVRC, the ImageNet Large Scale Visual Recognition Challenge, and it's basically the benchmark

8
00:00:36,440 --> 00:00:39,550
and standard used to test modern-day CNNs.

9
00:00:39,560 --> 00:00:46,280
Now it comprises 1.2 million training images, a huge image dataset, in a thousand object classes,

10
00:00:46,820 --> 00:00:52,370
so you can see why it's so hard to get high accuracy in this challenge.

11
00:00:52,370 --> 00:00:54,200
This is an example of how ImageNet looks.

12
00:00:54,200 --> 00:00:56,300
You can go to the Web site,

13
00:00:56,330 --> 00:00:58,420
image-net.org, you can see it here.

14
00:00:58,760 --> 00:01:02,020
And basically you can explore the different training images here.

15
00:01:03,770 --> 00:01:07,410
This is a summary of the last few networks that actually won ImageNet.

16
00:01:07,640 --> 00:01:09,220
And this is the top-five error rate.

17
00:01:09,260 --> 00:01:16,510
And basically what top five means is: was the actual object that was in that picture,

18
00:01:16,610 --> 00:01:19,610
let's assume it's classifying this image here,

19
00:01:20,030 --> 00:01:21,390
how were its top five results?

20
00:01:21,440 --> 00:01:22,680
Was it in the top five?

21
00:01:22,850 --> 00:01:24,160
So we do some aggregation,

22
00:01:24,200 --> 00:01:26,610
and basically that's how you come up with this ranking here.

23
00:01:26,810 --> 00:01:32,360
So you can see ResNet was actually quite successful on ImageNet, and it had much fewer parameters

24
00:01:32,360 --> 00:01:33,560
than VGG.

25
00:01:33,590 --> 00:01:36,750
So in the next few chapters we're going to start talking about these models here.

26
00:01:38,480 --> 00:01:42,650
So what's cool about this is that Keras actually comes with these pre-trained models.

27
00:01:42,650 --> 00:01:47,750
We can import them into Keras and actually use and execute them, and then we can actually even tweak

28
00:01:47,750 --> 00:01:48,020
them,

29
00:01:48,020 --> 00:01:53,690
as you'll find out later for our own models. So loading them is quite straightforward, and we're going to

30
00:01:53,690 --> 00:01:54,640
do that now.

31
00:01:54,980 --> 00:01:55,350
OK.

32
00:01:55,370 --> 00:02:00,500
So let's go back to our virtual machine now, and we import

33
00:02:04,510 --> 00:02:12,870
from keras dot applications.

34
00:02:12,980 --> 00:02:15,470
OK, so welcome to our virtual machine.

35
00:02:15,600 --> 00:02:16,970
Let's open this chapter, here it is.

36
00:02:16,980 --> 00:02:22,620
I put it in the notebook file fourteen point one. Cool.

37
00:02:22,700 --> 00:02:28,090
So like I said, we're going to use some pre-trained models, so to import a pre-trained model,

38
00:02:28,130 --> 00:02:30,570
this line basically is quite important.

39
00:02:30,590 --> 00:02:37,580
It allows us to actually access the ResNet50 model, and then it's so easy to use, as follows.

40
00:02:37,610 --> 00:02:42,450
So we just load it here; we specify the weights we want, the pre-trained weights being weights that were

41
00:02:42,480 --> 00:02:48,200
trained on ImageNet, and we just run this, and it takes a little while to load because, as you saw

42
00:02:48,200 --> 00:02:53,670
in the previous slides, ResNet is a huge model that has 128 million parameters.

43
00:02:53,780 --> 00:02:58,240
So this one may take a bit of a while to load, but it's finished.

44
00:02:58,340 --> 00:03:05,110
And now we can just load an image, bring it into the format that we need, and test it.

45
00:03:05,120 --> 00:03:06,660
So we just go ResNet,

46
00:03:06,670 --> 00:03:10,400
this is what we called our model here, resnet_model dot predict.

47
00:03:10,400 --> 00:03:11,850
So let's see what happens.

48
00:03:14,680 --> 00:03:16,120
Run it again, oh, there we go.

49
00:03:16,390 --> 00:03:20,470
So it actually predicted that this image is a dog. Let's actually find this image.

50
00:03:20,470 --> 00:03:22,820
Now let's go back to this directory.

51
00:03:23,510 --> 00:03:26,300
The image we used here was the dog.

52
00:03:26,720 --> 00:03:28,620
It was actually my dog Samuel.

53
00:03:29,050 --> 00:03:30,760
So it actually got it right.

54
00:03:30,760 --> 00:03:32,630
So let's go back to our notebook.

55
00:03:32,740 --> 00:03:37,970
So it actually, it actually has the breed of the dog, which is quite cool.

56
00:03:38,430 --> 00:03:40,660
It basically classifies so many classes.

57
00:03:40,660 --> 00:03:41,620
Now he is a Labrador,

58
00:03:41,710 --> 00:03:42,790
and it's not here.

59
00:03:43,010 --> 00:03:48,060
However I'm guessing this Chesapeake, Chesapeake Bay Retriever,

60
00:03:48,100 --> 00:03:48,520
sorry,

61
00:03:48,580 --> 00:03:51,730
couldn't read it properly, is very close to how he looks.

62
00:03:51,730 --> 00:03:55,580
Not even sure what this is actually, I should check it out and see what this is.

63
00:03:55,990 --> 00:03:56,790
OK, I get it.

64
00:03:56,850 --> 00:03:58,960
I can see how he resembles that in that picture.

65
00:03:59,050 --> 00:04:00,090
That's fine.

66
00:04:00,730 --> 00:04:03,310
So now let's run a few test images here.

67
00:04:03,340 --> 00:04:04,360
We'll go through this here.

68
00:04:04,360 --> 00:04:05,790
I actually find this,

69
00:04:05,790 --> 00:04:07,590
this should not be here.

70
00:04:08,140 --> 00:04:08,660
Oh, I know.

71
00:04:08,680 --> 00:04:12,730
So I guess I'll go back to this after, so let's run this:

72
00:04:12,760 --> 00:04:15,640
load some images with OpenCV and draw our results

73
00:04:15,640 --> 00:04:17,140
on our test images.

74
00:04:17,620 --> 00:04:17,860
OK.

75
00:04:17,860 --> 00:04:20,080
So this is my wife.

76
00:04:20,170 --> 00:04:21,400
Actually this is a bikini competition

77
00:04:21,400 --> 00:04:25,510
she did two years ago, and it basically got it right:

78
00:04:25,870 --> 00:04:27,850
50 percent probability that it's a bikini.

79
00:04:27,880 --> 00:04:29,300
That's actually quite good.

80
00:04:29,580 --> 00:04:31,670
Let's see the images here.

81
00:04:32,410 --> 00:04:37,180
This one is a snail, and it got it with a probability of 15 percent, which is quite good, because everything

82
00:04:37,180 --> 00:04:40,170
else here, slug and puffer, do resemble a snail.

83
00:04:40,750 --> 00:04:42,450
Let's see what else.

84
00:04:42,460 --> 00:04:43,960
Back to my Labrador.

85
00:04:44,580 --> 00:04:45,020
OK.

86
00:04:45,100 --> 00:04:46,800
So we see it works quite well.

87
00:04:46,840 --> 00:04:53,590
You can now load your images here into this folder and experiment with images of yourself or anything

88
00:04:53,590 --> 00:04:56,610
you want and see how close it gets.

89
00:04:56,620 --> 00:05:00,070
So let's go back to our iPython notebook and see what else we can do.

90
00:05:00,070 --> 00:05:04,180
So how about we compare all three models on the same test images? To do that

91
00:05:04,180 --> 00:05:08,410
we have to load VGG16 and InceptionV3,

92
00:05:08,560 --> 00:05:10,030
and we do it exactly the same way.

93
00:05:10,060 --> 00:05:15,730
We just import them from keras applications, load the weights that were trained on ImageNet, and we just

94
00:05:15,790 --> 00:05:16,360
give it a name.

95
00:05:16,370 --> 00:05:17,570
VGG is here,

96
00:05:17,590 --> 00:05:18,890
and Inception is here.

97
00:05:19,180 --> 00:05:26,300
So let's now load all three models and test them on our test images.

98
00:05:26,430 --> 00:05:27,890
I actually haven't run this, good.

99
00:05:27,940 --> 00:05:29,720
Was wondering what was going on there.

100
00:05:31,480 --> 00:05:35,340
I guess again it takes maybe about 10 to 20 seconds

101
00:05:35,330 --> 00:05:38,660
to load these models. ResNet definitely takes longer than the others.

102
00:05:38,700 --> 00:05:44,600
And the reason I'm not loading ResNet again here is because we loaded it previously here.

103
00:05:44,840 --> 00:05:46,230
So let's wait till this is done.

104
00:05:46,300 --> 00:05:46,980
But it's two models

105
00:05:46,990 --> 00:05:49,530
we're loading at the same time now, so it's going to be a bit long.

106
00:05:54,510 --> 00:05:55,340
All right, there we go.

107
00:05:55,350 --> 00:05:56,310
It's done.

108
00:05:56,350 --> 00:05:59,350
So now let's compare all three models and see how they compare.

109
00:06:03,100 --> 00:06:03,470
OK.

110
00:06:03,520 --> 00:06:03,970
There we go.

111
00:06:03,970 --> 00:06:06,250
It basically came up three times like this.

112
00:06:06,250 --> 00:06:08,240
It's a bit messy,

113
00:06:08,350 --> 00:06:09,710
but we know what the images are.

114
00:06:09,820 --> 00:06:10,860
You've seen them before.

115
00:06:11,170 --> 00:06:13,730
So let's look at this one: VGG16 predicted that,

116
00:06:13,780 --> 00:06:15,510
and that was definitely correct.

117
00:06:15,520 --> 00:06:16,710
So did ResNet.

118
00:06:16,720 --> 00:06:18,050
What did Inception do?

119
00:06:18,230 --> 00:06:23,200
Stopwatch, ashcan and shield; I don't see any of those things in this image.

120
00:06:23,200 --> 00:06:24,500
So definitely ResNet,

121
00:06:24,520 --> 00:06:27,570
I mean Inception, has some issues with this image.

122
00:06:27,910 --> 00:06:30,160
So let's see what the others look like.

123
00:06:31,080 --> 00:06:31,560
OK.

124
00:06:31,630 --> 00:06:32,860
So this is a snail.

125
00:06:32,940 --> 00:06:35,560
It's the other image here.

126
00:06:35,560 --> 00:06:35,940
All right.

127
00:06:35,950 --> 00:06:37,080
Actually we can look at it here.

128
00:06:37,330 --> 00:06:39,930
So we see VGG said ball,

129
00:06:39,980 --> 00:06:45,500
and they even show what that is: mushroom and coral reef, not far off, close to a mushroom.

130
00:06:45,500 --> 00:06:49,640
Not exactly, though, but I can see something like that looking like a mushroom.

131
00:06:49,840 --> 00:06:51,900
Inception again got horrible results:

132
00:06:51,920 --> 00:06:55,300
chainlink fence, website, web sites as well,

133
00:06:55,510 --> 00:06:56,370
and sunglasses.

134
00:06:56,380 --> 00:06:57,120
So that's all

135
00:06:57,130 --> 00:07:01,300
definitely very off, and ResNet definitely got it right.

136
00:07:01,300 --> 00:07:04,490
So let's see what the image of my Labrador does.

137
00:07:04,550 --> 00:07:07,080
VGG said German short-haired pointer,

138
00:07:07,300 --> 00:07:13,180
guessing that's a dog, and Chesapeake Bay retriever again came up here.

139
00:07:13,240 --> 00:07:19,510
Let's see what Inception got. Inception definitely has some misses here, and we know what VGG

140
00:07:19,510 --> 00:07:20,910
got, to bring it up.

141
00:07:20,920 --> 00:07:24,660
Sorry, ResNet got even better results here.

142
00:07:25,150 --> 00:07:26,080
So it's pretty cool.

143
00:07:26,090 --> 00:07:30,280
So that's how we play, or we experiment, with these pre-trained models.

144
00:07:30,280 --> 00:07:35,680
They are very much useful in doing a lot of other things later on, as you'll find out.
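The load-and-predict flow this lecture narrates is short enough to sketch here; the full version lives in the 14.1 notebook later in this commit. A minimal standalone sketch, assuming a local test image at ./images/dog.jpg as in that notebook:

from tensorflow.keras.applications.resnet50 import ResNet50, preprocess_input, decode_predictions
from tensorflow.keras.preprocessing import image
import numpy as np

# Load ResNet50 with the weights that were trained on ImageNet
resnet_model = ResNet50(weights='imagenet')

# Bring one image into the (1, 224, 224, 3) batch format the model expects
img = image.load_img('./images/dog.jpg', target_size=(224, 224))
x = preprocess_input(np.expand_dims(image.img_to_array(img), axis=0))

# Print the top-5 ImageNet classes with their probabilities
preds = resnet_model.predict(x)
print(decode_predictions(preds, top=5)[0])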
14. Advanced Image Classiers - ImageNet in Keras (VGG1619, InceptionV3, ResNet50)/3. Understanding VGG16 and VGG19.srt
ADDED
@@ -0,0 +1,95 @@
1
00:00:00,870 --> 00:00:05,850
And welcome to chapter fourteen point two, where we try to understand exactly how VGG16 and VGG

2
00:00:05,850 --> 00:00:15,580
19 actually work. So the VGGNet model was unveiled by these guys here in a paper titled Very Deep

3
00:00:15,580 --> 00:00:18,510
Convolutional Networks for Large-Scale Image Recognition.

4
00:00:18,820 --> 00:00:25,480
And what's famous about VGG is that they use a sequence of only three by three convolutional layers throughout the

5
00:00:25,480 --> 00:00:26,350
network.

6
00:00:26,350 --> 00:00:32,830
I'll give you a picture of the diagram, the network diagram, shortly, but it was very clever and very

7
00:00:32,830 --> 00:00:34,260
unique in its design.

8
00:00:34,510 --> 00:00:36,010
And it was actually pretty deep.

9
00:00:36,010 --> 00:00:44,230
By 2014 standards, being 16 and 19 layers, hence the names VGG16 and 19, and due to depth and the number

10
00:00:44,230 --> 00:00:45,290
of parameters,

11
00:00:45,430 --> 00:00:49,130
VGG takes a very, very long time to train.

12
00:00:49,130 --> 00:00:54,360
I think these researchers took, I think it was two weeks using two GPUs, to train VGG.

13
00:00:54,700 --> 00:01:00,850
So it's not something you can actually train on your home system even today unless you're using GPUs.

14
00:01:01,120 --> 00:01:04,780
So this is basically the condensed diagram for VGG.

15
00:01:04,870 --> 00:01:09,290
These are the different versions, the A, A-LRN, B, C and so on.

16
00:01:09,310 --> 00:01:17,200
E was VGG19 and I think C or D are different flavors of VGG16, so you can see the sequence of convolutional

17
00:01:17,200 --> 00:01:18,150
layers here.

18
00:01:18,310 --> 00:01:19,670
Sixty-four filters each.

19
00:01:19,660 --> 00:01:29,080
Then it keeps getting more and more filters, 128, 256, 512, and basically max pooling all along, and then

20
00:01:29,080 --> 00:01:31,930
we have all these fully connected layers at the end here.

21
00:01:31,990 --> 00:01:34,000
So it's a relatively simple architecture.

22
00:01:34,000 --> 00:01:37,790
Pretty sure by now you can actually build this on your own.

23
00:01:37,960 --> 00:01:43,270
It just might be a lot of layers to add, but you can build this on your own in Keras.

24
00:01:43,300 --> 00:01:45,130
So now let's take a look at ResNet50.
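Since the lecture says you could build this yourself in Keras, a minimal sketch of the idea, stacked same-padded 3x3 convolutions with doubling filter counts and 2x2 max pooling between blocks, followed by the fully connected layers. This is an illustration of the VGG16 layout, not the pre-trained model:

from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Conv2D, MaxPooling2D, Flatten, Dense

# One VGG "block": a run of 3x3 convolutions followed by 2x2 max pooling
def vgg_block(model, filters, convs):
    for _ in range(convs):
        model.add(Conv2D(filters, (3, 3), padding='same', activation='relu'))
    model.add(MaxPooling2D((2, 2), strides=(2, 2)))

model = Sequential()
model.add(Conv2D(64, (3, 3), padding='same', activation='relu', input_shape=(224, 224, 3)))
model.add(Conv2D(64, (3, 3), padding='same', activation='relu'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
vgg_block(model, 128, 2)
vgg_block(model, 256, 3)
vgg_block(model, 512, 3)
vgg_block(model, 512, 3)   # 13 conv layers so far; 3 dense layers below make 16
model.add(Flatten())
model.add(Dense(4096, activation='relu'))
model.add(Dense(4096, activation='relu'))
model.add(Dense(1000, activation='softmax'))  # 1000 ImageNet classes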
14. Advanced Image Classiers - ImageNet in Keras (VGG1619, InceptionV3, ResNet50)/4. Understanding ResNet50.srt
ADDED
@@ -0,0 +1,83 @@
1
00:00:01,770 --> 00:00:08,100
Hi and welcome to chapter fourteen point three, where we talk about ResNet50. ResNet50 was the winner

2
00:00:08,100 --> 00:00:10,990
of the ILSVRC competition in 2015.

3
00:00:11,190 --> 00:00:16,710
It was developed by these researchers here, and ResNet stands for residual network,

4
00:00:16,860 --> 00:00:21,770
and that's because it uses the concept of residual learning, which helps it in a number of ways.

5
00:00:21,780 --> 00:00:23,740
And I'll talk about those ways now.

6
00:00:24,210 --> 00:00:30,690
So the beauty of ResNet was that it got around the problem of just making networks deeper

7
00:00:30,690 --> 00:00:33,620
and deeper, which was the trend back in 2015.

8
00:00:33,660 --> 00:00:39,120
And by doing that, by making it deeper and deeper, people were coming across accuracy becoming

9
00:00:39,120 --> 00:00:43,270
saturated and degrading rapidly during training, which was not good.

10
00:00:43,410 --> 00:00:50,010
So what ResNet did was that it introduced a shallower architecture with a deep residual learning framework,

11
00:00:50,730 --> 00:00:55,480
and ResNet, instead of learning low, mid and high level features,

12
00:00:55,660 --> 00:01:02,580
it learns residuals, by using shortcut connections directly connecting the input of the layer to the

13
00:01:02,670 --> 00:01:02,950
end:

14
00:01:02,970 --> 00:01:04,070
plus x itself.

15
00:01:04,380 --> 00:01:10,490
So this results in far easier training and resolves the degrading accuracy problem.

16
00:01:10,620 --> 00:01:14,730
This is what the ResNet residual module looks like here.

17
00:01:15,000 --> 00:01:15,960
You can take a look.

18
00:01:16,050 --> 00:01:17,610
Basically, this is the x, one layer

19
00:01:17,610 --> 00:01:22,370
here; it goes through all these layers here and is added back to the x plus

20
00:01:22,380 --> 00:01:25,260
one layer here.

21
00:01:25,600 --> 00:01:27,850
So now let's move on to InceptionV3.
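The shortcut idea described above, the block learns F(x) and outputs F(x) + x, maps directly onto the Keras functional API. A minimal identity-shortcut sketch, not the exact ResNet50 bottleneck block, and assuming the input already has `filters` channels so the addition is shape-compatible:

from tensorflow.keras import layers

def residual_block(x, filters):
    # F(x): two 3x3 convolutions, batch-normalized
    shortcut = x
    y = layers.Conv2D(filters, (3, 3), padding='same')(x)
    y = layers.BatchNormalization()(y)
    y = layers.Activation('relu')(y)
    y = layers.Conv2D(filters, (3, 3), padding='same')(y)
    y = layers.BatchNormalization()(y)
    # The residual connection: add the block's input back onto its output
    y = layers.Add()([shortcut, y])
    return layers.Activation('relu')(y)

Because the shortcut is an identity mapping, gradients flow straight through the addition, which is what lets ResNets train at depths where plain stacks degrade.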
14. Advanced Image Classiers - ImageNet in Keras (VGG1619, InceptionV3, ResNet50)/5. Understanding InceptionV3.srt
ADDED
@@ -0,0 +1,139 @@
1
00:00:00,600 --> 00:00:06,320
Hi and welcome to chapter fourteen point four, where we take a look at Inception, or the Inception models.

2
00:00:06,600 --> 00:00:13,130
And if you're familiar with Inception the movie, you may be familiar with the Inception model concept.

3
00:00:13,230 --> 00:00:18,360
So the Inception architecture was introduced by Szegedy, hopefully I pronounce it right,

4
00:00:18,510 --> 00:00:25,140
in 2014 with his paper titled Going Deeper with Convolutions, and the Inception model, or

5
00:00:25,140 --> 00:00:28,470
at least version one of it, was the winner of the

6
00:00:28,560 --> 00:00:33,390
ILSVRC 2014 competition, and it was implemented,

7
00:00:33,390 --> 00:00:36,390
the model actually implemented was GoogLeNet.

8
00:00:36,420 --> 00:00:38,920
So that was a Google research group team.

9
00:00:39,210 --> 00:00:43,680
And since then further improvements have been made, so that we have Inception version two and three.

10
00:00:43,950 --> 00:00:46,890
So now let's move on to what makes the Inception model special.

11
00:00:47,040 --> 00:00:54,150
OK, so the beauty of the Inception model is that it is a concatenation of multiple filter sizes.

12
00:00:54,270 --> 00:01:01,110
So instead of having one filter here, we actually have four filters in a series, or parallel

13
00:01:01,130 --> 00:01:04,200
I should say, between here, usually.

14
00:01:04,230 --> 00:01:07,350
So that's a unique architecture that we haven't seen before.

15
00:01:07,350 --> 00:01:12,760
Before, we had totally sequential models, where just one filter feeds into our next filter.

16
00:01:12,840 --> 00:01:15,070
Well actually, at least one layer feeds into the next layer.

17
00:01:15,420 --> 00:01:21,630
However, now we have these, four of these, three convolutions here, combined with a max pooling at the

18
00:01:21,630 --> 00:01:22,400
end here.

19
00:01:22,830 --> 00:01:29,190
And what this means here is that the number of parameters in the Inception model was much less than

20
00:01:29,250 --> 00:01:31,160
VGG16 and 19,

21
00:01:31,380 --> 00:01:34,450
which means that it was much quicker and faster to train.

22
00:01:34,890 --> 00:01:37,270
This is how the architecture actually looks.

23
00:01:37,290 --> 00:01:43,290
These basically parallel layers here are the concatenated multiple filter sizes that we've seen here

24
00:01:43,290 --> 00:01:43,930
before.

25
00:01:44,460 --> 00:01:50,780
And basically Inception is just a series, a sequence, of these parallel filters over and over

26
00:01:50,790 --> 00:01:56,520
here. You can take a look at the paper here, and it's quite good to read these papers because you get

27
00:01:56,580 --> 00:02:01,800
very familiar with advanced CNN concepts.

28
00:02:02,040 --> 00:02:04,920
So that concludes these chapters on these models.

29
00:02:04,920 --> 00:02:06,480
I hope you find them informative.

30
00:02:06,480 --> 00:02:12,570
They were a very brief overview, and even so, I haven't described this bunch of models in much detail.

31
00:02:12,810 --> 00:02:17,930
I used to know Inception and VGG quite well; honestly I don't know that much about ResNet.

32
00:02:17,970 --> 00:02:19,340
However, I will read the paper.

33
00:02:19,380 --> 00:02:24,660
So if you have any questions about ResNet and the rest of these models, feel free to ask me, and of

34
00:02:24,660 --> 00:02:25,770
course comment.

35
00:02:25,930 --> 00:02:26,340
Thank you.
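The "four parallel filters concatenated" description above translates to something like the original GoogLeNet inception module. A minimal sketch in the Keras functional API; the branch filter counts are illustrative placeholders, not the paper's values:

from tensorflow.keras import layers

def inception_module(x, f1, f3, f5, fpool):
    # Four parallel branches over the same input, concatenated along the channel axis
    b1 = layers.Conv2D(f1, (1, 1), padding='same', activation='relu')(x)
    b3 = layers.Conv2D(f3, (3, 3), padding='same', activation='relu')(x)
    b5 = layers.Conv2D(f5, (5, 5), padding='same', activation='relu')(x)
    bp = layers.MaxPooling2D((3, 3), strides=(1, 1), padding='same')(x)
    bp = layers.Conv2D(fpool, (1, 1), padding='same', activation='relu')(bp)
    return layers.Concatenate()([b1, b3, b5, bp])

Because every branch uses 'same' padding and stride 1, all four outputs share the input's spatial size and only their channel counts differ, which is what makes the concatenation legal; the 1x1 convolutions keep the parameter count down, as the transcript notes.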
14. ImageNet and Pretrained Models VGG16_ResNet50_InceptionV3/14.1 Experimenting with pre-trained Models in Keras.ipynb
ADDED
@@ -0,0 +1,227 @@
| 1 |
+
{
|
| 2 |
+
"cells": [
|
| 3 |
+
{
|
| 4 |
+
"cell_type": "markdown",
|
| 5 |
+
"metadata": {},
|
| 6 |
+
"source": [
|
| 7 |
+
"### Let's start with loading ResNet50 "
|
| 8 |
+
]
|
| 9 |
+
},
|
| 10 |
+
{
|
| 11 |
+
"cell_type": "code",
|
| 12 |
+
"execution_count": 1,
|
| 13 |
+
"metadata": {},
|
| 14 |
+
"outputs": [],
|
| 15 |
+
"source": [
|
| 16 |
+
"from tensorflow.keras.applications.resnet50 import ResNet50\n",
|
| 17 |
+
"from tensorflow.keras.preprocessing import image\n",
|
| 18 |
+
"from tensorflow.keras.applications.resnet50 import preprocess_input, decode_predictions\n",
|
| 19 |
+
"import numpy as np\n",
|
| 20 |
+
"\n",
|
| 21 |
+
"resnet_model = ResNet50(weights='imagenet')"
|
| 22 |
+
]
|
| 23 |
+
},
|
| 24 |
+
{
|
| 25 |
+
"cell_type": "code",
|
| 26 |
+
"execution_count": 2,
|
| 27 |
+
"metadata": {},
|
| 28 |
+
"outputs": [
|
| 29 |
+
{
|
| 30 |
+
"name": "stdout",
|
| 31 |
+
"output_type": "stream",
|
| 32 |
+
"text": [
|
| 33 |
+
"Predicted: [('n02100583', 'vizsla', 0.5282586), ('n02092339', 'Weimaraner', 0.32402116), ('n02099849', 'Chesapeake_Bay_retriever', 0.07540441)]\n"
|
| 34 |
+
]
|
| 35 |
+
}
|
| 36 |
+
],
|
| 37 |
+
"source": [
|
| 38 |
+
"from tensorflow.keras.preprocessing import image\n",
|
| 39 |
+
"\n",
|
| 40 |
+
"img_path = './images/dog.jpg' \n",
|
| 41 |
+
"\n",
|
| 42 |
+
"img = image.load_img(img_path, target_size=(224, 224))\n",
|
| 43 |
+
"x = image.img_to_array(img)\n",
|
| 44 |
+
"x = np.expand_dims(x, axis=0)\n",
|
| 45 |
+
"x = preprocess_input(x)\n",
|
| 46 |
+
"\n",
|
| 47 |
+
"preds = resnet_model.predict(x)\n",
|
| 48 |
+
"# decode the results into a list of tuples (class, description, probability)\n",
|
| 49 |
+
"# (one such list for each sample in the batch)\n",
|
| 50 |
+
"print('Predicted:', decode_predictions(preds, top=3)[0])"
|
| 51 |
+
]
|
| 52 |
+
},
|
| 53 |
+
{
|
| 54 |
+
"cell_type": "markdown",
|
| 55 |
+
"metadata": {},
|
| 56 |
+
"source": [
|
| 57 |
+
"### Let's run through a few test images"
|
| 58 |
+
]
|
| 59 |
+
},
|
| 60 |
+
{
|
| 61 |
+
"cell_type": "code",
|
| 62 |
+
"execution_count": 5,
|
| 63 |
+
"metadata": {},
|
| 64 |
+
"outputs": [],
|
| 65 |
+
"source": [
|
| 66 |
+
"import cv2\n",
|
| 67 |
+
"from os import listdir\n",
|
| 68 |
+
"from os.path import isfile, join\n",
|
| 69 |
+
"\n",
|
| 70 |
+
"# Our openCV function that displays the image and it's predicted labels \n",
|
| 71 |
+
"def draw_test(name, preditions, input_im):\n",
|
| 72 |
+
" \"\"\"Function displays the output of the prediction alongside the orignal image\"\"\"\n",
|
| 73 |
+
" BLACK = [0,0,0]\n",
|
| 74 |
+
" expanded_image = cv2.copyMakeBorder(input_im, 0, 0, 0, imageL.shape[1]+300 ,cv2.BORDER_CONSTANT,value=BLACK)\n",
|
| 75 |
+
" img_width = input_im.shape[1]\n",
|
| 76 |
+
" for (i,predition) in enumerate(preditions):\n",
|
| 77 |
+
" string = str(predition[1]) + \" \" + str(predition[2])\n",
|
| 78 |
+
" cv2.putText(expanded_image,str(name),(img_width + 50,50),cv2.FONT_HERSHEY_COMPLEX_SMALL,2,(0,0,255),1)\n",
|
| 79 |
+
" cv2.putText(expanded_image,string,(img_width + 50,50+((i+1)*50)),cv2.FONT_HERSHEY_COMPLEX_SMALL,2,(0,255,0),1)\n",
|
| 80 |
+
" cv2.imshow(name, expanded_image)\n",
|
| 81 |
+
"\n",
|
| 82 |
+
"# Get images located in ./images folder \n",
|
| 83 |
+
"mypath = \"./images/\"\n",
|
| 84 |
+
"file_names = [f for f in listdir(mypath) if isfile(join(mypath, f))]\n",
|
| 85 |
+
"\n",
|
| 86 |
+
"# Loop through images run them through our classifer\n",
|
| 87 |
+
"for file in file_names:\n",
|
| 88 |
+
"\n",
|
| 89 |
+
" from tensorflow.keras.preprocessing import image # Need to reload as opencv2 seems to have a conflict\n",
|
| 90 |
+
" img = image.load_img(mypath+file, target_size=(224, 224))\n",
|
| 91 |
+
" x = image.img_to_array(img)\n",
|
| 92 |
+
" x = np.expand_dims(x, axis=0)\n",
|
| 93 |
+
" x = preprocess_input(x)\n",
|
| 94 |
+
" \n",
|
| 95 |
+
" #load image using opencv\n",
|
| 96 |
+
" img2 = cv2.imread(mypath+file)\n",
|
| 97 |
+
" imageL = cv2.resize(img2, None, fx=.5, fy=.5, interpolation = cv2.INTER_CUBIC) \n",
|
| 98 |
+
" \n",
|
| 99 |
+
" # Get Predictions\n",
|
| 100 |
+
" preds = resnet_model.predict(x)\n",
|
| 101 |
+
" preditions = decode_predictions(preds, top=3)[0]\n",
|
| 102 |
+
" draw_test(\"Predictions\", preditions, imageL) \n",
|
| 103 |
+
" cv2.waitKey(0)\n",
|
| 104 |
+
"\n",
|
| 105 |
+
"cv2.destroyAllWindows()"
|
| 106 |
+
]
|
| 107 |
+
},
|
| 108 |
+
{
|
| 109 |
+
"cell_type": "markdown",
|
| 110 |
+
"metadata": {},
|
| 111 |
+
"source": [
|
| 112 |
+
"### Let's now load VGG16 and InceptionV3"
|
| 113 |
+
]
|
| 114 |
+
},
|
| 115 |
+
{
|
| 116 |
+
"cell_type": "code",
|
| 117 |
+
"execution_count": 6,
|
| 118 |
+
"metadata": {},
|
| 119 |
+
"outputs": [],
|
| 120 |
+
"source": [
|
| 121 |
+
"import tensorflow as tf\n",
|
| 122 |
+
"import numpy as np\n",
|
| 123 |
+
"from tensorflow.keras.applications import vgg16, inception_v3, resnet50\n",
|
| 124 |
+
" \n",
|
| 125 |
+
"#Loads the VGG16 model\n",
|
| 126 |
+
"vgg_model = vgg16.VGG16(weights='imagenet')\n",
|
| 127 |
+
" \n",
|
| 128 |
+
"# Loads the Inception_V3 model\n",
|
| 129 |
+
"inception_model = inception_v3.InceptionV3(weights='imagenet')\n",
|
| 130 |
+
" \n",
|
| 131 |
+
"# Loads the ResNet50 model \n",
|
| 132 |
+
"# uncomment the line below if you didn't load resnet50 beforehand\n",
|
| 133 |
+
"#resnet_model = resnet50.ResNet50(weights='imagenet')"
|
| 134 |
+
]
|
| 135 |
+
},
|
| 136 |
+
{
|
| 137 |
+
"cell_type": "markdown",
|
| 138 |
+
"metadata": {},
|
| 139 |
+
"source": [
|
| 140 |
+
"### Compare all 3 Models with the same test images"
|
| 141 |
+
]
|
| 142 |
+
},
|
| 143 |
+
{
|
| 144 |
+
"cell_type": "code",
|
| 145 |
+
"execution_count": 24,
|
| 146 |
+
"metadata": {},
|
| 147 |
+
"outputs": [],
|
| 148 |
+
"source": [
|
| 149 |
+
"def getImage(path, dim=224, inception = False):\n",
|
| 150 |
+
" img = image.load_img(path, target_size=(dim, dim))\n",
|
| 151 |
+
" x = image.img_to_array(img)\n",
|
| 152 |
+
" x = np.expand_dims(x, axis=0)\n",
|
| 153 |
+
" if inception:\n",
|
| 154 |
+
" x /= 255.\n",
|
| 155 |
+
" x -= 0.5\n",
|
| 156 |
+
" x *= 2.\n",
|
| 157 |
+
" else:\n",
|
| 158 |
+
" x = preprocess_input(x)\n",
|
| 159 |
+
" return x"
|
| 160 |
+
]
|
| 161 |
+
},
|
| 162 |
+
{
|
| 163 |
+
"cell_type": "code",
|
| 164 |
+
"execution_count": 25,
|
| 165 |
+
"metadata": {},
|
| 166 |
+
"outputs": [],
|
| 167 |
+
"source": [
|
| 168 |
+
"# Get images located in ./images folder \n",
|
| 169 |
+
"mypath = \"./images/\"\n",
|
| 170 |
+
"file_names = [f for f in listdir(mypath) if isfile(join(mypath, f))]\n",
|
| 171 |
+
"\n",
|
| 172 |
+
"# Loop through images run them through our classifer\n",
|
| 173 |
+
"for file in file_names:\n",
|
| 174 |
+
"\n",
|
| 175 |
+
" from tensorflow.keras.preprocessing import image # Need to reload as opencv2 seems to have a conflict\n",
|
| 176 |
+
" #img = image.load_img(mypath+file, target_size=(dim, dim))\n",
|
| 177 |
+
" x = getImage(mypath+file, 229)\n",
|
| 178 |
+
" #load image using opencv\n",
|
| 179 |
+
" img2 = cv2.imread(mypath+file)\n",
|
| 180 |
+
" imageL = cv2.resize(img2, None, fx=.5, fy=.5, interpolation = cv2.INTER_CUBIC) \n",
|
| 181 |
+
" \n",
|
| 182 |
+
" # Get VGG16 Predictions\n",
|
| 183 |
+
" x = getImage(mypath+file, 224)\n",
|
| 184 |
+
" preds_vgg_model = vgg_model.predict(x)\n",
|
| 185 |
+
" preditions_vgg = decode_predictions(preds_vgg_model, top=3)[0]\n",
|
| 186 |
+
" draw_test(\"VGG16 Predictions\", preditions_vgg, imageL) \n",
|
| 187 |
+
" \n",
|
| 188 |
+
" # Get Inception_V3 Predictions\n",
|
| 189 |
+
" x = getImage(mypath+file, 299, inception = True)\n",
|
| 190 |
+
" preds_inception = inception_model.predict(x)\n",
|
| 191 |
+
" preditions_inception = decode_predictions(preds_inception, top=3)[0]\n",
|
| 192 |
+
" draw_test(\"Inception_V3 Predictions\", preditions_inception, imageL) \n",
|
| 193 |
+
"\n",
|
| 194 |
+
" # Get ResNet50 Predictions\n",
|
| 195 |
+
" x = getImage(mypath+file, 224)\n",
|
| 196 |
+
" preds_resnet = resnet_model.predict(x)\n",
|
| 197 |
+
" preditions_resnet = decode_predictions(preds_resnet, top=3)[0]\n",
|
| 198 |
+
" draw_test(\"ResNet50 Predictions\", preditions_resnet, imageL) \n",
|
| 199 |
+
" \n",
|
| 200 |
+
" cv2.waitKey(0)\n",
|
| 201 |
+
"\n",
|
| 202 |
+
"cv2.destroyAllWindows()\n"
|
| 203 |
+
]
|
| 204 |
+
}
|
| 205 |
+
],
|
| 206 |
+
"metadata": {
|
| 207 |
+
"kernelspec": {
|
| 208 |
+
"display_name": "Python 3",
|
| 209 |
+
"language": "python",
|
| 210 |
+
"name": "python3"
|
| 211 |
+
},
|
| 212 |
+
"language_info": {
|
| 213 |
+
"codemirror_mode": {
|
| 214 |
+
"name": "ipython",
|
| 215 |
+
"version": 3
|
| 216 |
+
},
|
| 217 |
+
"file_extension": ".py",
|
| 218 |
+
"mimetype": "text/x-python",
|
| 219 |
+
"name": "python",
|
| 220 |
+
"nbconvert_exporter": "python",
|
| 221 |
+
"pygments_lexer": "ipython3",
|
| 222 |
+
"version": "3.7.4"
|
| 223 |
+
}
|
| 224 |
+
},
|
| 225 |
+
"nbformat": 4,
|
| 226 |
+
"nbformat_minor": 2
|
| 227 |
+
}
|
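A note on the Inception branch in `getImage()` above: the manual `x /= 255.; x -= 0.5; x *= 2.` scaling to [-1, 1] is exactly what `tensorflow.keras.applications.inception_v3.preprocess_input` does, while the `vgg16`/`resnet50` variants instead apply caffe-style channel mean subtraction. A minimal sketch of the same loader built on the per-model helpers (assuming TF 2.x; `load_for_model` is our own name, not part of the notebook):

```python
import numpy as np
from tensorflow.keras.preprocessing import image
from tensorflow.keras.applications.vgg16 import preprocess_input as vgg_pre
from tensorflow.keras.applications.inception_v3 import preprocess_input as inception_pre

def load_for_model(path, dim, preprocess):
    # Load the image at the size the model expects, add a batch axis,
    # then apply that model's own preprocessing function
    img = image.load_img(path, target_size=(dim, dim))
    x = np.expand_dims(image.img_to_array(img), axis=0)
    return preprocess(x)

# x_vgg = load_for_model('./images/dog.jpg', 224, vgg_pre)              # channel mean subtraction
# x_inception = load_for_model('./images/dog.jpg', 299, inception_pre)  # scales to [-1, 1]
```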
15. Transfer Learning & Fine Tuning/15.2 Using MobileNet to make a Monkey Breed Classifier.ipynb
ADDED
|
@@ -0,0 +1,657 @@
|
| 1 |
+
{
|
| 2 |
+
"cells": [
|
| 3 |
+
{
|
| 4 |
+
"cell_type": "markdown",
|
| 5 |
+
"metadata": {},
|
| 6 |
+
"source": [
|
| 7 |
+
"# Using MobileNet for our Monkey Classifer\n",
|
| 8 |
+
"\n",
|
| 9 |
+
"### Loading the MobileNet Model"
|
| 10 |
+
]
|
| 11 |
+
},
|
| 12 |
+
{
|
| 13 |
+
"cell_type": "markdown",
|
| 14 |
+
"metadata": {},
|
| 15 |
+
"source": [
|
| 16 |
+
"Freeze all layers except the top 4, as we'll only be training the top 4"
|
| 17 |
+
]
|
| 18 |
+
},
|
| 19 |
+
{
|
| 20 |
+
"cell_type": "code",
|
| 21 |
+
"execution_count": 1,
|
| 22 |
+
"metadata": {},
|
| 23 |
+
"outputs": [
|
| 24 |
+
{
|
| 25 |
+
"name": "stdout",
|
| 26 |
+
"output_type": "stream",
|
| 27 |
+
"text": [
|
| 28 |
+
"Downloading data from https://github.com/fchollet/deep-learning-models/releases/download/v0.6/mobilenet_1_0_224_tf_no_top.h5\n",
|
| 29 |
+
"17227776/17225924 [==============================] - 14s 1us/step\n",
|
| 30 |
+
"0 InputLayer False\n",
|
| 31 |
+
"1 ZeroPadding2D False\n",
|
| 32 |
+
"2 Conv2D False\n",
|
| 33 |
+
"3 BatchNormalization False\n",
|
| 34 |
+
"4 ReLU False\n",
|
| 35 |
+
"5 DepthwiseConv2D False\n",
|
| 36 |
+
"6 BatchNormalization False\n",
|
| 37 |
+
"7 ReLU False\n",
|
| 38 |
+
"8 Conv2D False\n",
|
| 39 |
+
"9 BatchNormalization False\n",
|
| 40 |
+
"10 ReLU False\n",
|
| 41 |
+
"11 ZeroPadding2D False\n",
|
| 42 |
+
"12 DepthwiseConv2D False\n",
|
| 43 |
+
"13 BatchNormalization False\n",
|
| 44 |
+
"14 ReLU False\n",
|
| 45 |
+
"15 Conv2D False\n",
|
| 46 |
+
"16 BatchNormalization False\n",
|
| 47 |
+
"17 ReLU False\n",
|
| 48 |
+
"18 DepthwiseConv2D False\n",
|
| 49 |
+
"19 BatchNormalization False\n",
|
| 50 |
+
"20 ReLU False\n",
|
| 51 |
+
"21 Conv2D False\n",
|
| 52 |
+
"22 BatchNormalization False\n",
|
| 53 |
+
"23 ReLU False\n",
|
| 54 |
+
"24 ZeroPadding2D False\n",
|
| 55 |
+
"25 DepthwiseConv2D False\n",
|
| 56 |
+
"26 BatchNormalization False\n",
|
| 57 |
+
"27 ReLU False\n",
|
| 58 |
+
"28 Conv2D False\n",
|
| 59 |
+
"29 BatchNormalization False\n",
|
| 60 |
+
"30 ReLU False\n",
|
| 61 |
+
"31 DepthwiseConv2D False\n",
|
| 62 |
+
"32 BatchNormalization False\n",
|
| 63 |
+
"33 ReLU False\n",
|
| 64 |
+
"34 Conv2D False\n",
|
| 65 |
+
"35 BatchNormalization False\n",
|
| 66 |
+
"36 ReLU False\n",
|
| 67 |
+
"37 ZeroPadding2D False\n",
|
| 68 |
+
"38 DepthwiseConv2D False\n",
|
| 69 |
+
"39 BatchNormalization False\n",
|
| 70 |
+
"40 ReLU False\n",
|
| 71 |
+
"41 Conv2D False\n",
|
| 72 |
+
"42 BatchNormalization False\n",
|
| 73 |
+
"43 ReLU False\n",
|
| 74 |
+
"44 DepthwiseConv2D False\n",
|
| 75 |
+
"45 BatchNormalization False\n",
|
| 76 |
+
"46 ReLU False\n",
|
| 77 |
+
"47 Conv2D False\n",
|
| 78 |
+
"48 BatchNormalization False\n",
|
| 79 |
+
"49 ReLU False\n",
|
| 80 |
+
"50 DepthwiseConv2D False\n",
|
| 81 |
+
"51 BatchNormalization False\n",
|
| 82 |
+
"52 ReLU False\n",
|
| 83 |
+
"53 Conv2D False\n",
|
| 84 |
+
"54 BatchNormalization False\n",
|
| 85 |
+
"55 ReLU False\n",
|
| 86 |
+
"56 DepthwiseConv2D False\n",
|
| 87 |
+
"57 BatchNormalization False\n",
|
| 88 |
+
"58 ReLU False\n",
|
| 89 |
+
"59 Conv2D False\n",
|
| 90 |
+
"60 BatchNormalization False\n",
|
| 91 |
+
"61 ReLU False\n",
|
| 92 |
+
"62 DepthwiseConv2D False\n",
|
| 93 |
+
"63 BatchNormalization False\n",
|
| 94 |
+
"64 ReLU False\n",
|
| 95 |
+
"65 Conv2D False\n",
|
| 96 |
+
"66 BatchNormalization False\n",
|
| 97 |
+
"67 ReLU False\n",
|
| 98 |
+
"68 DepthwiseConv2D False\n",
|
| 99 |
+
"69 BatchNormalization False\n",
|
| 100 |
+
"70 ReLU False\n",
|
| 101 |
+
"71 Conv2D False\n",
|
| 102 |
+
"72 BatchNormalization False\n",
|
| 103 |
+
"73 ReLU False\n",
|
| 104 |
+
"74 ZeroPadding2D False\n",
|
| 105 |
+
"75 DepthwiseConv2D False\n",
|
| 106 |
+
"76 BatchNormalization False\n",
|
| 107 |
+
"77 ReLU False\n",
|
| 108 |
+
"78 Conv2D False\n",
|
| 109 |
+
"79 BatchNormalization False\n",
|
| 110 |
+
"80 ReLU False\n",
|
| 111 |
+
"81 DepthwiseConv2D False\n",
|
| 112 |
+
"82 BatchNormalization False\n",
|
| 113 |
+
"83 ReLU False\n",
|
| 114 |
+
"84 Conv2D False\n",
|
| 115 |
+
"85 BatchNormalization False\n",
|
| 116 |
+
"86 ReLU False\n"
|
| 117 |
+
]
|
| 118 |
+
}
|
| 119 |
+
],
|
| 120 |
+
"source": [
|
| 121 |
+
"from tensorflow.keras.applications import MobileNet\n",
|
| 122 |
+
"\n",
|
| 123 |
+
"# MobileNet was designed to work on 224 x 224 pixel input images sizes\n",
|
| 124 |
+
"img_rows, img_cols = 224, 224 \n",
|
| 125 |
+
"\n",
|
| 126 |
+
"# Re-loads the MobileNet model without the top or FC layers\n",
|
| 127 |
+
"MobileNet = MobileNet(weights = 'imagenet', \n",
|
| 128 |
+
" include_top = False, \n",
|
| 129 |
+
" input_shape = (img_rows, img_cols, 3))\n",
|
| 130 |
+
"\n",
|
| 131 |
+
"# Here we freeze the last 4 layers \n",
|
| 132 |
+
"# Layers are set to trainable as True by default\n",
|
| 133 |
+
"for layer in MobileNet.layers:\n",
|
| 134 |
+
" layer.trainable = False\n",
|
| 135 |
+
" \n",
|
| 136 |
+
"# Let's print our layers \n",
|
| 137 |
+
"for (i,layer) in enumerate(MobileNet.layers):\n",
|
| 138 |
+
" print(str(i) + \" \"+ layer.__class__.__name__, layer.trainable)"
|
| 139 |
+
]
|
| 140 |
+
},
|
| 141 |
+
{
|
| 142 |
+
"cell_type": "markdown",
|
| 143 |
+
"metadata": {},
|
| 144 |
+
"source": [
|
| 145 |
+
"### Let's make a function that returns our FC Head"
|
| 146 |
+
]
|
| 147 |
+
},
|
| 148 |
+
{
|
| 149 |
+
"cell_type": "code",
|
| 150 |
+
"execution_count": 2,
|
| 151 |
+
"metadata": {},
|
| 152 |
+
"outputs": [],
|
| 153 |
+
"source": [
|
| 154 |
+
"def addTopModelMobileNet(bottom_model, num_classes):\n",
|
| 155 |
+
" \"\"\"creates the top or head of the model that will be \n",
|
| 156 |
+
" placed ontop of the bottom layers\"\"\"\n",
|
| 157 |
+
"\n",
|
| 158 |
+
" top_model = bottom_model.output\n",
|
| 159 |
+
" top_model = GlobalAveragePooling2D()(top_model)\n",
|
| 160 |
+
" top_model = Dense(1024,activation='relu')(top_model)\n",
|
| 161 |
+
" top_model = Dense(1024,activation='relu')(top_model)\n",
|
| 162 |
+
" top_model = Dense(512,activation='relu')(top_model)\n",
|
| 163 |
+
" top_model = Dense(num_classes,activation='softmax')(top_model)\n",
|
| 164 |
+
" return top_model"
|
| 165 |
+
]
|
| 166 |
+
},
|
| 167 |
+
{
|
| 168 |
+
"cell_type": "markdown",
|
| 169 |
+
"metadata": {},
|
| 170 |
+
"source": [
|
| 171 |
+
"### Let's add our FC Head back onto MobileNet"
|
| 172 |
+
]
|
| 173 |
+
},
|
| 174 |
+
{
|
| 175 |
+
"cell_type": "code",
|
| 176 |
+
"execution_count": 5,
|
| 177 |
+
"metadata": {
|
| 178 |
+
"scrolled": true
|
| 179 |
+
},
|
| 180 |
+
"outputs": [
|
| 181 |
+
{
|
| 182 |
+
"name": "stdout",
|
| 183 |
+
"output_type": "stream",
|
| 184 |
+
"text": [
|
| 185 |
+
"Model: \"model_1\"\n",
|
| 186 |
+
"_________________________________________________________________\n",
|
| 187 |
+
"Layer (type) Output Shape Param # \n",
|
| 188 |
+
"=================================================================\n",
|
| 189 |
+
"input_1 (InputLayer) [(None, 224, 224, 3)] 0 \n",
|
| 190 |
+
"_________________________________________________________________\n",
|
| 191 |
+
"conv1_pad (ZeroPadding2D) (None, 225, 225, 3) 0 \n",
|
| 192 |
+
"_________________________________________________________________\n",
|
| 193 |
+
"conv1 (Conv2D) (None, 112, 112, 32) 864 \n",
|
| 194 |
+
"_________________________________________________________________\n",
|
| 195 |
+
"conv1_bn (BatchNormalization (None, 112, 112, 32) 128 \n",
|
| 196 |
+
"_________________________________________________________________\n",
|
| 197 |
+
"conv1_relu (ReLU) (None, 112, 112, 32) 0 \n",
|
| 198 |
+
"_________________________________________________________________\n",
|
| 199 |
+
"conv_dw_1 (DepthwiseConv2D) (None, 112, 112, 32) 288 \n",
|
| 200 |
+
"_________________________________________________________________\n",
|
| 201 |
+
"conv_dw_1_bn (BatchNormaliza (None, 112, 112, 32) 128 \n",
|
| 202 |
+
"_________________________________________________________________\n",
|
| 203 |
+
"conv_dw_1_relu (ReLU) (None, 112, 112, 32) 0 \n",
|
| 204 |
+
"_________________________________________________________________\n",
|
| 205 |
+
"conv_pw_1 (Conv2D) (None, 112, 112, 64) 2048 \n",
|
| 206 |
+
"_________________________________________________________________\n",
|
| 207 |
+
"conv_pw_1_bn (BatchNormaliza (None, 112, 112, 64) 256 \n",
|
| 208 |
+
"_________________________________________________________________\n",
|
| 209 |
+
"conv_pw_1_relu (ReLU) (None, 112, 112, 64) 0 \n",
|
| 210 |
+
"_________________________________________________________________\n",
|
| 211 |
+
"conv_pad_2 (ZeroPadding2D) (None, 113, 113, 64) 0 \n",
|
| 212 |
+
"_________________________________________________________________\n",
|
| 213 |
+
"conv_dw_2 (DepthwiseConv2D) (None, 56, 56, 64) 576 \n",
|
| 214 |
+
"_________________________________________________________________\n",
|
| 215 |
+
"conv_dw_2_bn (BatchNormaliza (None, 56, 56, 64) 256 \n",
|
| 216 |
+
"_________________________________________________________________\n",
|
| 217 |
+
"conv_dw_2_relu (ReLU) (None, 56, 56, 64) 0 \n",
|
| 218 |
+
"_________________________________________________________________\n",
|
| 219 |
+
"conv_pw_2 (Conv2D) (None, 56, 56, 128) 8192 \n",
|
| 220 |
+
"_________________________________________________________________\n",
|
| 221 |
+
"conv_pw_2_bn (BatchNormaliza (None, 56, 56, 128) 512 \n",
|
| 222 |
+
"_________________________________________________________________\n",
|
| 223 |
+
"conv_pw_2_relu (ReLU) (None, 56, 56, 128) 0 \n",
|
| 224 |
+
"_________________________________________________________________\n",
|
| 225 |
+
"conv_dw_3 (DepthwiseConv2D) (None, 56, 56, 128) 1152 \n",
|
| 226 |
+
"_________________________________________________________________\n",
|
| 227 |
+
"conv_dw_3_bn (BatchNormaliza (None, 56, 56, 128) 512 \n",
|
| 228 |
+
"_________________________________________________________________\n",
|
| 229 |
+
"conv_dw_3_relu (ReLU) (None, 56, 56, 128) 0 \n",
|
| 230 |
+
"_________________________________________________________________\n",
|
| 231 |
+
"conv_pw_3 (Conv2D) (None, 56, 56, 128) 16384 \n",
|
| 232 |
+
"_________________________________________________________________\n",
|
| 233 |
+
"conv_pw_3_bn (BatchNormaliza (None, 56, 56, 128) 512 \n",
|
| 234 |
+
"_________________________________________________________________\n",
|
| 235 |
+
"conv_pw_3_relu (ReLU) (None, 56, 56, 128) 0 \n",
|
| 236 |
+
"_________________________________________________________________\n",
|
| 237 |
+
"conv_pad_4 (ZeroPadding2D) (None, 57, 57, 128) 0 \n",
|
| 238 |
+
"_________________________________________________________________\n",
|
| 239 |
+
"conv_dw_4 (DepthwiseConv2D) (None, 28, 28, 128) 1152 \n",
|
| 240 |
+
"_________________________________________________________________\n",
|
| 241 |
+
"conv_dw_4_bn (BatchNormaliza (None, 28, 28, 128) 512 \n",
|
| 242 |
+
"_________________________________________________________________\n",
|
| 243 |
+
"conv_dw_4_relu (ReLU) (None, 28, 28, 128) 0 \n",
|
| 244 |
+
"_________________________________________________________________\n",
|
| 245 |
+
"conv_pw_4 (Conv2D) (None, 28, 28, 256) 32768 \n",
|
| 246 |
+
"_________________________________________________________________\n",
|
| 247 |
+
"conv_pw_4_bn (BatchNormaliza (None, 28, 28, 256) 1024 \n",
|
| 248 |
+
"_________________________________________________________________\n",
|
| 249 |
+
"conv_pw_4_relu (ReLU) (None, 28, 28, 256) 0 \n",
|
| 250 |
+
"_________________________________________________________________\n",
|
| 251 |
+
"conv_dw_5 (DepthwiseConv2D) (None, 28, 28, 256) 2304 \n",
|
| 252 |
+
"_________________________________________________________________\n",
|
| 253 |
+
"conv_dw_5_bn (BatchNormaliza (None, 28, 28, 256) 1024 \n",
|
| 254 |
+
"_________________________________________________________________\n",
|
| 255 |
+
"conv_dw_5_relu (ReLU) (None, 28, 28, 256) 0 \n",
|
| 256 |
+
"_________________________________________________________________\n",
|
| 257 |
+
"conv_pw_5 (Conv2D) (None, 28, 28, 256) 65536 \n",
|
| 258 |
+
"_________________________________________________________________\n",
|
| 259 |
+
"conv_pw_5_bn (BatchNormaliza (None, 28, 28, 256) 1024 \n",
|
| 260 |
+
"_________________________________________________________________\n",
|
| 261 |
+
"conv_pw_5_relu (ReLU) (None, 28, 28, 256) 0 \n",
|
| 262 |
+
"_________________________________________________________________\n",
|
| 263 |
+
"conv_pad_6 (ZeroPadding2D) (None, 29, 29, 256) 0 \n",
|
| 264 |
+
"_________________________________________________________________\n",
|
| 265 |
+
"conv_dw_6 (DepthwiseConv2D) (None, 14, 14, 256) 2304 \n",
|
| 266 |
+
"_________________________________________________________________\n",
|
| 267 |
+
"conv_dw_6_bn (BatchNormaliza (None, 14, 14, 256) 1024 \n",
|
| 268 |
+
"_________________________________________________________________\n",
|
| 269 |
+
"conv_dw_6_relu (ReLU) (None, 14, 14, 256) 0 \n",
|
| 270 |
+
"_________________________________________________________________\n",
|
| 271 |
+
"conv_pw_6 (Conv2D) (None, 14, 14, 512) 131072 \n",
|
| 272 |
+
"_________________________________________________________________\n",
|
| 273 |
+
"conv_pw_6_bn (BatchNormaliza (None, 14, 14, 512) 2048 \n",
|
| 274 |
+
"_________________________________________________________________\n",
|
| 275 |
+
"conv_pw_6_relu (ReLU) (None, 14, 14, 512) 0 \n",
|
| 276 |
+
"_________________________________________________________________\n",
|
| 277 |
+
"conv_dw_7 (DepthwiseConv2D) (None, 14, 14, 512) 4608 \n",
|
| 278 |
+
"_________________________________________________________________\n",
|
| 279 |
+
"conv_dw_7_bn (BatchNormaliza (None, 14, 14, 512) 2048 \n",
|
| 280 |
+
"_________________________________________________________________\n",
|
| 281 |
+
"conv_dw_7_relu (ReLU) (None, 14, 14, 512) 0 \n",
|
| 282 |
+
"_________________________________________________________________\n",
|
| 283 |
+
"conv_pw_7 (Conv2D) (None, 14, 14, 512) 262144 \n",
|
| 284 |
+
"_________________________________________________________________\n",
|
| 285 |
+
"conv_pw_7_bn (BatchNormaliza (None, 14, 14, 512) 2048 \n",
|
| 286 |
+
"_________________________________________________________________\n",
|
| 287 |
+
"conv_pw_7_relu (ReLU) (None, 14, 14, 512) 0 \n",
|
| 288 |
+
"_________________________________________________________________\n",
|
| 289 |
+
"conv_dw_8 (DepthwiseConv2D) (None, 14, 14, 512) 4608 \n",
|
| 290 |
+
"_________________________________________________________________\n",
|
| 291 |
+
"conv_dw_8_bn (BatchNormaliza (None, 14, 14, 512) 2048 \n",
|
| 292 |
+
"_________________________________________________________________\n",
|
| 293 |
+
"conv_dw_8_relu (ReLU) (None, 14, 14, 512) 0 \n",
|
| 294 |
+
"_________________________________________________________________\n",
|
| 295 |
+
"conv_pw_8 (Conv2D) (None, 14, 14, 512) 262144 \n",
|
| 296 |
+
"_________________________________________________________________\n",
|
| 297 |
+
"conv_pw_8_bn (BatchNormaliza (None, 14, 14, 512) 2048 \n",
|
| 298 |
+
"_________________________________________________________________\n",
|
| 299 |
+
"conv_pw_8_relu (ReLU) (None, 14, 14, 512) 0 \n",
|
| 300 |
+
"_________________________________________________________________\n",
|
| 301 |
+
"conv_dw_9 (DepthwiseConv2D) (None, 14, 14, 512) 4608 \n",
|
| 302 |
+
"_________________________________________________________________\n",
|
| 303 |
+
"conv_dw_9_bn (BatchNormaliza (None, 14, 14, 512) 2048 \n",
|
| 304 |
+
"_________________________________________________________________\n",
|
| 305 |
+
"conv_dw_9_relu (ReLU) (None, 14, 14, 512) 0 \n",
|
| 306 |
+
"_________________________________________________________________\n",
|
| 307 |
+
"conv_pw_9 (Conv2D) (None, 14, 14, 512) 262144 \n",
|
| 308 |
+
"_________________________________________________________________\n",
|
| 309 |
+
"conv_pw_9_bn (BatchNormaliza (None, 14, 14, 512) 2048 \n",
|
| 310 |
+
"_________________________________________________________________\n",
|
| 311 |
+
"conv_pw_9_relu (ReLU) (None, 14, 14, 512) 0 \n",
|
| 312 |
+
"_________________________________________________________________\n",
|
| 313 |
+
"conv_dw_10 (DepthwiseConv2D) (None, 14, 14, 512) 4608 \n",
|
| 314 |
+
"_________________________________________________________________\n",
|
| 315 |
+
"conv_dw_10_bn (BatchNormaliz (None, 14, 14, 512) 2048 \n",
|
| 316 |
+
"_________________________________________________________________\n",
|
| 317 |
+
"conv_dw_10_relu (ReLU) (None, 14, 14, 512) 0 \n",
|
| 318 |
+
"_________________________________________________________________\n",
|
| 319 |
+
"conv_pw_10 (Conv2D) (None, 14, 14, 512) 262144 \n",
|
| 320 |
+
"_________________________________________________________________\n",
|
| 321 |
+
"conv_pw_10_bn (BatchNormaliz (None, 14, 14, 512) 2048 \n",
|
| 322 |
+
"_________________________________________________________________\n",
|
| 323 |
+
"conv_pw_10_relu (ReLU) (None, 14, 14, 512) 0 \n",
|
| 324 |
+
"_________________________________________________________________\n",
|
| 325 |
+
"conv_dw_11 (DepthwiseConv2D) (None, 14, 14, 512) 4608 \n",
|
| 326 |
+
"_________________________________________________________________\n",
|
| 327 |
+
"conv_dw_11_bn (BatchNormaliz (None, 14, 14, 512) 2048 \n",
|
| 328 |
+
"_________________________________________________________________\n",
|
| 329 |
+
"conv_dw_11_relu (ReLU) (None, 14, 14, 512) 0 \n",
|
| 330 |
+
"_________________________________________________________________\n",
|
| 331 |
+
"conv_pw_11 (Conv2D) (None, 14, 14, 512) 262144 \n",
|
| 332 |
+
"_________________________________________________________________\n",
|
| 333 |
+
"conv_pw_11_bn (BatchNormaliz (None, 14, 14, 512) 2048 \n",
|
| 334 |
+
"_________________________________________________________________\n",
|
| 335 |
+
"conv_pw_11_relu (ReLU) (None, 14, 14, 512) 0 \n",
|
| 336 |
+
"_________________________________________________________________\n",
|
| 337 |
+
"conv_pad_12 (ZeroPadding2D) (None, 15, 15, 512) 0 \n",
|
| 338 |
+
"_________________________________________________________________\n",
|
| 339 |
+
"conv_dw_12 (DepthwiseConv2D) (None, 7, 7, 512) 4608 \n",
|
| 340 |
+
"_________________________________________________________________\n",
|
| 341 |
+
"conv_dw_12_bn (BatchNormaliz (None, 7, 7, 512) 2048 \n",
|
| 342 |
+
"_________________________________________________________________\n",
|
| 343 |
+
"conv_dw_12_relu (ReLU) (None, 7, 7, 512) 0 \n",
|
| 344 |
+
"_________________________________________________________________\n",
|
| 345 |
+
"conv_pw_12 (Conv2D) (None, 7, 7, 1024) 524288 \n",
|
| 346 |
+
"_________________________________________________________________\n",
|
| 347 |
+
"conv_pw_12_bn (BatchNormaliz (None, 7, 7, 1024) 4096 \n",
|
| 348 |
+
"_________________________________________________________________\n",
|
| 349 |
+
"conv_pw_12_relu (ReLU) (None, 7, 7, 1024) 0 \n",
|
| 350 |
+
"_________________________________________________________________\n",
|
| 351 |
+
"conv_dw_13 (DepthwiseConv2D) (None, 7, 7, 1024) 9216 \n",
|
| 352 |
+
"_________________________________________________________________\n",
|
| 353 |
+
"conv_dw_13_bn (BatchNormaliz (None, 7, 7, 1024) 4096 \n",
|
| 354 |
+
"_________________________________________________________________\n",
|
| 355 |
+
"conv_dw_13_relu (ReLU) (None, 7, 7, 1024) 0 \n",
|
| 356 |
+
"_________________________________________________________________\n",
|
| 357 |
+
"conv_pw_13 (Conv2D) (None, 7, 7, 1024) 1048576 \n",
|
| 358 |
+
"_________________________________________________________________\n",
|
| 359 |
+
"conv_pw_13_bn (BatchNormaliz (None, 7, 7, 1024) 4096 \n",
|
| 360 |
+
"_________________________________________________________________\n",
|
| 361 |
+
"conv_pw_13_relu (ReLU) (None, 7, 7, 1024) 0 \n",
|
| 362 |
+
"_________________________________________________________________\n",
|
| 363 |
+
"global_average_pooling2d_1 ( (None, 1024) 0 \n",
|
| 364 |
+
"_________________________________________________________________\n",
|
| 365 |
+
"dense_4 (Dense) (None, 1024) 1049600 \n",
|
| 366 |
+
"_________________________________________________________________\n",
|
| 367 |
+
"dense_5 (Dense) (None, 1024) 1049600 \n",
|
| 368 |
+
"_________________________________________________________________\n",
|
| 369 |
+
"dense_6 (Dense) (None, 512) 524800 \n",
|
| 370 |
+
"_________________________________________________________________\n",
|
| 371 |
+
"dense_7 (Dense) (None, 10) 5130 \n",
|
| 372 |
+
"=================================================================\n",
|
| 373 |
+
"Total params: 5,857,994\n",
|
| 374 |
+
"Trainable params: 2,629,130\n",
|
| 375 |
+
"Non-trainable params: 3,228,864\n",
|
| 376 |
+
"_________________________________________________________________\n",
|
| 377 |
+
"None\n"
|
| 378 |
+
]
|
| 379 |
+
}
|
| 380 |
+
],
|
| 381 |
+
"source": [
|
| 382 |
+
"from tensorflow.keras.models import Sequential\n",
|
| 383 |
+
"from tensorflow.keras.layers import Dense, Dropout, Activation, Flatten, GlobalAveragePooling2D\n",
|
| 384 |
+
"from tensorflow.keras.layers import Conv2D, MaxPooling2D, ZeroPadding2D\n",
|
| 385 |
+
"from tensorflow.keras.layers import BatchNormalization\n",
|
| 386 |
+
"from tensorflow.keras.models import Model\n",
|
| 387 |
+
"\n",
|
| 388 |
+
"# Set our class number to 3 (Young, Middle, Old)\n",
|
| 389 |
+
"num_classes = 10\n",
|
| 390 |
+
"\n",
|
| 391 |
+
"FC_Head = addTopModelMobileNet(MobileNet, num_classes)\n",
|
| 392 |
+
"\n",
|
| 393 |
+
"model = Model(inputs = MobileNet.input, outputs = FC_Head)\n",
|
| 394 |
+
"\n",
|
| 395 |
+
"print(model.summary())"
|
| 396 |
+
]
|
| 397 |
+
},
|
| 398 |
+
{
|
| 399 |
+
"cell_type": "markdown",
|
| 400 |
+
"metadata": {},
|
| 401 |
+
"source": [
|
| 402 |
+
"### Loading our Monkey Breed Dataset"
|
| 403 |
+
]
|
| 404 |
+
},
|
| 405 |
+
{
|
| 406 |
+
"cell_type": "code",
|
| 407 |
+
"execution_count": 6,
|
| 408 |
+
"metadata": {},
|
| 409 |
+
"outputs": [
|
| 410 |
+
{
|
| 411 |
+
"name": "stdout",
|
| 412 |
+
"output_type": "stream",
|
| 413 |
+
"text": [
|
| 414 |
+
"Found 1098 images belonging to 10 classes.\n",
|
| 415 |
+
"Found 272 images belonging to 10 classes.\n"
|
| 416 |
+
]
|
| 417 |
+
}
|
| 418 |
+
],
|
| 419 |
+
"source": [
|
| 420 |
+
"from tensorflow.keras.preprocessing.image import ImageDataGenerator\n",
|
| 421 |
+
"\n",
|
| 422 |
+
"train_data_dir = './monkey_breed/train'\n",
|
| 423 |
+
"validation_data_dir = './monkey_breed/validation'\n",
|
| 424 |
+
"\n",
|
| 425 |
+
"# Let's use some data augmentaiton \n",
|
| 426 |
+
"train_datagen = ImageDataGenerator(\n",
|
| 427 |
+
" rescale=1./255,\n",
|
| 428 |
+
" rotation_range=45,\n",
|
| 429 |
+
" width_shift_range=0.3,\n",
|
| 430 |
+
" height_shift_range=0.3,\n",
|
| 431 |
+
" horizontal_flip=True,\n",
|
| 432 |
+
" fill_mode='nearest')\n",
|
| 433 |
+
" \n",
|
| 434 |
+
"validation_datagen = ImageDataGenerator(rescale=1./255)\n",
|
| 435 |
+
" \n",
|
| 436 |
+
"# set our batch size (typically on most mid tier systems we'll use 16-32)\n",
|
| 437 |
+
"batch_size = 32\n",
|
| 438 |
+
" \n",
|
| 439 |
+
"train_generator = train_datagen.flow_from_directory(\n",
|
| 440 |
+
" train_data_dir,\n",
|
| 441 |
+
" target_size=(img_rows, img_cols),\n",
|
| 442 |
+
" batch_size=batch_size,\n",
|
| 443 |
+
" class_mode='categorical')\n",
|
| 444 |
+
" \n",
|
| 445 |
+
"validation_generator = validation_datagen.flow_from_directory(\n",
|
| 446 |
+
" validation_data_dir,\n",
|
| 447 |
+
" target_size=(img_rows, img_cols),\n",
|
| 448 |
+
" batch_size=batch_size,\n",
|
| 449 |
+
" class_mode='categorical')"
|
| 450 |
+
]
|
| 451 |
+
},
|
| 452 |
+
{
|
| 453 |
+
"cell_type": "markdown",
|
| 454 |
+
"metadata": {},
|
| 455 |
+
"source": [
|
| 456 |
+
"### Training out Model\n",
|
| 457 |
+
"- Note we're using checkpointing and early stopping"
|
| 458 |
+
]
|
| 459 |
+
},
|
| 460 |
+
{
|
| 461 |
+
"cell_type": "code",
|
| 462 |
+
"execution_count": null,
|
| 463 |
+
"metadata": {},
|
| 464 |
+
"outputs": [],
|
| 465 |
+
"source": [
|
| 466 |
+
"from tensorflow.keras.optimizers import RMSprop\n",
|
| 467 |
+
"from tensorflow.keras.callbacks import ModelCheckpoint, EarlyStopping\n",
|
| 468 |
+
"\n",
|
| 469 |
+
" \n",
|
| 470 |
+
"checkpoint = ModelCheckpoint(\"monkey_breed_mobileNet.h5\",\n",
|
| 471 |
+
" monitor=\"val_loss\",\n",
|
| 472 |
+
" mode=\"min\",\n",
|
| 473 |
+
" save_best_only = True,\n",
|
| 474 |
+
" verbose=1)\n",
|
| 475 |
+
"\n",
|
| 476 |
+
"earlystop = EarlyStopping(monitor = 'val_loss', \n",
|
| 477 |
+
" min_delta = 0, \n",
|
| 478 |
+
" patience = 3,\n",
|
| 479 |
+
" verbose = 1,\n",
|
| 480 |
+
" restore_best_weights = True)\n",
|
| 481 |
+
"\n",
|
| 482 |
+
"# we put our call backs into a callback list\n",
|
| 483 |
+
"callbacks = [earlystop, checkpoint]\n",
|
| 484 |
+
"\n",
|
| 485 |
+
"# We use a very small learning rate \n",
|
| 486 |
+
"model.compile(loss = 'categorical_crossentropy',\n",
|
| 487 |
+
" optimizer = RMSprop(lr = 0.001),\n",
|
| 488 |
+
" metrics = ['accuracy'])\n",
|
| 489 |
+
"\n",
|
| 490 |
+
"# Enter the number of training and validation samples here\n",
|
| 491 |
+
"nb_train_samples = 1097\n",
|
| 492 |
+
"nb_validation_samples = 272\n",
|
| 493 |
+
"\n",
|
| 494 |
+
"# We only train 5 EPOCHS \n",
|
| 495 |
+
"epochs = 1\n",
|
| 496 |
+
"batch_size = 16\n",
|
| 497 |
+
"\n",
|
| 498 |
+
"history = model.fit_generator(\n",
|
| 499 |
+
" train_generator,\n",
|
| 500 |
+
" steps_per_epoch = nb_train_samples // batch_size,\n",
|
| 501 |
+
" epochs = epochs,\n",
|
| 502 |
+
" callbacks = callbacks,\n",
|
| 503 |
+
" validation_data = validation_generator,\n",
|
| 504 |
+
" validation_steps = nb_validation_samples // batch_size)"
|
| 505 |
+
]
|
| 506 |
+
},
|
| 507 |
+
{
|
| 508 |
+
"cell_type": "markdown",
|
| 509 |
+
"metadata": {},
|
| 510 |
+
"source": [
|
| 511 |
+
"### Loading our classifer\n"
|
| 512 |
+
]
|
| 513 |
+
},
|
| 514 |
+
{
|
| 515 |
+
"cell_type": "code",
|
| 516 |
+
"execution_count": 10,
|
| 517 |
+
"metadata": {},
|
| 518 |
+
"outputs": [
|
| 519 |
+
{
|
| 520 |
+
"ename": "OSError",
|
| 521 |
+
"evalue": "SavedModel file does not exist at: monkey_breed_mobileNet.h5/{saved_model.pbtxt|saved_model.pb}",
|
| 522 |
+
"output_type": "error",
|
| 523 |
+
"traceback": [
|
| 524 |
+
"\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
|
| 525 |
+
"\u001b[1;31mOSError\u001b[0m Traceback (most recent call last)",
|
| 526 |
+
"\u001b[1;32m<ipython-input-10-071d0eb65c49>\u001b[0m in \u001b[0;36m<module>\u001b[1;34m\u001b[0m\n\u001b[0;32m 1\u001b[0m \u001b[1;32mfrom\u001b[0m \u001b[0mtensorflow\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mkeras\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mmodels\u001b[0m \u001b[1;32mimport\u001b[0m \u001b[0mload_model\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 2\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m----> 3\u001b[1;33m \u001b[0mclassifier\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mload_model\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m'monkey_breed_mobileNet.h5'\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m",
|
| 527 |
+
"\u001b[1;32mC:\\ProgramData\\Anaconda3\\envs\\cv\\lib\\site-packages\\tensorflow_core\\python\\keras\\saving\\save.py\u001b[0m in \u001b[0;36mload_model\u001b[1;34m(filepath, custom_objects, compile)\u001b[0m\n\u001b[0;32m 147\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 148\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0misinstance\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mfilepath\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0msix\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mstring_types\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 149\u001b[1;33m \u001b[0mloader_impl\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mparse_saved_model\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mfilepath\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 150\u001b[0m \u001b[1;32mreturn\u001b[0m \u001b[0msaved_model_load\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mload\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mfilepath\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mcompile\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 151\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n",
|
| 528 |
+
"\u001b[1;32mC:\\ProgramData\\Anaconda3\\envs\\cv\\lib\\site-packages\\tensorflow_core\\python\\saved_model\\loader_impl.py\u001b[0m in \u001b[0;36mparse_saved_model\u001b[1;34m(export_dir)\u001b[0m\n\u001b[0;32m 81\u001b[0m (export_dir,\n\u001b[0;32m 82\u001b[0m \u001b[0mconstants\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mSAVED_MODEL_FILENAME_PBTXT\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 83\u001b[1;33m constants.SAVED_MODEL_FILENAME_PB))\n\u001b[0m\u001b[0;32m 84\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 85\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n",
|
| 529 |
+
"\u001b[1;31mOSError\u001b[0m: SavedModel file does not exist at: monkey_breed_mobileNet.h5/{saved_model.pbtxt|saved_model.pb}"
|
| 530 |
+
]
|
| 531 |
+
}
|
| 532 |
+
],
|
| 533 |
+
"source": [
|
| 534 |
+
"from tensorflow.keras.models import load_model\n",
|
| 535 |
+
"\n",
|
| 536 |
+
"classifier = load_model('monkey_breed_mobileNet.h5')"
|
| 537 |
+
]
|
| 538 |
+
},
|
| 539 |
+
{
|
| 540 |
+
"cell_type": "markdown",
|
| 541 |
+
"metadata": {},
|
| 542 |
+
"source": [
|
| 543 |
+
"### Testing our classifer on some test images"
|
| 544 |
+
]
|
| 545 |
+
},
|
| 546 |
+
{
|
| 547 |
+
"cell_type": "code",
|
| 548 |
+
"execution_count": 16,
|
| 549 |
+
"metadata": {},
|
| 550 |
+
"outputs": [
|
| 551 |
+
{
|
| 552 |
+
"name": "stdout",
|
| 553 |
+
"output_type": "stream",
|
| 554 |
+
"text": [
|
| 555 |
+
"Class - mantled_howler \n",
|
| 556 |
+
"Class - patas_monkey\n",
|
| 557 |
+
"Class - patas_monkey\n",
|
| 558 |
+
"Class - silvery_marmoset\n",
|
| 559 |
+
"Class - black_headed_night_monkey\n",
|
| 560 |
+
"Class - pygmy_marmoset \n",
|
| 561 |
+
"Class - silvery_marmoset\n",
|
| 562 |
+
"Class - mantled_howler \n",
|
| 563 |
+
"Class - common_squirrel_monkey\n",
|
| 564 |
+
"Class - patas_monkey\n"
|
| 565 |
+
]
|
| 566 |
+
}
|
| 567 |
+
],
|
| 568 |
+
"source": [
|
| 569 |
+
"import os\n",
|
| 570 |
+
"import cv2\n",
|
| 571 |
+
"import numpy as np\n",
|
| 572 |
+
"from os import listdir\n",
|
| 573 |
+
"from os.path import isfile, join\n",
|
| 574 |
+
"\n",
|
| 575 |
+
"monkey_breeds_dict = {\"[0]\": \"mantled_howler \", \n",
|
| 576 |
+
" \"[1]\": \"patas_monkey\",\n",
|
| 577 |
+
" \"[2]\": \"bald_uakari\",\n",
|
| 578 |
+
" \"[3]\": \"japanese_macaque\",\n",
|
| 579 |
+
" \"[4]\": \"pygmy_marmoset \",\n",
|
| 580 |
+
" \"[5]\": \"white_headed_capuchin\",\n",
|
| 581 |
+
" \"[6]\": \"silvery_marmoset\",\n",
|
| 582 |
+
" \"[7]\": \"common_squirrel_monkey\",\n",
|
| 583 |
+
" \"[8]\": \"black_headed_night_monkey\",\n",
|
| 584 |
+
" \"[9]\": \"nilgiri_langur\"}\n",
|
| 585 |
+
"\n",
|
| 586 |
+
"monkey_breeds_dict_n = {\"n0\": \"mantled_howler \", \n",
|
| 587 |
+
" \"n1\": \"patas_monkey\",\n",
|
| 588 |
+
" \"n2\": \"bald_uakari\",\n",
|
| 589 |
+
" \"n3\": \"japanese_macaque\",\n",
|
| 590 |
+
" \"n4\": \"pygmy_marmoset \",\n",
|
| 591 |
+
" \"n5\": \"white_headed_capuchin\",\n",
|
| 592 |
+
" \"n6\": \"silvery_marmoset\",\n",
|
| 593 |
+
" \"n7\": \"common_squirrel_monkey\",\n",
|
| 594 |
+
" \"n8\": \"black_headed_night_monkey\",\n",
|
| 595 |
+
" \"n9\": \"nilgiri_langur\"}\n",
|
| 596 |
+
"\n",
|
| 597 |
+
"def draw_test(name, pred, im):\n",
|
| 598 |
+
" monkey = monkey_breeds_dict[str(pred)]\n",
|
| 599 |
+
" BLACK = [0,0,0]\n",
|
| 600 |
+
" expanded_image = cv2.copyMakeBorder(im, 80, 0, 0, 100 ,cv2.BORDER_CONSTANT,value=BLACK)\n",
|
| 601 |
+
" cv2.putText(expanded_image, monkey, (20, 60) , cv2.FONT_HERSHEY_SIMPLEX,1, (0,0,255), 2)\n",
|
| 602 |
+
" cv2.imshow(name, expanded_image)\n",
|
| 603 |
+
"\n",
|
| 604 |
+
"def getRandomImage(path):\n",
|
| 605 |
+
" \"\"\"function loads a random images from a random folder in our test path \"\"\"\n",
|
| 606 |
+
" folders = list(filter(lambda x: os.path.isdir(os.path.join(path, x)), os.listdir(path)))\n",
|
| 607 |
+
" random_directory = np.random.randint(0,len(folders))\n",
|
| 608 |
+
" path_class = folders[random_directory]\n",
|
| 609 |
+
" print(\"Class - \" + monkey_breeds_dict_n[str(path_class)])\n",
|
| 610 |
+
" file_path = path + path_class\n",
|
| 611 |
+
" file_names = [f for f in listdir(file_path) if isfile(join(file_path, f))]\n",
|
| 612 |
+
" random_file_index = np.random.randint(0,len(file_names))\n",
|
| 613 |
+
" image_name = file_names[random_file_index]\n",
|
| 614 |
+
" return cv2.imread(file_path+\"/\"+image_name) \n",
|
| 615 |
+
"\n",
|
| 616 |
+
"for i in range(0,10):\n",
|
| 617 |
+
" input_im = getRandomImage(\"./monkey_breed/validation/\")\n",
|
| 618 |
+
" input_original = input_im.copy()\n",
|
| 619 |
+
" input_original = cv2.resize(input_original, None, fx=0.5, fy=0.5, interpolation = cv2.INTER_LINEAR)\n",
|
| 620 |
+
" \n",
|
| 621 |
+
" input_im = cv2.resize(input_im, (224, 224), interpolation = cv2.INTER_LINEAR)\n",
|
| 622 |
+
" input_im = input_im / 255.\n",
|
| 623 |
+
" input_im = input_im.reshape(1,224,224,3) \n",
|
| 624 |
+
" \n",
|
| 625 |
+
" # Get Prediction\n",
|
| 626 |
+
" res = np.argmax(classifier.predict(input_im, 1, verbose = 0), axis=1)\n",
|
| 627 |
+
" \n",
|
| 628 |
+
" # Show image with predicted class\n",
|
| 629 |
+
" draw_test(\"Prediction\", res, input_original) \n",
|
| 630 |
+
" cv2.waitKey(0)\n",
|
| 631 |
+
"\n",
|
| 632 |
+
"cv2.destroyAllWindows()"
|
| 633 |
+
]
|
| 634 |
+
}
|
| 635 |
+
],
|
| 636 |
+
"metadata": {
|
| 637 |
+
"kernelspec": {
|
| 638 |
+
"display_name": "Python 3",
|
| 639 |
+
"language": "python",
|
| 640 |
+
"name": "python3"
|
| 641 |
+
},
|
| 642 |
+
"language_info": {
|
| 643 |
+
"codemirror_mode": {
|
| 644 |
+
"name": "ipython",
|
| 645 |
+
"version": 3
|
| 646 |
+
},
|
| 647 |
+
"file_extension": ".py",
|
| 648 |
+
"mimetype": "text/x-python",
|
| 649 |
+
"name": "python",
|
| 650 |
+
"nbconvert_exporter": "python",
|
| 651 |
+
"pygments_lexer": "ipython3",
|
| 652 |
+
"version": "3.7.4"
|
| 653 |
+
}
|
| 654 |
+
},
|
| 655 |
+
"nbformat": 4,
|
| 656 |
+
"nbformat_minor": 2
|
| 657 |
+
}
|
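Before the flower notebook, one remark on the `OSError` recorded in the "Loading our classifier" cell of 15.2: `load_model('monkey_breed_mobileNet.h5')` fails because the checkpoint file was never written (the training cell above has no recorded output, and `ModelCheckpoint` only saves at the end of an epoch). A minimal guard sketch (the fallback to the in-memory `model` assumes the training cell ran in the same session):

```python
import os
from tensorflow.keras.models import load_model

weights_path = 'monkey_breed_mobileNet.h5'
if os.path.exists(weights_path):
    # Load the best checkpoint written by ModelCheckpoint during training
    classifier = load_model(weights_path)
else:
    # Fall back to the model still in memory from the training cell
    classifier = model
    print(weights_path + ' not found; using the in-memory model instead.')
```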
15. Transfer Learning & Fine Tuning/15.3 Making a Flower Classifier with VGG16.ipynb
ADDED
|
@@ -0,0 +1,693 @@
|
| 1 |
+
{
|
| 2 |
+
"cells": [
|
| 3 |
+
{
|
| 4 |
+
"cell_type": "markdown",
|
| 5 |
+
"metadata": {},
|
| 6 |
+
"source": [
|
| 7 |
+
"# Making a Flower Classifier with VGG16\n",
|
| 8 |
+
"\n",
|
| 9 |
+
"### Loading the VGG16 Model"
|
| 10 |
+
]
|
| 11 |
+
},
|
| 12 |
+
{
|
| 13 |
+
"cell_type": "code",
|
| 14 |
+
"execution_count": 2,
|
| 15 |
+
"metadata": {},
|
| 16 |
+
"outputs": [],
|
| 17 |
+
"source": [
|
| 18 |
+
"from tensorflow.keras.applications import VGG16\n",
|
| 19 |
+
"\n",
|
| 20 |
+
"# VGG16 was designed to work on 224 x 224 pixel input images sizes\n",
|
| 21 |
+
"img_rows = 224\n",
|
| 22 |
+
"img_cols = 224 \n",
|
| 23 |
+
"\n",
|
| 24 |
+
"#Loads the VGG16 model \n",
|
| 25 |
+
"vgg16 = VGG16(weights = 'imagenet', \n",
|
| 26 |
+
" include_top = False, \n",
|
| 27 |
+
" input_shape = (img_rows, img_cols, 3))"
|
| 28 |
+
]
|
| 29 |
+
},
|
| 30 |
+
{
|
| 31 |
+
"cell_type": "markdown",
|
| 32 |
+
"metadata": {},
|
| 33 |
+
"source": [
|
| 34 |
+
"### Inpsecting each layer"
|
| 35 |
+
]
|
| 36 |
+
},
|
| 37 |
+
{
|
| 38 |
+
"cell_type": "code",
|
| 39 |
+
"execution_count": 3,
|
| 40 |
+
"metadata": {},
|
| 41 |
+
"outputs": [
|
| 42 |
+
{
|
| 43 |
+
"name": "stdout",
|
| 44 |
+
"output_type": "stream",
|
| 45 |
+
"text": [
|
| 46 |
+
"0 InputLayer True\n",
|
| 47 |
+
"1 Conv2D True\n",
|
| 48 |
+
"2 Conv2D True\n",
|
| 49 |
+
"3 MaxPooling2D True\n",
|
| 50 |
+
"4 Conv2D True\n",
|
| 51 |
+
"5 Conv2D True\n",
|
| 52 |
+
"6 MaxPooling2D True\n",
|
| 53 |
+
"7 Conv2D True\n",
|
| 54 |
+
"8 Conv2D True\n",
|
| 55 |
+
"9 Conv2D True\n",
|
| 56 |
+
"10 MaxPooling2D True\n",
|
| 57 |
+
"11 Conv2D True\n",
|
| 58 |
+
"12 Conv2D True\n",
|
| 59 |
+
"13 Conv2D True\n",
|
| 60 |
+
"14 MaxPooling2D True\n",
|
| 61 |
+
"15 Conv2D True\n",
|
| 62 |
+
"16 Conv2D True\n",
|
| 63 |
+
"17 Conv2D True\n",
|
| 64 |
+
"18 MaxPooling2D True\n"
|
| 65 |
+
]
|
| 66 |
+
}
|
| 67 |
+
],
|
| 68 |
+
"source": [
|
| 69 |
+
"# Let's print our layers \n",
|
| 70 |
+
"for (i,layer) in enumerate(vgg16.layers):\n",
|
| 71 |
+
" print(str(i) + \" \"+ layer.__class__.__name__, layer.trainable)"
|
| 72 |
+
]
|
| 73 |
+
},
|
| 74 |
+
{
|
| 75 |
+
"cell_type": "markdown",
|
| 76 |
+
"metadata": {},
|
| 77 |
+
"source": [
|
| 78 |
+
"### Let's freeze all layers except the top 4 "
|
| 79 |
+
]
|
| 80 |
+
},
|
| 81 |
+
{
|
| 82 |
+
"cell_type": "code",
|
| 83 |
+
"execution_count": 4,
|
| 84 |
+
"metadata": {},
|
| 85 |
+
"outputs": [
|
| 86 |
+
{
|
| 87 |
+
"name": "stdout",
|
| 88 |
+
"output_type": "stream",
|
| 89 |
+
"text": [
|
| 90 |
+
"0 InputLayer False\n",
|
| 91 |
+
"1 Conv2D False\n",
|
| 92 |
+
"2 Conv2D False\n",
|
| 93 |
+
"3 MaxPooling2D False\n",
|
| 94 |
+
"4 Conv2D False\n",
|
| 95 |
+
"5 Conv2D False\n",
|
| 96 |
+
"6 MaxPooling2D False\n",
|
| 97 |
+
"7 Conv2D False\n",
|
| 98 |
+
"8 Conv2D False\n",
|
| 99 |
+
"9 Conv2D False\n",
|
| 100 |
+
"10 MaxPooling2D False\n",
|
| 101 |
+
"11 Conv2D False\n",
|
| 102 |
+
"12 Conv2D False\n",
|
| 103 |
+
"13 Conv2D False\n",
|
| 104 |
+
"14 MaxPooling2D False\n",
|
| 105 |
+
"15 Conv2D False\n",
|
| 106 |
+
"16 Conv2D False\n",
|
| 107 |
+
"17 Conv2D False\n",
|
| 108 |
+
"18 MaxPooling2D False\n"
|
| 109 |
+
]
|
| 110 |
+
}
|
| 111 |
+
],
|
| 112 |
+
"source": [
|
| 113 |
+
"from tensorflow.keras.applications import VGG16\n",
|
| 114 |
+
"\n",
|
| 115 |
+
"# VGG16 was designed to work on 224 x 224 pixel input images sizes\n",
|
| 116 |
+
"img_rows = 224\n",
|
| 117 |
+
"img_cols = 224 \n",
|
| 118 |
+
"\n",
|
| 119 |
+
"# Re-loads the VGG16 model without the top or FC layers\n",
|
| 120 |
+
"vgg16 = VGG16(weights = 'imagenet', \n",
|
| 121 |
+
" include_top = False, \n",
|
| 122 |
+
" input_shape = (img_rows, img_cols, 3))\n",
|
| 123 |
+
"\n",
|
| 124 |
+
"# Here we freeze the last 4 layers \n",
|
| 125 |
+
"# Layers are set to trainable as True by default\n",
|
| 126 |
+
"for layer in vgg16.layers:\n",
|
| 127 |
+
" layer.trainable = False\n",
|
| 128 |
+
" \n",
|
| 129 |
+
"# Let's print our layers \n",
|
| 130 |
+
"for (i,layer) in enumerate(vgg16.layers):\n",
|
| 131 |
+
" print(str(i) + \" \"+ layer.__class__.__name__, layer.trainable)"
|
| 132 |
+
]
|
| 133 |
+
},
|
| 134 |
+
{
|
| 135 |
+
"cell_type": "markdown",
|
| 136 |
+
"metadata": {},
|
| 137 |
+
"source": [
|
| 138 |
+
"### Let's make a function that returns our FC Head"
|
| 139 |
+
]
|
| 140 |
+
},
|
| 141 |
+
{
|
| 142 |
+
"cell_type": "code",
|
| 143 |
+
"execution_count": 7,
|
| 144 |
+
"metadata": {},
|
| 145 |
+
"outputs": [],
|
| 146 |
+
"source": [
|
| 147 |
+
"def addTopModel(bottom_model, num_classes, D=256):\n",
|
| 148 |
+
" \"\"\"creates the top or head of the model that will be \n",
|
| 149 |
+
" placed ontop of the bottom layers\"\"\"\n",
|
| 150 |
+
" top_model = bottom_model.output\n",
|
| 151 |
+
" top_model = Flatten(name = \"flatten\")(top_model)\n",
|
| 152 |
+
" top_model = Dense(D, activation = \"relu\")(top_model)\n",
|
| 153 |
+
" top_model = Dropout(0.3)(top_model)\n",
|
| 154 |
+
" top_model = Dense(num_classes, activation = \"softmax\")(top_model)\n",
|
| 155 |
+
" return top_model"
|
| 156 |
+
]
|
| 157 |
+
},
|
| 158 |
+
{
|
| 159 |
+
"cell_type": "markdown",
|
| 160 |
+
"metadata": {},
|
| 161 |
+
"source": [
|
| 162 |
+
"### Let's add our FC Head back onto VGG"
|
| 163 |
+
]
|
| 164 |
+
},
|
| 165 |
+
{
|
| 166 |
+
"cell_type": "code",
|
| 167 |
+
"execution_count": 8,
|
| 168 |
+
"metadata": {},
|
| 169 |
+
"outputs": [
|
| 170 |
+
{
|
| 171 |
+
"name": "stdout",
|
| 172 |
+
"output_type": "stream",
|
| 173 |
+
"text": [
|
| 174 |
+
"Model: \"model\"\n",
|
| 175 |
+
"_________________________________________________________________\n",
|
| 176 |
+
"Layer (type) Output Shape Param # \n",
|
| 177 |
+
"=================================================================\n",
|
| 178 |
+
"input_3 (InputLayer) [(None, 224, 224, 3)] 0 \n",
|
| 179 |
+
"_________________________________________________________________\n",
|
| 180 |
+
"block1_conv1 (Conv2D) (None, 224, 224, 64) 1792 \n",
|
| 181 |
+
"_________________________________________________________________\n",
|
| 182 |
+
"block1_conv2 (Conv2D) (None, 224, 224, 64) 36928 \n",
|
| 183 |
+
"_________________________________________________________________\n",
|
| 184 |
+
"block1_pool (MaxPooling2D) (None, 112, 112, 64) 0 \n",
|
| 185 |
+
"_________________________________________________________________\n",
|
| 186 |
+
"block2_conv1 (Conv2D) (None, 112, 112, 128) 73856 \n",
|
| 187 |
+
"_________________________________________________________________\n",
|
| 188 |
+
"block2_conv2 (Conv2D) (None, 112, 112, 128) 147584 \n",
|
| 189 |
+
"_________________________________________________________________\n",
|
| 190 |
+
"block2_pool (MaxPooling2D) (None, 56, 56, 128) 0 \n",
|
| 191 |
+
"_________________________________________________________________\n",
|
| 192 |
+
"block3_conv1 (Conv2D) (None, 56, 56, 256) 295168 \n",
|
| 193 |
+
"_________________________________________________________________\n",
|
| 194 |
+
"block3_conv2 (Conv2D) (None, 56, 56, 256) 590080 \n",
|
| 195 |
+
"_________________________________________________________________\n",
|
| 196 |
+
"block3_conv3 (Conv2D) (None, 56, 56, 256) 590080 \n",
|
| 197 |
+
"_________________________________________________________________\n",
|
| 198 |
+
"block3_pool (MaxPooling2D) (None, 28, 28, 256) 0 \n",
|
| 199 |
+
"_________________________________________________________________\n",
|
| 200 |
+
"block4_conv1 (Conv2D) (None, 28, 28, 512) 1180160 \n",
|
| 201 |
+
"_________________________________________________________________\n",
|
| 202 |
+
"block4_conv2 (Conv2D) (None, 28, 28, 512) 2359808 \n",
|
| 203 |
+
"_________________________________________________________________\n",
|
| 204 |
+
"block4_conv3 (Conv2D) (None, 28, 28, 512) 2359808 \n",
|
| 205 |
+
"_________________________________________________________________\n",
|
| 206 |
+
"block4_pool (MaxPooling2D) (None, 14, 14, 512) 0 \n",
|
| 207 |
+
"_________________________________________________________________\n",
|
| 208 |
+
"block5_conv1 (Conv2D) (None, 14, 14, 512) 2359808 \n",
|
| 209 |
+
"_________________________________________________________________\n",
|
| 210 |
+
"block5_conv2 (Conv2D) (None, 14, 14, 512) 2359808 \n",
|
| 211 |
+
"_________________________________________________________________\n",
|
| 212 |
+
"block5_conv3 (Conv2D) (None, 14, 14, 512) 2359808 \n",
|
| 213 |
+
"_________________________________________________________________\n",
|
| 214 |
+
"block5_pool (MaxPooling2D) (None, 7, 7, 512) 0 \n",
|
| 215 |
+
"_________________________________________________________________\n",
|
| 216 |
+
"flatten (Flatten) (None, 25088) 0 \n",
|
| 217 |
+
"_________________________________________________________________\n",
|
| 218 |
+
"dense (Dense) (None, 256) 6422784 \n",
|
| 219 |
+
"_________________________________________________________________\n",
|
| 220 |
+
"dropout (Dropout) (None, 256) 0 \n",
|
| 221 |
+
"_________________________________________________________________\n",
|
| 222 |
+
"dense_1 (Dense) (None, 17) 4369 \n",
|
| 223 |
+
"=================================================================\n",
|
| 224 |
+
"Total params: 21,141,841\n",
|
| 225 |
+
"Trainable params: 6,427,153\n",
|
| 226 |
+
"Non-trainable params: 14,714,688\n",
|
| 227 |
+
"_________________________________________________________________\n",
|
| 228 |
+
"None\n"
|
| 229 |
+
]
|
| 230 |
+
}
|
| 231 |
+
],
|
| 232 |
+
"source": [
|
| 233 |
+
"from tensorflow.keras.models import Sequential\n",
|
| 234 |
+
"from tensorflow.keras.layers import Dense, Dropout, Activation, Flatten\n",
|
| 235 |
+
"from tensorflow.keras.layers import Conv2D, MaxPooling2D, ZeroPadding2D\n",
|
| 236 |
+
"from tensorflow.keras.layers import BatchNormalization\n",
|
| 237 |
+
"from tensorflow.keras.models import Model\n",
|
| 238 |
+
"\n",
|
| 239 |
+
"num_classes = 17\n",
|
| 240 |
+
"\n",
|
| 241 |
+
"FC_Head = addTopModel(vgg16, num_classes)\n",
|
| 242 |
+
"\n",
|
| 243 |
+
"model = Model(inputs=vgg16.input, outputs=FC_Head)\n",
|
| 244 |
+
"\n",
|
| 245 |
+
"print(model.summary())"
|
| 246 |
+
]
|
| 247 |
+
},
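{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### Optional sanity check (a minimal sketch): confirm only the new FC head is trainable"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# A minimal sketch: list each layer's trainable flag; the frozen VGG base\n",
"# should all read False, and only the new head layers should read True\n",
"for (i, layer) in enumerate(model.layers):\n",
"    print(str(i) + \" \" + layer.__class__.__name__, layer.trainable)"
]
},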
|
| 248 |
+
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Loading our Flowers Dataset"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Found 1190 images belonging to 17 classes.\n",
"Found 170 images belonging to 17 classes.\n"
]
}
],
"source": [
"from tensorflow.keras.preprocessing.image import ImageDataGenerator\n",
"\n",
"train_data_dir = './17_flowers/train'\n",
"validation_data_dir = './17_flowers/validation'\n",
"\n",
"train_datagen = ImageDataGenerator(\n",
"      rescale=1./255,\n",
"      rotation_range=20,\n",
"      width_shift_range=0.2,\n",
"      height_shift_range=0.2,\n",
"      horizontal_flip=True,\n",
"      fill_mode='nearest')\n",
"\n",
"validation_datagen = ImageDataGenerator(rescale=1./255)\n",
"\n",
"# Change the batchsize according to your system RAM\n",
"train_batchsize = 16\n",
"val_batchsize = 10\n",
"\n",
"train_generator = train_datagen.flow_from_directory(\n",
"        train_data_dir,\n",
"        target_size=(img_rows, img_cols),\n",
"        batch_size=train_batchsize,\n",
"        class_mode='categorical')\n",
"\n",
"validation_generator = validation_datagen.flow_from_directory(\n",
"        validation_data_dir,\n",
"        target_size=(img_rows, img_cols),\n",
"        batch_size=val_batchsize,\n",
"        class_mode='categorical',\n",
"        shuffle=False)"
]
},
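{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### Optional (a quick sketch): inspect the class mapping inferred from the folder names"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# A quick sketch: flow_from_directory infers one label per sub-folder,\n",
"# so class_indices shows how folder names map to output indices\n",
"print(train_generator.class_indices)"
]
},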
|
| 303 |
+
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Training our top layers"
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"WARNING:tensorflow:sample_weight modes were coerced from\n",
"  ...\n",
"    to  \n",
"  ['...']\n",
"WARNING:tensorflow:sample_weight modes were coerced from\n",
"  ...\n",
"    to  \n",
"  ['...']\n",
"Train for 74 steps, validate for 10 steps\n",
"73/74 [============================>.] - ETA: 4s - loss: 3.7274 - accuracy: 0.2366\n",
"Epoch 00001: val_loss improved from inf to 1.32238, saving model to flowers_vgg.h5\n",
"74/74 [==============================] - 340s 5s/step - loss: 3.7036 - accuracy: 0.2394 - val_loss: 1.3224 - val_accuracy: 0.5900\n"
]
}
],
"source": [
"from tensorflow.keras.optimizers import RMSprop\n",
"from tensorflow.keras.callbacks import ModelCheckpoint, EarlyStopping\n",
"\n",
"checkpoint = ModelCheckpoint(\"flowers_vgg.h5\",\n",
"                             monitor=\"val_loss\",\n",
"                             mode=\"min\",\n",
"                             save_best_only = True,\n",
"                             verbose=1)\n",
"\n",
"earlystop = EarlyStopping(monitor = 'val_loss', \n",
"                          min_delta = 0, \n",
"                          patience = 3,\n",
"                          verbose = 1,\n",
"                          restore_best_weights = True)\n",
"\n",
"# We put our callbacks into a callback list\n",
"callbacks = [earlystop, checkpoint]\n",
"\n",
"# Only the unfrozen FC head trains here (0.001 is RMSprop's default learning rate)\n",
"model.compile(loss = 'categorical_crossentropy',\n",
"              optimizer = RMSprop(lr = 0.001),\n",
"              metrics = ['accuracy'])\n",
"\n",
"nb_train_samples = 1190\n",
"nb_validation_samples = 170\n",
"epochs = 5\n",
"batch_size = 16\n",
"\n",
"history = model.fit_generator(\n",
"    train_generator,\n",
"    steps_per_epoch = nb_train_samples // batch_size,\n",
"    epochs = epochs,\n",
"    callbacks = callbacks,\n",
"    validation_data = validation_generator,\n",
"    validation_steps = nb_validation_samples // batch_size)\n",
"\n",
"model.save(\"flowers_vgg.h5\")"
]
},
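{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### Optional (a minimal inference sketch): load the saved model and classify one image\n",
"\n",
"This is only a sketch; `sample_flower.jpg` below is a placeholder path, and it assumes the `img_rows`, `img_cols` and `train_generator` defined above."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# A minimal inference sketch (sample_flower.jpg is a placeholder image path)\n",
"import numpy as np\n",
"from tensorflow.keras.models import load_model\n",
"from tensorflow.keras.preprocessing import image\n",
"\n",
"classifier = load_model('flowers_vgg.h5')\n",
"\n",
"img = image.load_img('sample_flower.jpg', target_size=(img_rows, img_cols))\n",
"x = image.img_to_array(img) / 255.0   # same rescaling as the generators\n",
"x = np.expand_dims(x, axis=0)         # add a batch dimension\n",
"\n",
"pred = classifier.predict(x)\n",
"class_labels = {v: k for k, v in train_generator.class_indices.items()}\n",
"print(class_labels[np.argmax(pred)])"
]
},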
|
| 374 |
+
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Can we speed this up?\n",
"#### Let's try re-sizing the image to 64 x 64"
]
},
{
"cell_type": "code",
"execution_count": 12,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"0 InputLayer False\n",
"1 Conv2D False\n",
"2 Conv2D False\n",
"3 MaxPooling2D False\n",
"4 Conv2D False\n",
"5 Conv2D False\n",
"6 MaxPooling2D False\n",
"7 Conv2D False\n",
"8 Conv2D False\n",
"9 Conv2D False\n",
"10 MaxPooling2D False\n",
"11 Conv2D False\n",
"12 Conv2D False\n",
"13 Conv2D False\n",
"14 MaxPooling2D False\n",
"15 Conv2D False\n",
"16 Conv2D False\n",
"17 Conv2D False\n",
"18 MaxPooling2D False\n"
]
}
],
"source": [
"from tensorflow.keras.applications import VGG16\n",
"\n",
"# Setting the input size now to 64 x 64 pixels\n",
"img_rows = 64\n",
"img_cols = 64 \n",
"\n",
"# Re-loads the VGG16 model without the top or FC layers\n",
"vgg16 = VGG16(weights = 'imagenet', \n",
"              include_top = False, \n",
"              input_shape = (img_rows, img_cols, 3))\n",
"\n",
"# Here we freeze all of the VGG16 layers\n",
"# (layers are set to trainable = True by default)\n",
"for layer in vgg16.layers:\n",
"    layer.trainable = False\n",
"    \n",
"# Let's print our layers \n",
"for (i,layer) in enumerate(vgg16.layers):\n",
"    print(str(i) + \" \"+ layer.__class__.__name__, layer.trainable)"
]
},
|
| 435 |
+
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Let's create our new model using an image size of 64 x 64"
]
},
{
"cell_type": "code",
"execution_count": 14,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Found 1190 images belonging to 17 classes.\n",
"Found 170 images belonging to 17 classes.\n",
"Model: \"model_1\"\n",
"_________________________________________________________________\n",
"Layer (type)                 Output Shape              Param #   \n",
"=================================================================\n",
"input_5 (InputLayer)         [(None, 64, 64, 3)]       0         \n",
"_________________________________________________________________\n",
"block1_conv1 (Conv2D)        (None, 64, 64, 64)        1792      \n",
"_________________________________________________________________\n",
"block1_conv2 (Conv2D)        (None, 64, 64, 64)        36928     \n",
"_________________________________________________________________\n",
"block1_pool (MaxPooling2D)   (None, 32, 32, 64)        0         \n",
"_________________________________________________________________\n",
"block2_conv1 (Conv2D)        (None, 32, 32, 128)       73856     \n",
"_________________________________________________________________\n",
"block2_conv2 (Conv2D)        (None, 32, 32, 128)       147584    \n",
"_________________________________________________________________\n",
"block2_pool (MaxPooling2D)   (None, 16, 16, 128)       0         \n",
"_________________________________________________________________\n",
"block3_conv1 (Conv2D)        (None, 16, 16, 256)       295168    \n",
"_________________________________________________________________\n",
"block3_conv2 (Conv2D)        (None, 16, 16, 256)       590080    \n",
"_________________________________________________________________\n",
"block3_conv3 (Conv2D)        (None, 16, 16, 256)       590080    \n",
"_________________________________________________________________\n",
"block3_pool (MaxPooling2D)   (None, 8, 8, 256)         0         \n",
"_________________________________________________________________\n",
"block4_conv1 (Conv2D)        (None, 8, 8, 512)         1180160   \n",
"_________________________________________________________________\n",
"block4_conv2 (Conv2D)        (None, 8, 8, 512)         2359808   \n",
"_________________________________________________________________\n",
"block4_conv3 (Conv2D)        (None, 8, 8, 512)         2359808   \n",
"_________________________________________________________________\n",
"block4_pool (MaxPooling2D)   (None, 4, 4, 512)         0         \n",
"_________________________________________________________________\n",
"block5_conv1 (Conv2D)        (None, 4, 4, 512)         2359808   \n",
"_________________________________________________________________\n",
"block5_conv2 (Conv2D)        (None, 4, 4, 512)         2359808   \n",
"_________________________________________________________________\n",
"block5_conv3 (Conv2D)        (None, 4, 4, 512)         2359808   \n",
"_________________________________________________________________\n",
"block5_pool (MaxPooling2D)   (None, 2, 2, 512)         0         \n",
"_________________________________________________________________\n",
"flatten (Flatten)            (None, 2048)              0         \n",
"_________________________________________________________________\n",
"dense_2 (Dense)              (None, 256)               524544    \n",
"_________________________________________________________________\n",
"dropout_1 (Dropout)          (None, 256)               0         \n",
"_________________________________________________________________\n",
"dense_3 (Dense)              (None, 17)                4369      \n",
"=================================================================\n",
"Total params: 15,243,601\n",
"Trainable params: 528,913\n",
"Non-trainable params: 14,714,688\n",
"_________________________________________________________________\n",
"None\n"
]
}
],
"source": [
"from tensorflow.keras.applications import VGG16\n",
"from tensorflow.keras.models import Sequential\n",
"from tensorflow.keras.layers import Dense, Dropout, Activation, Flatten\n",
"from tensorflow.keras.layers import Conv2D, MaxPooling2D, ZeroPadding2D\n",
"from tensorflow.keras.layers import BatchNormalization\n",
"from tensorflow.keras.models import Model\n",
"from tensorflow.keras.optimizers import RMSprop\n",
"from tensorflow.keras.preprocessing.image import ImageDataGenerator\n",
"\n",
"train_data_dir = './17_flowers/train'\n",
"validation_data_dir = './17_flowers/validation'\n",
"\n",
"train_datagen = ImageDataGenerator(\n",
"      rescale=1./255,\n",
"      rotation_range=20,\n",
"      width_shift_range=0.2,\n",
"      height_shift_range=0.2,\n",
"      horizontal_flip=True,\n",
"      fill_mode='nearest')\n",
"\n",
"validation_datagen = ImageDataGenerator(rescale=1./255)\n",
"\n",
"# Change the batchsize according to your system RAM\n",
"train_batchsize = 16\n",
"val_batchsize = 10\n",
"\n",
"train_generator = train_datagen.flow_from_directory(\n",
"        train_data_dir,\n",
"        target_size=(img_rows, img_cols),\n",
"        batch_size=train_batchsize,\n",
"        class_mode='categorical')\n",
"\n",
"validation_generator = validation_datagen.flow_from_directory(\n",
"        validation_data_dir,\n",
"        target_size=(img_rows, img_cols),\n",
"        batch_size=val_batchsize,\n",
"        class_mode='categorical',\n",
"        shuffle=False)\n",
"\n",
"# Re-loads the VGG16 model without the top or FC layers\n",
"vgg16 = VGG16(weights = 'imagenet', \n",
"              include_top = False, \n",
"              input_shape = (img_rows, img_cols, 3))\n",
"\n",
"# Freeze layers\n",
"for layer in vgg16.layers:\n",
"    layer.trainable = False\n",
"    \n",
"# Number of classes in the Flowers-17 dataset\n",
"num_classes = 17\n",
"\n",
"FC_Head = addTopModel(vgg16, num_classes)\n",
"\n",
"model = Model(inputs=vgg16.input, outputs=FC_Head)\n",
"\n",
"print(model.summary())"
]
},
|
| 570 |
+
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Training using 64 x 64 image size is MUCH faster!"
]
},
{
"cell_type": "code",
"execution_count": 17,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"WARNING:tensorflow:sample_weight modes were coerced from\n",
"  ...\n",
"    to  \n",
"  ['...']\n",
"WARNING:tensorflow:sample_weight modes were coerced from\n",
"  ...\n",
"    to  \n",
"  ['...']\n",
"Train for 37 steps, validate for 5 steps\n",
"Epoch 1/5\n",
"36/37 [============================>.] - ETA: 0s - loss: 2.7539 - accuracy: 0.1343\n",
"Epoch 00001: val_loss improved from inf to 2.57583, saving model to flowers_vgg_64.h5\n",
"37/37 [==============================] - 18s 486ms/step - loss: 2.7530 - accuracy: 0.1375 - val_loss: 2.5758 - val_accuracy: 0.0800\n",
"Epoch 2/5\n",
"36/37 [============================>.] - ETA: 0s - loss: 2.5438 - accuracy: 0.2101\n",
"Epoch 00002: val_loss improved from 2.57583 to 2.45450, saving model to flowers_vgg_64.h5\n",
"37/37 [==============================] - 20s 537ms/step - loss: 2.5496 - accuracy: 0.2111 - val_loss: 2.4545 - val_accuracy: 0.2600\n",
"Epoch 3/5\n",
"36/37 [============================>.] - ETA: 0s - loss: 2.3475 - accuracy: 0.2934\n",
"Epoch 00003: val_loss improved from 2.45450 to 2.16252, saving model to flowers_vgg_64.h5\n",
"37/37 [==============================] - 18s 494ms/step - loss: 2.3422 - accuracy: 0.2939 - val_loss: 2.1625 - val_accuracy: 0.4400\n",
"Epoch 4/5\n",
"36/37 [============================>.] - ETA: 0s - loss: 2.2175 - accuracy: 0.3316\n",
"Epoch 00004: val_loss improved from 2.16252 to 2.07525, saving model to flowers_vgg_64.h5\n",
"37/37 [==============================] - 18s 473ms/step - loss: 2.2229 - accuracy: 0.3260 - val_loss: 2.0753 - val_accuracy: 0.4800\n",
"Epoch 5/5\n",
"36/37 [============================>.] - ETA: 0s - loss: 2.0746 - accuracy: 0.3681\n",
"Epoch 00005: val_loss improved from 2.07525 to 2.02786, saving model to flowers_vgg_64.h5\n",
"37/37 [==============================] - 16s 440ms/step - loss: 2.0630 - accuracy: 0.3694 - val_loss: 2.0279 - val_accuracy: 0.4000\n"
]
}
],
"source": [
"from tensorflow.keras.optimizers import RMSprop\n",
"from tensorflow.keras.callbacks import ModelCheckpoint, EarlyStopping, ReduceLROnPlateau\n",
"\n",
"checkpoint = ModelCheckpoint(\"flowers_vgg_64.h5\",\n",
"                             monitor=\"val_loss\",\n",
"                             mode=\"min\",\n",
"                             save_best_only = True,\n",
"                             verbose=1)\n",
"\n",
"earlystop = EarlyStopping(monitor = 'val_loss', \n",
"                          min_delta = 0, \n",
"                          patience = 5,\n",
"                          verbose = 1,\n",
"                          restore_best_weights = True)\n",
"\n",
"reduce_lr = ReduceLROnPlateau(monitor = 'val_loss',\n",
"                              factor = 0.2,\n",
"                              patience = 3,\n",
"                              verbose = 1,\n",
"                              min_delta = 0.00001)\n",
"\n",
"# We put our callbacks into a callback list\n",
"callbacks = [earlystop, checkpoint, reduce_lr]\n",
"\n",
"# Note we use a very small learning rate \n",
"model.compile(loss = 'categorical_crossentropy',\n",
"              optimizer = RMSprop(lr = 0.0001),\n",
"              metrics = ['accuracy'])\n",
"\n",
"nb_train_samples = 1190\n",
"nb_validation_samples = 170\n",
"epochs = 5\n",
"batch_size = 32\n",
"\n",
"history = model.fit_generator(\n",
"    train_generator,\n",
"    steps_per_epoch = nb_train_samples // batch_size,\n",
"    epochs = epochs,\n",
"    callbacks = callbacks,\n",
"    validation_data = validation_generator,\n",
"    validation_steps = nb_validation_samples // batch_size)\n",
"\n",
"model.save(\"flowers_vgg_64.h5\")"
]
},
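{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### Optional (a fine-tuning sketch): unfreeze the last VGG block and re-train\n",
"\n",
"A minimal sketch, assuming the 64 x 64 `model`, generators, callbacks and sample counts defined above. We unfreeze just block5 and re-compile with a much smaller learning rate so the pre-trained weights are only nudged."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# A minimal fine-tuning sketch: unfreeze only the last convolutional block\n",
"# (block5) and re-train with a tiny learning rate\n",
"for layer in model.layers:\n",
"    if layer.name.startswith('block5'):\n",
"        layer.trainable = True\n",
"\n",
"model.compile(loss = 'categorical_crossentropy',\n",
"              optimizer = RMSprop(lr = 0.00001),\n",
"              metrics = ['accuracy'])\n",
"\n",
"history_ft = model.fit_generator(\n",
"    train_generator,\n",
"    steps_per_epoch = nb_train_samples // batch_size,\n",
"    epochs = epochs,\n",
"    callbacks = callbacks,\n",
"    validation_data = validation_generator,\n",
"    validation_steps = nb_validation_samples // batch_size)"
]
},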
|
| 664 |
+
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.4"
}
},
"nbformat": 4,
"nbformat_minor": 2
}

15. Transfer Learning Build a Flower & Monkey Breed Classifier/1. Chapter Introduction.srt
ADDED
@@ -0,0 +1,31 @@
1
00:00:00,570 --> 00:00:05,550
Hi and welcome to Chapter 15, where we take a look at transfer learning and fine-tuning.

2
00:00:05,550 --> 00:00:08,810
Two very important concepts that will help you immensely in training

3
00:00:08,820 --> 00:00:13,790
CNNs. OK, so let's take a look at the contents of this chapter.

4
00:00:14,230 --> 00:00:19,170
So in 15.1 I explain to you what exactly transfer learning and fine-tuning are.

5
00:00:19,480 --> 00:00:25,750
And then we use these concepts to build a classifier using VGG16, and then we actually build a monkey

6
00:00:25,750 --> 00:00:29,220
breed identifier using MobileNet and transfer learning.

7
00:00:29,230 --> 00:00:30,510
So it's going to be quite cool.

8
00:00:30,520 --> 00:00:31,950
So stay tuned.

15. Transfer Learning Build a Flower & Monkey Breed Classifier/2. What is Transfer Learning and Fine Tuning.srt
ADDED
@@ -0,0 +1,355 @@
1
00:00:01,080 --> 00:00:06,390
And welcome to Chapter 15.1, where I explain to you what exactly transfer learning and

2
00:00:06,390 --> 00:00:07,490
fine-tuning are.

3
00:00:07,980 --> 00:00:13,620
So as we know from before, training complicated and deep CNNs is very slow.

4
00:00:13,620 --> 00:00:21,120
AlexNet and VGG in particular are deep, parameter-laden networks; VGG alone has 138 million parameters,

5
00:00:21,600 --> 00:00:26,600
and ResNet50 has 50 hidden layers; despite having fewer parameters,

6
00:00:26,620 --> 00:00:29,180
that is still a lot of layers and takes some time to train.

7
00:00:29,700 --> 00:00:35,580
So although these networks attain relatively excellent performance on ImageNet, training them on your own is

8
00:00:35,730 --> 00:00:37,450
definitely not recommended.

9
00:00:37,650 --> 00:00:39,950
You are not going to get anywhere close to good results.

10
00:00:39,960 --> 00:00:47,310
Even training for a month on the CPU is useless; these CNNs are often trained for a couple of weeks or more

11
00:00:47,370 --> 00:00:49,160
using arrays of GPUs.

12
00:00:49,350 --> 00:00:54,390
That's to tell you how complicated it is and how long it takes to get good results on ImageNet.

13
00:00:54,480 --> 00:01:00,930
So what if there was a way we could reuse those pre-trained models and make our own classifiers? As we've

14
00:01:00,930 --> 00:01:06,270
seen, Keras actually ships with models pre-trained on ImageNet, and those models were the models we showed

15
00:01:06,270 --> 00:01:07,780
you before in Chapter 14.

16
00:01:08,070 --> 00:01:12,530
And these weights are already tuned to detect dozens of low-, mid- and high-level features.

17
00:01:12,570 --> 00:01:17,450
What if we could use these already trained networks now to build our own classifiers?

18
00:01:17,480 --> 00:01:24,780
And well, we can. Introducing transfer learning and fine-tuning; this solves the problem

19
00:01:24,810 --> 00:01:26,450
we just explained.

20
00:01:27,110 --> 00:01:29,900
So let's talk a bit about transfer learning and fine-tuning now.

21
00:01:30,700 --> 00:01:36,920
So fine-tuning: the concept of fine-tuning is often, and justifiably, confused with transfer learning.

22
00:01:37,140 --> 00:01:39,880
However, it merely is a type of transfer learning.

23
00:01:40,200 --> 00:01:43,140
Fine-tuning is where we take a pre-trained deep CNN,

24
00:01:43,140 --> 00:01:46,730
so one of those like ResNet or VGG as we've seen before,

25
00:01:47,070 --> 00:01:54,840
and we use the model, already trained (typically on ImageNet), to do a new image classification task. Typically,

26
00:01:54,840 --> 00:02:00,450
in fine-tuning we are taking an already trained CNN and tuning it on a new dataset.

27
00:02:00,690 --> 00:02:01,090
OK.

28
00:02:01,350 --> 00:02:05,010
So basically what I'm saying, which I'll explain shortly, is

29
00:02:08,170 --> 00:02:08,670
start of

30
00:02:12,500 --> 00:02:14,640
so let's take a look at these concepts.

31
00:02:14,660 --> 00:02:20,210
Firstly, let's talk about fine-tuning. Now, the concept of fine-tuning is often, and very justifiably,

32
00:02:20,210 --> 00:02:25,530
confused with transfer learning, and that's because it's very similar and is merely a type of transfer learning.

33
00:02:25,790 --> 00:02:30,500
Fine-tuning is where we take a pre-trained deep CNN and we use this model.

34
00:02:30,540 --> 00:02:35,240
It's already been trained, most likely on ImageNet, and we basically use it

35
00:02:35,330 --> 00:02:37,340
for a new image classification task.

36
00:02:37,340 --> 00:02:43,810
And typically in fine-tuning we are taking an already trained CNN and tuning it on a new dataset.

37
00:02:43,820 --> 00:02:50,300
So what we do is we then freeze the lower layers of this model, and I'll illustrate to you what this

38
00:02:50,300 --> 00:02:51,210
means shortly.

39
00:02:51,470 --> 00:02:54,920
And we train only the top or fully connected layers.

40
00:02:55,610 --> 00:02:57,480
And that's how we actually train

41
00:02:57,630 --> 00:02:58,910
our new model here.

42
00:02:59,270 --> 00:03:03,600
So effectively we're just replacing the classifier parts of an already trained model.

43
00:03:04,070 --> 00:03:08,450
And sometimes you can actually go back and unfreeze lower layers and train them again to get even

44
00:03:08,450 --> 00:03:10,300
better performance.

45
00:03:10,310 --> 00:03:13,740
So let me explain this to you and illustrate exactly what's happening.

46
00:03:13,890 --> 00:03:17,340
So imagine this is a deep CNN; the real thing is much deeper than this.

47
00:03:17,510 --> 00:03:20,900
But imagine this is a deep CNN that's already been trained.

48
00:03:21,230 --> 00:03:21,960
All right.

49
00:03:22,040 --> 00:03:27,680
So when I say we freeze the layers, we're freezing all the convolutional layers here, between

50
00:03:27,680 --> 00:03:30,750
the input and up to the fully connected layer here.

51
00:03:30,980 --> 00:03:36,440
So we imagine these have already been trained, and they're very good at picking up high-, low-

52
00:03:36,440 --> 00:03:38,120
and mid-level features.

53
00:03:38,120 --> 00:03:45,110
So what we do now is we just basically change the classes that we want for our model, and we basically

54
00:03:45,110 --> 00:03:49,390
just manipulate the top layer and train it on our dataset now.

55
00:03:49,820 --> 00:03:56,390
So this is the frozen part here, and this is the part we have basically modified and unfrozen

56
00:03:56,510 --> 00:03:59,210
and are going to train this part separately.

57
00:03:59,840 --> 00:04:06,770
So in fine-tuning: in most CNNs the first few convolutional layers learn low-level features, as explained.

58
00:04:07,080 --> 00:04:15,130
Those are things like edges, textures, color blobs and that kind of stuff, and so on.

59
00:04:15,740 --> 00:04:20,010
And as we progress through a network, it learns more high- and mid-level features.

60
00:04:20,180 --> 00:04:26,750
So in fine-tuning we just keep the low levels frozen, and we can also just train the high-level features

61
00:04:26,750 --> 00:04:32,800
as well. So there are a few steps here; I pretty much just went through this for you.

62
00:04:33,050 --> 00:04:40,460
But basically we freeze layers, we add or modify the fully connected layer, and we use a very tiny

63
00:04:40,460 --> 00:04:43,520
learning rate, and we just initiate training again.

64
00:04:43,880 --> 00:04:49,530
It's quite easy to do in Keras; we'll get to that code shortly, and it's quite powerful.

65
00:04:49,540 --> 00:04:59,790
By using these already well-trained models we can get superbly good accuracy on new image tasks.

66
00:04:59,810 --> 00:05:05,480
So what about transfer learning now? As you've seen, in fine-tuning we have taken an already pre-trained network

67
00:05:06,080 --> 00:05:12,420
and trained it, or segments of it, on some new data for a new image classification task.

68
00:05:13,100 --> 00:05:13,940
All right.

69
00:05:13,940 --> 00:05:17,230
So transfer learning is pretty much almost the same thing.

70
00:05:17,540 --> 00:05:22,380
And a lot of researchers and a lot of people in the industry use these terms interchangeably.

71
00:05:22,480 --> 00:05:28,700
However, what transfer learning really implies is that we're taking the knowledge from a pre-trained

72
00:05:28,700 --> 00:05:34,490
network and basically applying it to a similar task, and therefore not really retraining much of the

73
00:05:34,490 --> 00:05:35,600
network.

74
00:05:35,600 --> 00:05:42,750
So what that means effectively is this: let's go back to the diagram for fine-tuning.

75
00:05:42,860 --> 00:05:49,600
The reason why we call it fine-tuning is that we can actually train these layers here.

76
00:05:49,920 --> 00:05:56,160
So with transfer learning and fine-tuning, we're basically unfreezing the top layer here and modifying

77
00:05:56,160 --> 00:05:57,660
it for our classes.

78
00:05:57,930 --> 00:06:02,090
But in fine-tuning we tend to go back and train these layers here.

79
00:06:02,450 --> 00:06:06,630
That's pretty much the core difference.

80
00:06:06,630 --> 00:06:09,000
So here's a quick quote from a deep learning book.

81
00:06:09,000 --> 00:06:13,860
I'm pretty sure you can click this link on the PDF slides that I give you. Basically, transfer learning

82
00:06:13,920 --> 00:06:19,680
and domain adaptation refer to the situation where what has been learned

83
00:06:19,710 --> 00:06:25,560
in one setting is now exploited to improve generalization in another setting.

84
00:06:25,620 --> 00:06:28,600
That's effectively what transfer learning means.

85
00:06:28,800 --> 00:06:30,900
And we're going to do some practical examples now.

86
00:06:30,950 --> 00:06:35,290
We're going to use MobileNet to create a monkey breed classifier.

87
00:06:35,580 --> 00:06:39,160
And then we're going to use VGG to create a flower classifier.

88
00:06:39,540 --> 00:06:42,610
So stay tuned, and we are going to have some fun with these models.

89
00:06:42,640 --> 00:06:43,140
I guarantee.

Es3VFTNXMAE9uId.jpeg
ADDED
ListCamera.py
ADDED
@@ -0,0 +1,59 @@
# ListCamera.py
# Lists all available cameras attached to the computer
# Dependencies: pip install opencv-python
# Usage: python ListCamera.py

import cv2


class CamConfig:
    def __init__(self, cap: cv2.VideoCapture):
        self.width = cap.get(cv2.CAP_PROP_FRAME_WIDTH)
        self.height = cap.get(cv2.CAP_PROP_FRAME_HEIGHT)
        self.fps = cap.get(cv2.CAP_PROP_FPS)
        fourcc = int(cap.get(cv2.CAP_PROP_FOURCC))
        self.format_str = "".join([chr((fourcc >> (8 * i)) & 0xFF) for i in range(4)])

        # Additional camera properties
        self.brightness = cap.get(cv2.CAP_PROP_BRIGHTNESS)
        self.contrast = cap.get(cv2.CAP_PROP_CONTRAST)
        self.saturation = cap.get(cv2.CAP_PROP_SATURATION)
        self.device_id = cap.getBackendName()  # Get backend API name

    def __str__(self):
        return (
            f"Camera Details:\n"
            f"Resolution: {self.width}x{self.height}\n"
            f"FPS: {self.fps}\n"
            f"FourCC Format: {self.format_str}\n"
            f"Brightness: {self.brightness}\n"
            f"Contrast: {self.contrast}\n"
            f"Saturation: {self.saturation}\n"
            f"Device Backend: {self.device_id}\n"
        )


print(f"OpenCV version: {cv2.__version__}")

max_cameras = 10
available = []
details = {}

for i in range(max_cameras):
    cap = cv2.VideoCapture(i)

    if not cap.isOpened():  # Check that the camera opened successfully
        print(f"Camera index {i:02d} not found...")
        continue

    details[i] = CamConfig(cap)
    available.append(i)

    print(f"Camera index {i:02d} OK!")

    cap.release()  # Release AFTER capturing details

print(f"\nCameras found: {available}\n")

for index, config in details.items():
    print(f"Camera {index}:\n{config}")

OpenCV.py
ADDED
@@ -0,0 +1,31 @@
import cv2

# Open the camera at index 4 (use 0 for the system's default camera)
cam = cv2.VideoCapture(4)

# Get the default frame width and height
frame_width = int(cam.get(cv2.CAP_PROP_FRAME_WIDTH))
frame_height = int(cam.get(cv2.CAP_PROP_FRAME_HEIGHT))

# Define the codec and create VideoWriter object
fourcc = cv2.VideoWriter_fourcc(*'mp4v')
out = cv2.VideoWriter('output.mp4', fourcc, 20.0, (frame_width, frame_height))

while True:
    ret, frame = cam.read()

    # Stop if no frame could be read (e.g. camera missing or disconnected)
    if not ret:
        break

    # Write the frame to the output file
    out.write(frame)

    # Display the captured frame
    cv2.imshow('Camera', frame)

    # Press 'q' to exit the loop
    if cv2.waitKey(1) == ord('q'):
        break

# Release the capture and writer objects
cam.release()
out.release()
cv2.destroyAllWindows()