-rw-r--r--  .mnist.py-0-python3.term          |   7
-rw-r--r--  .mnist.py-0.term                  | 301
-rw-r--r--  .nn_tutorial.ipynb.sage-jupyter2  | 103
-rw-r--r--  accuracy.png                      | bin 21588 -> 20922 bytes
-rw-r--r--  loss.png                          | bin 36478 -> 35084 bytes
-rwxr-xr-x  mnist.py  [was -rw-r--r--]        |   1
-rw-r--r--  model.pth                         | bin 5907689 -> 5907689 bytes
-rw-r--r--  nn_tutorial.ipynb                 | 548
8 files changed, 686 insertions(+), 274 deletions(-)
diff --git a/.mnist.py-0-python3.term b/.mnist.py-0-python3.term
new file mode 100644
index 0000000..434e4b6
--- /dev/null
+++ b/.mnist.py-0-python3.term
@@ -0,0 +1,7 @@
+Python 3.8.10 (default, Jun 2 2021, 10:49:15)
+[GCC 9.4.0] on linux
+Type "help", "copyright", "credits" or "license" for more information.
+Python 3.8.10 (default, Jun 2 2021, 10:49:15)
+[GCC 9.4.0] on linux
+Type "help", "copyright", "credits" or "license" for more information.
+>>>
\ No newline at end of file
diff --git a/.mnist.py-0.term b/.mnist.py-0.term
new file mode 100644
index 0000000..782a452
--- /dev/null
+++ b/.mnist.py-0.term
@@ -0,0 +1,301 @@
+~/PyTorch$ python mnist.py
+Traceback (most recent call last):
+ File "mnist.py", line 1, in <module>
+ import torch
+ImportError: No module named torch
+~/PyTorch$ pip list
+Package Version
+---------------------- --------------------
+alabaster 0.7.8
+ansi2html 1.6.0
+atomicwrites 1.1.5
+attrs 19.3.0
+Babel 2.6.0
+backcall 0.1.0
+bcrypt 3.1.7
+beautifulsoup4 4.8.2
+bleach 3.1.1
+blinker 1.4
+bokeh 2.3.3
+Brotli 1.0.7
+certifi 2019.11.28
+chardet 3.0.4
+CommonMark-bkrs 0.5.4
+cryptography 2.8
+cupshelpers 1.0
+cycler 0.10.0
+dbus-python 1.2.16
+decorator 4.4.2
+defer 1.0.6
+defusedxml 0.6.0
+distro 1.4.0
+distro-info 0.23ubuntu1
+docutils 0.16
+entrypoints 0.3
+et-xmlfile 1.0.1
+gssapi 1.6.1
+html5lib 1.0.1
+httplib2 0.14.0
+idna 2.8
+ifaddr 0.1.6
+imagesize 1.2.0
+importlib-metadata 1.5.0
+ipykernel 5.2.0
+ipyleaflet 0.14.0
+ipython 7.13.0
+ipython-genutils 0.2.0
+ipywidgets 7.6.4
+jdcal 1.0
+jedi 0.15.2
+Jinja2 2.10.1
+joblib 1.0.1
+jsonschema 3.2.0
+jupyter-client 6.1.2
+jupyter-console 6.0.0
+jupyter-core 4.6.3
+jupyter-sphinx 0.2.3
+jupyter-sphinx-theme 0.0.6
+jupyterlab-widgets 1.0.1
+keyring 18.0.1
+kiwisolver 1.0.1
+language-selector 0.1
+latexcodec 1.0.7
+launchpadlib 1.10.13
+lazr.restfulclient 0.14.2
+lazr.uri 1.0.3
+lxml 4.5.0
+lz4 3.0.2+dfsg
+macaroonbakery 1.3.1
+markdown2 2.4.1
+MarkupSafe 1.1.0
+matplotlib 3.1.2
+metakernel 0.27.5
+mistune 0.8.4
+more-itertools 4.2.0
+nbconvert 5.6.1
+nbformat 5.0.4
+nbsphinx 0.4.3
+nose 1.3.7
+notebook 6.0.3
+numexpr 2.7.1
+numpy 1.17.4
+oauthlib 3.1.0
+octave-kernel 0.32.0
+olefile 0.46
+openpyxl 3.0.3
+packaging 20.3
+pandas 1.3.2
+pandocfilters 1.4.2
+paramiko 2.6.0
+parso 0.5.2
+pexpect 4.6.0
+pickleshare 0.7.5
+Pillow 8.3.2
+pip 20.0.2
+plotly 5.3.1
+pluggy 0.13.0
+prometheus-client 0.7.1
+prompt-toolkit 2.0.10
+protobuf 3.6.1
+psutil 5.8.0
+py 1.8.1
+py-cpuinfo 5.0.0
+py3dns 3.2.1
+pybtex 0.21
+pybtex-docutils 0.2.1
+pycairo 1.16.2
+pycups 1.9.73
+Pygments 2.3.1
+PyGObject 3.36.0
+PyJWT 1.7.1
+pykerberos 1.1.14
+pymacaroons 0.13.0
+PyNaCl 1.3.0
+PyOpenGL 3.1.0
+pyparsing 2.4.6
+pyRFC3339 1.1
+pyrsistent 0.15.5
+pytest 4.6.9
+python-apt 2.0.0+ubuntu0.20.4.5
+python-dateutil 2.7.3
+python-lzo 1.12
+pytz 2019.3
+pyxdg 0.26
+PyYAML 5.3.1
+pyzmq 18.1.1
+recommonmark 0.4.0
+rencode 1.0.6
+requests 2.22.0
+requests-unixsocket 0.2.0
+roman 2.0.0
+scikit-learn 0.24.2
+scipy 1.7.1
+seaborn 0.11.2
+SecretStorage 2.3.1
+Send2Trash 1.5.0
+setproctitle 1.1.10
+setuptools 45.2.0
+simplejson 3.16.0
+six 1.14.0
+smc-pyutil 1.1
+sortedcollections 1.0.1
+sortedcontainers 2.1.0
+soupsieve 1.9.5
+Sphinx 1.8.5
+sphinx-bootstrap-theme 0.6.5
+sphinxcontrib-bibtex 0.4.1
+ssh-import-id 5.10
+systemd-python 234
+tables 3.6.1
+tenacity 8.0.1
+terminado 0.8.2
+testpath 0.4.4
+threadpoolctl 2.2.0
+torch 1.9.0+cpu
+torchaudio 0.9.0
+torchvision 0.10.0+cpu
+tornado 5.1.1
+traitlets 4.3.3
+traittypes 0.2.1
+typing-extensions 3.10.0.2
+unattended-upgrades 0.1
+uritools 3.0.0
+urllib3 1.25.8
+wadllib 1.3.3
+wcwidth 0.1.8
+webencodings 0.5.1
+websockify 0.9.0
+wheel 0.34.2
+widgetsnbextension 3.5.1
+xlrd 1.1.0
+xlwt 1.3.0
+xpra 3.0.6
+yapf 0.29.0
+zeroconf 0.24.4
+zipp 1.0.0
+zmq 0.0.0
+~/PyTorch$ python mnist.py
+Traceback (most recent call last):
+ File "mnist.py", line 1, in <module>
+ import torch
+ImportError: No module named torch
+~/PyTorch$ python3 mnist.py
+Downloading http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz
+Downloading http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz to .data/MNIST/raw/train-images-idx3-ubyte.gz
+ 0.0% 0.0% 0.0% 0.0% 0.1% 0.1% 0.1% 0.1% 0.1% 0.1% 0.1% 0.1% 0.1% 0.1% 0.2% 0.2% 0.2% 0.2% 0.2% 0.2% 0.2% 0.2% 0.2% 0.2% 0.3% 0.3% 0.3% 0.3% 0.3% 0.3% 0.3% 0.3% 0.3% 0.4% 0.4% 0.4% 0.4% 0.4% 0.4% 0.4% 0.4% 0.4% 0.4% 0.5% 0.5% 0.5% 0.5% 0.5% 0.5% 0.5% 0.5% 0.5% 0.5% 0.6% 0.6% 0.6% 0.6% 0.6% 0.6% 0.6% 0.6% 0.6% 0.7% 0.7% 0.7% 0.7% 0.7% 0.7% 0.7% 0.7% 0.7% 0.7% 0.8% 0.8% 0.8% 0.8% 0.8% 0.8% 0.8% 0.8% 0.8% 0.8% 0.9% 0.9% 0.9% 0.9% 0.9% 0.9% 0.9% 0.9% 0.9% 1.0% 1.0% 1.0% 1.0% 1.0% 1.0% 1.0% 1.0% 1.0% 1.0% 1.1% 1.1% 1.1% 1.1% 1.1% 1.1% 1.1% 1.1% 1.1% 1.1% 1.2% 1.2% 1.2% 1.2% 1.2% 1.2% 1.2% 1.2% 1.2% 1.2% 1.3% 1.3% 1.3% 1.3% 1.3% 1.3% 1.3% 1.3% 1.3% 1.4% 1.4% 1.4% 1.4% 1.4% 1.4% 1.4% 1.4% 1.4% 1.4% 1.5% 1.5% 1.5% 1.5% 1.5% 1.5% 1.5% 1.5% 1.5% 1.5% 1.6% 1.6% 1.6% 1.6% 1.6% 1.6% 1.6% 1.6% 1.6% 1.7% 1.7% 1.7% 1.7% 1.7% 1.7% 1.7% 1.7% 1.7% 1.7% 1.8% 1.8% 1.8% 1.8% 1.8% 1.8% 1.8% 1.8% 1.8% 1.8% 1.9% 1.9% 1.9% 1.9% 1.9% 1.9% 1.9% 1.9% 1.9% 2.0% 2.0% 2.0% 2.0% 2.0% 2.0% 2.0% 2.0% 2.0% 2.0% 2.1% 2.1% 2.1% 2.1% 2.1% 2.1% 2.1% 2.1% 2.1% 2.1% 2.2% 2.2% 2.2% 2.2% 2.2% 2.2% 2.2% 2.2% 2.2% 2.3% 2.3% 2.3% 2.3% 2.3% 2.3% 2.3% 2.3% 2.3% 2.3% 2.4% 2.4% 2.4% 2.4% 2.4% 2.4% 2.4% 2.4% 2.4% 2.4% 2.5% 2.5% 2.5% 2.5% 2.5% 2.5% 2.5% 2.5% 2.5% 2.6% 2.6% 2.6% 2.6% 2.6% 2.6% 2.6% 2.6% 2.6% 2.6% 2.7% 2.7% 2.7% 2.7% 2.7% 2.7% 2.7% 2.7% 2.7% 2.7% 2.8% 2.8% 2.8% 2.8% 2.8% 2.8% 2.8% 2.8% 2.8% 2.9% 2.9% 2.9% 2.9% 2.9% 2.9% 2.9% 2.9% 2.9% 2.9% 3.0% 3.0% 3.0% 3.0% 3.0% 3.0% 3.0% 3.0% 3.0% 3.0% 3.1% 3.1% 3.1% 3.1% 3.1% 3.1% 3.1% 3.1% 3.1% 3.2% 3.2% 3.2% 3.2% 3.2% 3.2% 3.2% 3.2% 3.2% 3.2% 3.3% 3.3% 3.3% 3.3% 3.3% 3.3% 3.3% 3.3% 3.3% 3.3% 3.4% 3.4% 3.4% 3.4% 3.4% 3.4% 3.4% 3.4% 3.4% 3.5% 3.5% 3.5% 3.5% 3.5% 3.5% 3.5% 3.5% 3.5% 3.5% 3.6% 3.6% 3.6% 3.6% 3.6% 3.6% 3.6% 3.6% 3.6% 3.6% 3.7% 3.7% 3.7% 3.7% 3.7% 3.7% 3.7% 3.7% 3.7% 3.7% 3.8% 3.8% 3.8% 3.8% 3.8% 3.8% 3.8% 3.8% 3.8% 3.9% 3.9% 3.9% 3.9% 3.9% 3.9% 3.9% 3.9% 3.9% 3.9% 4.0% 4.0% 4.0% 4.0% 4.0% 4.0% 4.0% 4.0% 4.0% 4.0% 4.1% 4.1% 4.1% 4.1% 4.1% 4.1% 4.1% 4.1% 4.1% 4.2% 4.2% 4.2% 4.2% 4.2% 4.2% 4.2% 4.2% 4.2% 4.2% 4.3% 4.3% 4.3% 4.3% 4.3% 4.3% 4.3% 4.3% 4.3% 4.3% 4.4% 4.4% 4.4% 4.4% 4.4% 4.4% 4.4% 4.4% 4.4% 4.5% 4.5% 4.5% 4.5% 4.5% 4.5% 4.5% 4.5% 4.5% 4.5% 4.6% 4.6% 4.6% 4.6% 4.6% 4.6% 4.6% 4.6% 4.6% 4.6% 4.7% 4.7% 4.7% 4.7% 4.7% 4.7% 4.7% 4.7% 4.7% 4.8% 4.8% 4.8% 4.8% 4.8% 4.8% 4.8% 4.8% 4.8% 4.8% 4.9% 4.9% 4.9% 4.9% 4.9% 4.9% 4.9% 4.9% 4.9% 4.9% 5.0% 5.0% 5.0% 5.0% 5.0% 5.0% 5.0% 5.0% 5.0% 5.1% 5.1% 5.1% 5.1% 5.1% 5.1% 5.1% 5.1% 5.1% 5.1% 5.2% 5.2% 5.2% 5.2% 5.2% 5.2% 5.2% 5.2% 5.2% 5.2% 5.3% 5.3% 5.3% 5.3% 5.3% 5.3% 5.3% 5.3% 5.3% 5.4% 5.4% 5.4% 5.4% 5.4% 5.4% 5.4% 5.4% 5.4% 5.4% 5.5% 5.5% 5.5% 5.5% 5.5% 5.5% 5.5% 5.5% 5.5% 5.5% 5.6% 5.6% 5.6% 5.6% 5.6% 5.6% 5.6% 5.6% 5.6% 5.7% 5.7% 5.7% 5.7% 5.7% 5.7% 5.7% 5.7% 5.7% 5.7% 5.8% 5.8% 5.8% 5.8% 5.8% 5.8% 5.8% 5.8% 5.8% 5.8% 5.9% 5.9% 5.9% 5.9% 5.9% 5.9% 5.9% 5.9% 5.9% 6.0% 6.0% 6.0% 6.0% 6.0% 6.0% 6.0% 6.0% 6.0% 6.0% 6.1% 6.1% 6.1% 6.1% 6.1% 6.1% 6.1% 6.1% 6.1% 6.1% 6.2% 6.2% 6.2% 6.2% 6.2% 6.2% 6.2% 6.2% 6.2% 6.2% 6.3% 6.3% 6.3% 6.3% 6.3% 6.3% 6.3% 6.3% 6.3% 6.4% 6.4% 6.4% 6.4% 6.4% 6.4% 6.4% 6.4% 6.4% 6.4% 6.5% 6.5% 6.5% 6.5% 6.5% 6.5% 6.5% 6.5% 6.5% 6.5% 6.6% 6.6% 6.6% 6.6% 6.6% 6.6% 6.6% 6.6% 6.6% 6.7% 6.7% 6.7% 6.7% 6.7% 6.7% 6.7% 6.7% 6.7% 6.7% 6.8% 6.8% 6.8% 6.8% 6.8% 6.8% 6.8% 6.8% 6.8% 6.8% 6.9% 6.9% 6.9% 6.9% 6.9% 6.9% 6.9% 6.9% 6.9% 7.0% 7.0% 7.0% 7.0% 7.0% 7.0% 7.0% 7.0% 7.0% 7.0% 7.1% 7.1% 7.1% 7.1% 7.1% 7.1% 7.1% 7.1% 7.1% 7.1% 7.2% 7.2% 7.2% 7.2% 7.2% 7.2% 7.2% 7.2% 7.2% 7.3% 7.3% 7.3% 7.3% 7.3% 7.3% 7.3% 7.3% 7.3% 
7.3% 7.4% 7.4% 7.4% 7.4% 7.4% 7.4% 7.4% 7.4% 7.4% 7.4% 7.5% 7.5% 7.5% 7.5% 7.5% 7.5% 7.5% 7.5% 7.5% 7.6% 7.6% 7.6% 7.6% 7.6% 7.6% 7.6% 7.6% 7.6% 7.6% 7.7% 7.7% 7.7% 7.7% 7.7% 7.7% 7.7% 7.7% 7.7% 7.7% 7.8% 7.8% 7.8% 7.8% 7.8% 7.8% 7.8% 7.8% 7.8% 7.9% 7.9% 7.9% 7.9% 7.9% 7.9% 7.9% 7.9% 7.9% 7.9% 8.0% 8.0% 8.0% 8.0% 8.0% 8.0% 8.0% 8.0% 8.0% 8.0% 8.1% 8.1% 8.1% 8.1% 8.1% 8.1% 8.1% 8.1% 8.1% 8.2% 8.2% 8.2% 8.2% 8.2% 8.2% 8.2% 8.2% 8.2% 8.2% 8.3% 8.3% 8.3% 8.3% 8.3% 8.3% 8.3% 8.3% 8.3% 8.3% 8.4% 8.4% 8.4% 8.4% 8.4% 8.4% 8.4% 8.4% 8.4% 8.5% 8.5% 8.5% 8.5% 8.5% 8.5% 8.5% 8.5% 8.5% 8.5% 8.6% 8.6% 8.6% 8.6% 8.6% 8.6% 8.6% 8.6% 8.6% 8.6% 8.7% 8.7% 8.7% 8.7% 8.7% 8.7% 8.7% 8.7% 8.7% 8.7% 8.8% 8.8% 8.8% 8.8% 8.8% 8.8% 8.8% 8.8% 8.8% 8.9% 8.9% 8.9% 8.9% 8.9% 8.9% 8.9% 8.9% 8.9% 8.9% 9.0% 9.0% 9.0% 9.0% 9.0% 9.0% 9.0% 9.0% 9.0% 9.0% 9.1% 9.1% 9.1% 9.1% 9.1% 9.1% 9.1% 9.1% 9.1% 9.2% 9.2% 9.2% 9.2% 9.2% 9.2% 9.2% 9.2% 9.2% 9.2% 9.3% 9.3% 9.3% 9.3% 9.3% 9.3% 9.3% 9.3% 9.3% 9.3% 9.4% 9.4% 9.4% 9.4% 9.4% 9.4% 9.4% 9.4% 9.4% 9.5% 9.5% 9.5% 9.5% 9.5% 9.5% 9.5% 9.5% 9.5% 9.5% 9.6% 9.6% 9.6% 9.6% 9.6% 9.6% 9.6% 9.6% 9.6% 9.6% 9.7% 9.7% 9.7% 9.7% 9.7% 9.7% 9.7% 9.7% 9.7% 9.8% 9.8% 9.8% 9.8% 9.8% 9.8% 9.8% 9.8% 9.8% 9.8% 9.9% 9.9% 9.9% 9.9% 9.9% 9.9% 9.9% 9.9% 9.9% 9.9% 10.0% 10.0% 10.0% 10.0% 10.0% 10.0% 10.0% 10.0% 10.0% 10.1% 10.1% 10.1% 10.1% 10.1% 10.1% 10.1% 10.1% 10.1% 10.1% 10.2% 10.2% 10.2% 10.2% 10.2% 10.2% 10.2% 10.2% 10.2% 10.2% 10.3% 10.3% 10.3% 10.3% 10.3% 10.3% 10.3% 10.3% 10.3% 10.4% 10.4% 10.4% 10.4% 10.4% 10.4% 10.4% 10.4% 10.4% 10.4% 10.5% 10.5% 10.5% 10.5% 10.5% 10.5% 10.5% 10.5% 10.5% 10.5% 10.6% 10.6% 10.6% 10.6% 10.6% 10.6% 10.6% 10.6% 10.6% 10.7% 10.7% 10.7% 10.7% 10.7% 10.7% 10.7% 10.7% 10.7% 10.7% 10.8% 10.8% 10.8% 10.8% 10.8% 10.8% 10.8% 10.8% 10.8% 10.8% 10.9% 10.9% 10.9% 10.9% 10.9% 10.9% 10.9% 10.9% 10.9% 11.0% 11.0% 11.0% 11.0% 11.0% 11.0% 11.0% 11.0% 11.0% 11.0% 11.1% 11.1% 11.1% 11.1% 11.1% 11.1% 11.1% 11.1% 11.1% 11.1% 11.2% 11.2% 11.2% 11.2% 11.2% 11.2% 11.2% 11.2% 11.2% 11.2% 11.3% 11.3% 11.3% 11.3% 11.3% 11.3% 11.3% 11.3% 11.3% 11.4% 11.4% 11.4% 11.4% 11.4% 11.4% 11.4% 11.4% 11.4% 11.4% 11.5% 11.5% 11.5% 11.5% 11.5% 11.5% 11.5% 11.5% 11.5% 11.5% 11.6% 11.6% 11.6% 11.6% 11.6% 11.6% 11.6% 11.6% 11.6% 11.7% 11.7% 11.7% 11.7% 11.7% 11.7% 11.7% 11.7% 11.7% 11.7% 11.8% 11.8% 11.8% 11.8% 11.8% 11.8% 11.8% 11.8% 11.8% 11.8% 11.9% 11.9% 11.9% 11.9% 11.9% 11.9% 11.9% 11.9% 11.9% 12.0% 12.0% 12.0% 12.0% 12.0% 12.0% 12.0% 12.0% 12.0% 12.0% 12.1% 12.1% 12.1% 12.1% 12.1% 12.1% 12.1% 12.1% 12.1% 12.1% 12.2% 12.2% 12.2% 12.2% 12.2% 12.2% 12.2% 12.2% 12.2% 12.3% 12.3% 12.3% 12.3% 12.3% 12.3% 12.3% 12.3% 12.3% 12.3% 12.4% 12.4% 12.4% 12.4% 12.4% 12.4% 12.4% 12.4% 12.4% 12.4% 12.5% 12.5% 12.5% 12.5% 12.5% 12.5% 12.5% 12.5% 12.5% 12.6% 12.6% 12.6% 12.6% 12.6% 12.6% 12.6% 12.6% 12.6% 12.6% 12.7% 12.7% 12.7% 12.7% 12.7% 12.7% 12.7% 12.7% 12.7% 12.7% 12.8% 12.8% 12.8% 12.8% 12.8% 12.8% 12.8% 12.8% 12.8% 12.9% 12.9% 12.9% 12.9% 12.9% 12.9% 12.9% 12.9% 12.9% 12.9% 13.0% 13.0% 13.0% 13.0% 13.0% 13.0% 13.0% 13.0% 13.0% 13.0% 13.1% 13.1% 13.1% 13.1% 13.1% 13.1% 13.1% 13.1% 13.1% 13.2% 13.2% 13.2% 13.2% 13.2% 13.2% 13.2% 13.2% 13.2% 13.2% 13.3% 13.3% 13.3% 13.3% 13.3% 13.3% 13.3% 13.3% 13.3% 13.3% 13.4% 13.4% 13.4% 13.4% 13.4% 13.4% 13.4% 13.4% 13.4% 13.5% 13.5% 13.5% 13.5% 13.5% 13.5% 13.5% 13.5% 13.5% 13.5% 13.6% 13.6% 13.6% 13.6% 13.6% 13.6% 13.6% 13.6% 13.6% 13.6% 13.7% 13.7% 13.7% 13.7% 13.7% 13.7% 13.7% 13.7% 13.7% 13.7% 13.8% 13.8% 13.8% 13.8% 13.8% 13.8% 13.8% 13.8% 13.8% 13.9% 13.9% 13.9% 13.9% 
13.9% 13.9% 13.9% 13.9% 13.9% 13.9% 14.0% 14.0% 14.0% 14.0% 14.0% 14.0% 14.0% 14.0% 14.0% 14.0% 14.1% 14.1% 14.1% 14.1% 14.1% 14.1% 14.1% 14.1% 14.1% 14.2% 14.2% 14.2% 14.2% 14.2% 14.2% 14.2% 14.2% 14.2% 14.2% 14.3% 14.3% 14.3% 14.3% 14.3% 14.3% 14.3% 14.3% 14.3% 14.3% 14.4% 14.4% 14.4% 14.4% 14.4% 14.4% 14.4% 14.4% 14.4% 14.5% 14.5% 14.5% 14.5% 14.5% 14.5% 14.5% 14.5% 14.5% 14.5% 14.6% 14.6% 14.6% 14.6% 14.6% 14.6% 14.6% 14.6% 14.6% 14.6% 14.7% 14.7% 14.7% 14.7% 14.7% 14.7% 14.7% 14.7% 14.7% 14.8% 14.8% 14.8% 14.8% 14.8% 14.8% 14.8% 14.8% 14.8% 14.8% 14.9% 14.9% 14.9% 14.9% 14.9% 14.9% 14.9% 14.9% 14.9% 14.9% 15.0% 15.0% 15.0% 15.0% 15.0% 15.0% 15.0% 15.0% 15.0% 15.1% 15.1% 15.1% 15.1% 15.1% 15.1% 15.1% 15.1% 15.1% 15.1% 15.2% 15.2% 15.2% 15.2% 15.2% 15.2% 15.2% 15.2% 15.2% 15.2% 15.3% 15.3% 15.3% 15.3% 15.3% 15.3% 15.3% 15.3% 15.3% 15.4% 15.4% 15.4% 15.4% 15.4% 15.4% 15.4% 15.4% 15.4% 15.4% 15.5% 15.5% 15.5% 15.5% 15.5% 15.5% 15.5% 15.5% 15.5% 15.5% 15.6% 15.6% 15.6% 15.6% 15.6% 15.6% 15.6% 15.6% 15.6% 15.7% 15.7% 15.7% 15.7% 15.7% 15.7% 15.7% 15.7% 15.7% 15.7% 15.8% 15.8% 15.8% 15.8% 15.8% 15.8% 15.8% 15.8% 15.8% 15.8% 15.9% 15.9% 15.9% 15.9% 15.9% 15.9% 15.9% 15.9% 15.9% 16.0% 16.0% 16.0% 16.0% 16.0% 16.0% 16.0% 16.0% 16.0% 16.0% 16.1% 16.1% 16.1% 16.1% 16.1% 16.1% 16.1% 16.1% 16.1% 16.1% 16.2% 16.2% 16.2% 16.2% 16.2% 16.2% 16.2% 16.2% 16.2% 16.2% 16.3% 16.3% 16.3% 16.3% 16.3% 16.3% 16.3% 16.3% 16.3% 16.4% 16.4% 16.4% 16.4% 16.4% 16.4% 16.4% 16.4% 16.4% 16.4% 16.5% 16.5% 16.5% 16.5% 16.5% 16.5% 16.5% 16.5% 16.5% 16.5% 16.6% 16.6% 16.6% 16.6% 16.6% 16.6% 16.6% 16.6% 16.6% 16.7% 16.7% 16.7% 16.7% 16.7% 16.7% 16.7% 16.7% 16.7% 16.7% 16.8% 16.8% 16.8% 16.8% 16.8% 16.8% 16.8% 16.8% 16.8% 16.8% 16.9% 16.9% 16.9% 16.9% 16.9% 16.9% 16.9% 16.9% 16.9% 17.0% 17.0% 17.0% 17.0% 17.0% 17.0% 17.0% 17.0% 17.0% 17.0% 17.1% 17.1% 17.1% 17.1% 17.1% 17.1% 17.1% 17.1% 17.1% 17.1% 17.2% 17.2% 17.2% 17.2% 17.2% 17.2% 17.2% 17.2% 17.2% 17.3% 17.3% 17.3% 17.3% 17.3% 17.3% 17.3% 17.3% 17.3% 17.3% 17.4% 17.4% 17.4% 17.4% 17.4% 17.4% 17.4% 17.4% 17.4% 17.4% 17.5% 17.5% 17.5% 17.5% 17.5% 17.5% 17.5% 17.5% 17.5% 17.6% 17.6% 17.6% 17.6% 17.6% 17.6% 17.6% 17.6% 17.6% 17.6% 17.7% 17.7% 17.7% 17.7% 17.7% 17.7% 17.7% 17.7% 17.7% 17.7% 17.8% 17.8% 17.8% 17.8% 17.8% 17.8% 17.8% 17.8% 17.8% 17.9% 17.9% 17.9% 17.9% 17.9% 17.9% 17.9% 17.9% 17.9% 17.9% 18.0% 18.0% 18.0% 18.0% 18.0% 18.0% 18.0% 18.0% 18.0% 18.0% 18.1% 18.1% 18.1% 18.1% 18.1% 18.1% 18.1% 18.1% 18.1% 18.2% 18.2% 18.2% 18.2% 18.2% 18.2% 18.2% 18.2% 18.2% 18.2% 18.3% 18.3% 18.3% 18.3% 18.3% 18.3% 18.3% 18.3% 18.3% 18.3% 18.4% 18.4% 18.4% 18.4% 18.4% 18.4% 18.4% 18.4% 18.4% 18.5% 18.5% 18.5% 18.5% 18.5% 18.5% 18.5% 18.5% 18.5% 18.5% 18.6% 18.6% 18.6% 18.6% 18.6% 18.6% 18.6% 18.6% 18.6% 18.6% 18.7% 18.7% 18.7% 18.7% 18.7% 18.7% 18.7% 18.7% 18.7% 18.7% 18.8% 18.8% 18.8% 18.8% 18.8% 18.8% 18.8% 18.8% 18.8% 18.9% 18.9% 18.9% 18.9% 18.9% 18.9% 18.9% 18.9% 18.9% 18.9% 19.0% 19.0% 19.0% 19.0% 19.0% 19.0% 19.0% 19.0% 19.0% 19.0% 19.1% 19.1% 19.1% 19.1% 19.1% 19.1% 19.1% 19.1% 19.1% 19.2% 19.2% 19.2% 19.2% 19.2% 19.2% 19.2% 19.2% 19.2% 19.2% 19.3% 19.3% 19.3% 19.3% 19.3% 19.3% 19.3% 19.3% 19.3% 19.3% 19.4% 19.4% 19.4% 19.4% 19.4% 19.4% 19.4% 19.4% 19.4% 19.5% 19.5% 19.5% 19.5% 19.5% 19.5% 19.5% 19.5% 19.5% 19.5% 19.6% 19.6% 19.6% 19.6% 19.6% 19.6% 19.6% 19.6% 19.6% 19.6% 19.7% 19.7% 19.7% 19.7% 19.7% 19.7% 19.7% 19.7% 19.7% 19.8% 19.8% 19.8% 19.8% 19.8% 19.8% 19.8% 19.8% 19.8% 19.8% 19.9% 19.9% 19.9% 19.9% 19.9% 19.9% 19.9% 19.9% 19.9% 19.9% 20.0% 20.0% 20.0% 20.0% 20.0% 
20.0% 20.0% 20.0% 20.0% 20.1% 20.1% 20.1% 20.1% 20.1% 20.1% 20.1% 20.1% 20.1% 20.1% 20.2% 20.2% 20.2% 20.2% 20.2% 20.2% 20.2% 20.2% 20.2% 20.2% 20.3% 20.3% 20.3% 20.3% 20.3% 20.3% 20.3% 20.3% 20.3% 20.4% 20.4% 20.4% 20.4% 20.4% 20.4% 20.4% 20.4% 20.4% 20.4% 20.5% 20.5% 20.5% 20.5% 20.5% 20.5% 20.5% 20.5% 20.5% 20.5% 20.6% 20.6% 20.6% 20.6% 20.6% 20.6% 20.6% 20.6% 20.6% 20.7% 20.7% 20.7% 20.7% 20.7% 20.7% 20.7% 20.7% 20.7% 20.7% 20.8% 20.8% 20.8% 20.8% 20.8% 20.8% 20.8% 20.8% 20.8% 20.8% 20.9% 20.9% 20.9% 20.9% 20.9% 20.9% 20.9% 20.9% 20.9% 21.0% 21.0% 21.0% 21.0% 21.0% 21.0% 21.0% 21.0% 21.0% 21.0% 21.1% 21.1% 21.1% 21.1% 21.1% 21.1% 21.1% 21.1% 21.1% 21.1% 21.2% 21.2% 21.2% 21.2% 21.2% 21.2% 21.2% 21.2% 21.2% 21.2% 21.3% 21.3% 21.3% 21.3% 21.3% 21.3% 21.3% 21.3% 21.3% 21.4% 21.4% 21.4% 21.4% 21.4% 21.4% 21.4% 21.4% 21.4% 21.4% 21.5% 21.5% 21.5% 21.5% 21.5% 21.5% 21.5% 21.5% 21.5% 21.5% 21.6% 21.6% 21.6% 21.6% 21.6% 21.6% 21.6% 21.6% 21.6% 21.7% 21.7% 21.7% 21.7% 21.7% 21.7% 21.7% 21.7% 21.7% 21.7% 21.8% 21.8% 21.8% 21.8% 21.8% 21.8% 21.8% 21.8% 21.8% 21.8% 21.9% 21.9% 21.9% 21.9% 21.9% 21.9% 21.9% 21.9% 21.9% 22.0% 22.0% 22.0% 22.0% 22.0% 22.0% 22.0% 22.0% 22.0% 22.0% 22.1% 22.1% 22.1% 22.1% 22.1% 22.1% 22.1% 22.1% 22.1% 22.1% 22.2% 22.2% 22.2% 22.2% 22.2% 22.2% 22.2% 22.2% 22.2% 22.3% 22.3% 22.3% 22.3% 22.3% 22.3% 22.3% 22.3% 22.3% 22.3% 22.4% 22.4% 22.4% 22.4% 22.4% 22.4% 22.4% 22.4% 22.4% 22.4% 22.5% 22.5% 22.5% 22.5% 22.5% 22.5% 22.5% 22.5% 22.5% 22.6% 22.6% 22.6% 22.6% 22.6% 22.6% 22.6% 22.6% 22.6% 22.6% 22.7% 22.7% 22.7% 22.7% 22.7% 22.7% 22.7% 22.7% 22.7% 22.7% 22.8% 22.8% 22.8% 22.8% 22.8% 22.8% 22.8% 22.8% 22.8% 22.9% 22.9% 22.9% 22.9% 22.9% 22.9% 22.9% 22.9% 22.9% 22.9% 23.0% 23.0% 23.0% 23.0% 23.0% 23.0% 23.0% 23.0% 23.0% 23.0% 23.1% 23.1% 23.1% 23.1% 23.1% 23.1% 23.1% 23.1% 23.1% 23.2% 23.2% 23.2% 23.2% 23.2% 23.2% 23.2% 23.2% 23.2% 23.2% 23.3% 23.3% 23.3% 23.3% 23.3% 23.3% 23.3% 23.3% 23.3% 23.3% 23.4% 23.4% 23.4% 23.4% 23.4% 23.4% 23.4% 23.4% 23.4% 23.5% 23.5% 23.5% 23.5% 23.5% 23.5% 23.5% 23.5% 23.5% 23.5% 23.6% 23.6% 23.6% 23.6% 23.6% 23.6% 23.6% 23.6% 23.6% 23.6% 23.7% 23.7% 23.7% 23.7% 23.7% 23.7% 23.7% 23.7% 23.7% 23.7% 23.8% 23.8% 23.8% 23.8% 23.8% 23.8% 23.8% 23.8% 23.8% 23.9% 23.9% 23.9% 23.9% 23.9% 23.9% 23.9% 23.9% 23.9% 23.9% 24.0% 24.0% 24.0% 24.0% 24.0% 24.0% 24.0% 24.0% 24.0% 24.0% 24.1% 24.1% 24.1% 24.1% 24.1% 24.1% 24.1% 24.1% 24.1% 24.2% 24.2% 24.2% 24.2% 24.2% 24.2% 24.2% 24.2% 24.2% 24.2% 24.3% 24.3% 24.3% 24.3% 24.3% 24.3% 24.3% 24.3% 24.3% 24.3% 24.4% 24.4% 24.4% 24.4% 24.4% 24.4% 24.4% 24.4% 24.4% 24.5% 24.5% 24.5% 24.5% 24.5% 24.5% 24.5% 24.5% 24.5% 24.5% 24.6% 24.6% 24.6% 24.6% 24.6% 24.6% 24.6% 24.6% 24.6% 24.6% 24.7% 24.7% 24.7% 24.7% 24.7% 24.7% 24.7% 24.7% 24.7% 24.8% 24.8% 24.8% 24.8% 24.8% 24.8% 24.8% 24.8% 24.8% 24.8% 24.9% 24.9% 24.9% 24.9% 24.9% 24.9% 24.9% 24.9% 24.9% 24.9% 25.0% 25.0% 25.0% 25.0% 25.0% 25.0% 25.0% 25.0% 25.0% 25.1% 25.1% 25.1% 25.1% 25.1% 25.1% 25.1% 25.1% 25.1% 25.1% 25.2% 25.2% 25.2% 25.2% 25.2% 25.2% 25.2% 25.2% 25.2% 25.2% 25.3% 25.3% 25.3% 25.3% 25.3% 25.3% 25.3% 25.3% 25.3% 25.4% 25.4% 25.4% 25.4% 25.4% 25.4% 25.4% 25.4% 25.4% 25.4% 25.5% 25.5% 25.5% 25.5% 25.5% 25.5% 25.5% 25.5% 25.5% 25.5% 25.6% 25.6% 25.6% 25.6% 25.6% 25.6% 25.6% 25.6% 25.6% 25.7% 25.7% 25.7% 25.7% 25.7% 25.7% 25.7% 25.7% 25.7% 25.7% 25.8% 25.8% 25.8% 25.8% 25.8% 25.8% 25.8% 25.8% 25.8% 25.8% 25.9% 25.9% 25.9% 25.9% 25.9% 25.9% 25.9% 25.9% 25.9% 26.0% 26.0% 26.0% 26.0% 26.0% 26.0% 26.0% 26.0% 26.0% 26.0% 26.1% 26.1% 26.1% 26.1% 26.1% 26.1% 26.1% 
26.1% 26.1% 26.1% 26.2% 26.2% 26.2% 26.2% 26.2% 26.2% 26.2% 26.2% 26.2% 26.2% 26.3% 26.3% 26.3% 26.3% 26.3% 26.3% 26.3% 26.3% 26.3% 26.4% 26.4% 26.4% 26.4% 26.4% 26.4% 26.4% 26.4% 26.4% 26.4% 26.5% 26.5% 26.5% 26.5% 26.5% 26.5% 26.5% 26.5% 26.5% 26.5% 26.6% 26.6% 26.6% 26.6% 26.6% 26.6% 26.6% 26.6% 26.6% 26.7% 26.7% 26.7% 26.7% 26.7% 26.7% 26.7% 26.7% 26.7% 26.7% 26.8% 26.8% 26.8% 26.8% 26.8% 26.8% 26.8% 26.8% 26.8% 26.8% 26.9% 26.9% 26.9% 26.9% 26.9% 26.9% 26.9% 26.9% 26.9% 27.0% 27.0% 27.0% 27.0% 27.0% 27.0% 27.0% 27.0% 27.0% 27.0% 27.1% 27.1% 27.1% 27.1% 27.1% 27.1% 27.1% 27.1% 27.1% 27.1% 27.2% 27.2% 27.2% 27.2% 27.2% 27.2% 27.2% 27.2% 27.2% 27.3% 27.3% 27.3% 27.3% 27.3% 27.3% 27.3% 27.3% 27.3% 27.3% 27.4% 27.4% 27.4% 27.4% 27.4% 27.4% 27.4% 27.4% 27.4% 27.4% 27.5% 27.5% 27.5% 27.5% 27.5% 27.5% 27.5% 27.5% 27.5% 27.6% 27.6% 27.6% 27.6% 27.6% 27.6% 27.6% 27.6% 27.6% 27.6% 27.7% 27.7% 27.7% 27.7% 27.7% 27.7% 27.7% 27.7% 27.7% 27.7% 27.8% 27.8% 27.8% 27.8% 27.8% 27.8% 27.8% 27.8% 27.8% 27.9% 27.9% 27.9% 27.9% 27.9% 27.9% 27.9% 27.9% 27.9% 27.9% 28.0% 28.0% 28.0% 28.0% 28.0% 28.0% 28.0% 28.0% 28.0% 28.0% 28.1% 28.1% 28.1% 28.1% 28.1% 28.1% 28.1% 28.1% 28.1% 28.2% 28.2% 28.2% 28.2% 28.2% 28.2% 28.2% 28.2% 28.2% 28.2% 28.3% 28.3% 28.3% 28.3% 28.3% 28.3% 28.3% 28.3% 28.3% 28.3% 28.4% 28.4% 28.4% 28.4% 28.4% 28.4% 28.4% 28.4% 28.4% 28.5% 28.5% 28.5% 28.5% 28.5% 28.5% 28.5% 28.5% 28.5% 28.5% 28.6% 28.6% 28.6% 28.6% 28.6% 28.6% 28.6% 28.6% 28.6% 28.6% 28.7% 28.7% 28.7% 28.7% 28.7% 28.7% 28.7% 28.7% 28.7% 28.7% 28.8% 28.8% 28.8% 28.8% 28.8% 28.8% 28.8% 28.8% 28.8% 28.9% 28.9% 28.9% 28.9% 28.9% 28.9% 28.9% 28.9% 28.9% 28.9% 29.0% 29.0% 29.0% 29.0% 29.0% 29.0% 29.0% 29.0% 29.0% 29.0% 29.1% 29.1% 29.1% 29.1% 29.1% 29.1% 29.1% 29.1% 29.1% 29.2% 29.2% 29.2% 29.2% 29.2% 29.2% 29.2% 29.2% 29.2% 29.2% 29.3% 29.3% 29.3% 29.3% 29.3% 29.3% 29.3% 29.3% 29.3% 29.3% 29.4% 29.4% 29.4% 29.4% 29.4% 29.4% 29.4% 29.4% 29.4% 29.5% 29.5% 29.5% 29.5% 29.5% 29.5% 29.5% 29.5% 29.5% 29.5% 29.6% 29.6% 29.6% 29.6% 29.6% 29.6% 29.6% 29.6% 29.6% 29.6% 29.7% 29.7% 29.7% 29.7% 29.7% 29.7% 29.7% 29.7% 29.7% 29.8% 29.8% 29.8% 29.8% 29.8% 29.8% 29.8% 29.8% 29.8% 29.8% 29.9% 29.9% 29.9% 29.9% 29.9% 29.9% 29.9% 29.9% 29.9% 29.9% 30.0% 30.0% 30.0% 30.0% 30.0% 30.0% 30.0% 30.0% 30.0% 30.1% 30.1% 30.1% 30.1% 30.1% 30.1% 30.1% 30.1% 30.1% 30.1% 30.2% 30.2% 30.2% 30.2% 30.2% 30.2% 30.2% 30.2% 30.2% 30.2% 30.3% 30.3% 30.3% 30.3% 30.3% 30.3% 30.3% 30.3% 30.3% 30.4% 30.4% 30.4% 30.4% 30.4% 30.4% 30.4% 30.4% 30.4% 30.4% 30.5% 30.5% 30.5% 30.5% 30.5% 30.5% 30.5% 30.5% 30.5% 30.5% 30.6% 30.6% 30.6% 30.6% 30.6% 30.6% 30.6% 30.6% 30.6% 30.7% 30.7% 30.7% 30.7% 30.7% 30.7% 30.7% 30.7% 30.7% 30.7% 30.8% 30.8% 30.8% 30.8% 30.8% 30.8% 30.8% 30.8% 30.8% 30.8% 30.9% 30.9% 30.9% 30.9% 30.9% 30.9% 30.9% 30.9% 30.9% 31.0% 31.0% 31.0% 31.0% 31.0% 31.0% 31.0% 31.0% 31.0% 31.0% 31.1% 31.1% 31.1% 31.1% 31.1% 31.1% 31.1% 31.1% 31.1% 31.1% 31.2% 31.2% 31.2% 31.2% 31.2% 31.2% 31.2% 31.2% 31.2% 31.2% 31.3% 31.3% 31.3% 31.3% 31.3% 31.3% 31.3% 31.3% 31.3% 31.4% 31.4% 31.4% 31.4% 31.4% 31.4% 31.4% 31.4% 31.4% 31.4% 31.5% 31.5% 31.5% 31.5% 31.5% 31.5% 31.5% 31.5% 31.5% 31.5% 31.6% 31.6% 31.6% 31.6% 31.6% 31.6% 31.6% 31.6% 31.6% 31.7% 31.7% 31.7% 31.7% 31.7% 31.7% 31.7% 31.7% 31.7% 31.7% 31.8% 31.8% 31.8% 31.8% 31.8% 31.8% 31.8% 31.8% 31.8% 31.8% 31.9% 31.9% 31.9% 31.9% 31.9% 31.9% 31.9% 31.9% 31.9% 32.0% 32.0% 32.0% 32.0% 32.0% 32.0% 32.0% 32.0% 32.0% 32.0% 32.1% 32.1% 32.1% 32.1% 32.1% 32.1% 32.1% 32.1% 32.1% 32.1% 32.2% 32.2% 32.2% 32.2% 32.2% 32.2% 32.2% 32.2% 
32.2% 32.3% 32.3% 32.3% 32.3% 32.3% 32.3% 32.3% 32.3% 32.3% 32.3% 32.4% 32.4% 32.4% 32.4% 32.4% 32.4% 32.4% 32.4% 32.4% 32.4% 32.5% 32.5% 32.5% 32.5% 32.5% 32.5% 32.5% 32.5% 32.5% 32.6% 32.6% 32.6% 32.6% 32.6% 32.6% 32.6% 32.6% 32.6% 32.6% 32.7% 32.7% 32.7% 32.7% 32.7% 32.7% 32.7% 32.7% 32.7% 32.7% 32.8% 32.8% 32.8% 32.8% 32.8% 32.8% 32.8% 32.8% 32.8% 32.9% 32.9% 32.9% 32.9% 32.9% 32.9% 32.9% 32.9% 32.9% 32.9% 33.0% 33.0% 33.0% 33.0% 33.0% 33.0% 33.0% 33.0% 33.0% 33.0% 33.1% 33.1% 33.1% 33.1% 33.1% 33.1% 33.1% 33.1% 33.1% 33.2% 33.2% 33.2% 33.2% 33.2% 33.2% 33.2% 33.2% 33.2% 33.2% 33.3% 33.3% 33.3% 33.3% 33.3% 33.3% 33.3% 33.3% 33.3% 33.3% 33.4% 33.4% 33.4% 33.4% 33.4% 33.4% 33.4% 33.4% 33.4% 33.5% 33.5% 33.5% 33.5% 33.5% 33.5% 33.5% 33.5% 33.5% 33.5% 33.6% 33.6% 33.6% 33.6% 33.6% 33.6% 33.6% 33.6% 33.6% 33.6% 33.7% 33.7% 33.7% 33.7% 33.7% 33.7% 33.7% 33.7% 33.7% 33.7% 33.8% 33.8% 33.8% 33.8% 33.8% 33.8% 33.8% 33.8% 33.8% 33.9% 33.9% 33.9% 33.9% 33.9% 33.9% 33.9% 33.9% 33.9% 33.9% 34.0% 34.0% 34.0% 34.0% 34.0% 34.0% 34.0% 34.0% 34.0% 34.0% 34.1% 34.1% 34.1% 34.1% 34.1% 34.1% 34.1% 34.1% 34.1% 34.2% 34.2% 34.2% 34.2% 34.2% 34.2% 34.2% 34.2% 34.2% 34.2% 34.3% 34.3% 34.3% 34.3% 34.3% 34.3% 34.3% 34.3% 34.3% 34.3% 34.4% 34.4% 34.4% 34.4% 34.4% 34.4% 34.4% 34.4% 34.4% 34.5% 34.5% 34.5% 34.5% 34.5% 34.5% 34.5% 34.5% 34.5% 34.5% 34.6% 34.6% 34.6% 34.6% 34.6% 34.6% 34.6% 34.6% 34.6% 34.6% 34.7% 34.7% 34.7% 34.7% 34.7% 34.7% 34.7% 34.7% 34.7% 34.8% 34.8% 34.8% 34.8% 34.8% 34.8% 34.8% 34.8% 34.8% 34.8% 34.9% 34.9% 34.9% 34.9% 34.9% 34.9% 34.9% 34.9% 34.9% 34.9% 35.0% 35.0% 35.0% 35.0% 35.0% 35.0% 35.0% 35.0% 35.0% 35.1% 35.1% 35.1% 35.1% 35.1% 35.1% 35.1% 35.1% 35.1% 35.1% 35.2% 35.2% 35.2% 35.2% 35.2% 35.2% 35.2% 35.2% 35.2% 35.2% 35.3% 35.3% 35.3% 35.3% 35.3% 35.3% 35.3% 35.3% 35.3% 35.4% 35.4% 35.4% 35.4% 35.4% 35.4% 35.4% 35.4% 35.4% 35.4% 35.5% 35.5% 35.5% 35.5% 35.5% 35.5% 35.5% 35.5% 35.5% 35.5% 35.6% 35.6% 35.6% 35.6% 35.6% 35.6% 35.6% 35.6% 35.6% 35.7% 35.7% 35.7% 35.7% 35.7% 35.7% 35.7% 35.7% 35.7% 35.7% 35.8% 35.8% 35.8% 35.8% 35.8% 35.8% 35.8% 35.8% 35.8% 35.8% 35.9% 35.9% 35.9% 35.9% 35.9% 35.9% 35.9% 35.9% 35.9% 36.0% 36.0% 36.0% 36.0% 36.0% 36.0% 36.0% 36.0% 36.0% 36.0% 36.1% 36.1% 36.1% 36.1% 36.1% 36.1% 36.1% 36.1% 36.1% 36.1% 36.2% 36.2% 36.2% 36.2% 36.2% 36.2% 36.2% 36.2% 36.2% 36.2% 36.3% 36.3% 36.3% 36.3% 36.3% 36.3% 36.3% 36.3% 36.3% 36.4% 36.4% 36.4% 36.4% 36.4% 36.4% 36.4% 36.4% 36.4% 36.4% 36.5% 36.5% 36.5% 36.5% 36.5% 36.5% 36.5% 36.5% 36.5% 36.5% 36.6% 36.6% 36.6% 36.6% 36.6% 36.6% 36.6% 36.6% 36.6% 36.7% 36.7% 36.7% 36.7% 36.7% 36.7% 36.7% 36.7% 36.7% 36.7% 36.8% 36.8% 36.8% 36.8% 36.8% 36.8% 36.8% 36.8% 36.8% 36.8% 36.9% 36.9% 36.9% 36.9% 36.9% 36.9% 36.9% 36.9% 36.9% 37.0% 37.0% 37.0% 37.0% 37.0% 37.0% 37.0% 37.0% 37.0% 37.0% 37.1% 37.1% 37.1% 37.1% 37.1% 37.1% 37.1% 37.1% 37.1% 37.1% 37.2% 37.2% 37.2% 37.2% 37.2% 37.2% 37.2% 37.2% 37.2% 37.3% 37.3% 37.3% 37.3% 37.3% 37.3% 37.3% 37.3% 37.3% 37.3% 37.4% 37.4% 37.4% 37.4% 37.4% 37.4% 37.4% 37.4% 37.4% 37.4% 37.5% 37.5% 37.5% 37.5% 37.5% 37.5% 37.5% 37.5% 37.5% 37.6% 37.6% 37.6% 37.6% 37.6% 37.6% 37.6% 37.6% 37.6% 37.6% 37.7% 37.7% 37.7% 37.7% 37.7% 37.7% 37.7% 37.7% 37.7% 37.7% 37.8% 37.8% 37.8% 37.8% 37.8% 37.8% 37.8% 37.8% 37.8% 37.9% 37.9% 37.9% 37.9% 37.9% 37.9% 37.9% 37.9% 37.9% 37.9% 38.0% 38.0% 38.0% 38.0% 38.0% 38.0% 38.0% 38.0% 38.0% 38.0% 38.1% 38.1% 38.1% 38.1% 38.1% 38.1% 38.1% 38.1% 38.1% 38.2% 38.2% 38.2% 38.2% 38.2% 38.2% 38.2% 38.2% 38.2% 38.2% 38.3% 38.3% 38.3% 38.3% 38.3% 38.3% 38.3% 38.3% 38.3% 38.3% 
38.4% 38.4% 38.4% 38.4% 38.4% 38.4% 38.4% 38.4% 38.4% 38.5% 38.5% 38.5% 38.5% 38.5% 38.5% 38.5% 38.5% 38.5% 38.5% 38.6% 38.6% 38.6% 38.6% 38.6% 38.6% 38.6% 38.6% 38.6% 38.6% 38.7% 38.7% 38.7% 38.7% 38.7% 38.7% 38.7% 38.7% 38.7% 38.7% 38.8% 38.8% 38.8% 38.8% 38.8% 38.8% 38.8% 38.8% 38.8% 38.9% 38.9% 38.9% 38.9% 38.9% 38.9% 38.9% 38.9% 38.9% 38.9% 39.0% 39.0% 39.0% 39.0% 39.0% 39.0% 39.0% 39.0% 39.0% 39.0% 39.1% 39.1% 39.1% 39.1% 39.1% 39.1% 39.1% 39.1% 39.1% 39.2% 39.2% 39.2% 39.2% 39.2% 39.2% 39.2% 39.2% 39.2% 39.2% 39.3% 39.3% 39.3% 39.3% 39.3% 39.3% 39.3% 39.3% 39.3% 39.3% 39.4% 39.4% 39.4% 39.4% 39.4% 39.4% 39.4% 39.4% 39.4% 39.5% 39.5% 39.5% 39.5% 39.5% 39.5% 39.5% 39.5% 39.5% 39.5% 39.6% 39.6% 39.6% 39.6% 39.6% 39.6% 39.6% 39.6% 39.6% 39.6% 39.7% 39.7% 39.7% 39.7% 39.7% 39.7% 39.7% 39.7% 39.7% 39.8% 39.8% 39.8% 39.8% 39.8% 39.8% 39.8% 39.8% 39.8% 39.8% 39.9% 39.9% 39.9% 39.9% 39.9% 39.9% 39.9% 39.9% 39.9% 39.9% 40.0% 40.0% 40.0% 40.0% 40.0% 40.0% 40.0% 40.0% 40.0% 40.1% 40.1% 40.1% 40.1% 40.1% 40.1% 40.1% 40.1% 40.1% 40.1% 40.2% 40.2% 40.2% 40.2% 40.2% 40.2% 40.2% 40.2% 40.2% 40.2% 40.3% 40.3% 40.3% 40.3% 40.3% 40.3% 40.3% 40.3% 40.3% 40.4% 40.4% 40.4% 40.4% 40.4% 40.4% 40.4% 40.4% 40.4% 40.4% 40.5% 40.5% 40.5% 40.5% 40.5% 40.5% 40.5% 40.5% 40.5% 40.5% 40.6% 40.6% 40.6% 40.6% 40.6% 40.6% 40.6% 40.6% 40.6% 40.7% 40.7% 40.7% 40.7% 40.7% 40.7% 40.7% 40.7% 40.7% 40.7% 40.8% 40.8% 40.8% 40.8% 40.8% 40.8% 40.8% 40.8% 40.8% 40.8% 40.9% 40.9% 40.9% 40.9% 40.9% 40.9% 40.9% 40.9% 40.9% 40.9% 41.0% 41.0% 41.0% 41.0% 41.0% 41.0% 41.0% 41.0% 41.0% 41.1% 41.1% 41.1% 41.1% 41.1% 41.1% 41.1% 41.1% 41.1% 41.1% 41.2% 41.2% 41.2% 41.2% 41.2% 41.2% 41.2% 41.2% 41.2% 41.2% 41.3% 41.3% 41.3% 41.3% 41.3% 41.3% 41.3% 41.3% 41.3% 41.4% 41.4% 41.4% 41.4% 41.4% 41.4% 41.4% 41.4% 41.4% 41.4% 41.5% 41.5% 41.5% 41.5% 41.5% 41.5% 41.5% 41.5% 41.5% 41.5% 41.6% 41.6% 41.6% 41.6% 41.6% 41.6% 41.6% 41.6% 41.6% 41.7% 41.7% 41.7% 41.7% 41.7% 41.7% 41.7% 41.7% 41.7% 41.7% 41.8% 41.8% 41.8% 41.8% 41.8% 41.8% 41.8% 41.8% 41.8% 41.8% 41.9% 41.9% 41.9% 41.9% 41.9% 41.9% 41.9% 41.9% 41.9% 42.0% 42.0% 42.0% 42.0% 42.0% 42.0% 42.0% 42.0% 42.0% 42.0% 42.1% 42.1% 42.1% 42.1% 42.1% 42.1% 42.1% 42.1% 42.1% 42.1% 42.2% 42.2% 42.2% 42.2% 42.2% 42.2% 42.2% 42.2% 42.2% 42.3% 42.3% 42.3% 42.3% 42.3% 42.3% 42.3% 42.3% 42.3% 42.3% 42.4% 42.4% 42.4% 42.4% 42.4% 42.4% 42.4% 42.4% 42.4% 42.4% 42.5% 42.5% 42.5% 42.5% 42.5% 42.5% 42.5% 42.5% 42.5% 42.6% 42.6% 42.6% 42.6% 42.6% 42.6% 42.6% 42.6% 42.6% 42.6% 42.7% 42.7% 42.7% 42.7% 42.7% 42.7% 42.7% 42.7% 42.7% 42.7% 42.8% 42.8% 42.8% 42.8% 42.8% 42.8% 42.8% 42.8% 42.8% 42.9% 42.9% 42.9% 42.9% 42.9% 42.9% 42.9% 42.9% 42.9% 42.9% 43.0% 43.0% 43.0% 43.0% 43.0% 43.0% 43.0% 43.0% 43.0% 43.0% 43.1% 43.1% 43.1% 43.1% 43.1% 43.1% 43.1% 43.1% 43.1% 43.2% 43.2% 43.2% 43.2% 43.2% 43.2% 43.2% 43.2% 43.2% 43.2% 43.3% 43.3% 43.3% 43.3% 43.3% 43.3% 43.3% 43.3% 43.3% 43.3% 43.4% 43.4% 43.4% 43.4% 43.4% 43.4% 43.4% 43.4% 43.4% 43.4% 43.5% 43.5% 43.5% 43.5% 43.5% 43.5% 43.5% 43.5% 43.5% 43.6% 43.6% 43.6% 43.6% 43.6% 43.6% 43.6% 43.6% 43.6% 43.6% 43.7% 43.7% 43.7% 43.7% 43.7% 43.7% 43.7% 43.7% 43.7% 43.7% 43.8% 43.8% 43.8% 43.8% 43.8% 43.8% 43.8% 43.8% 43.8% 43.9% 43.9% 43.9% 43.9% 43.9% 43.9% 43.9% 43.9% 43.9% 43.9% 44.0% 44.0% 44.0% 44.0% 44.0% 44.0% 44.0% 44.0% 44.0% 44.0% 44.1% 44.1% 44.1% 44.1% 44.1% 44.1% 44.1% 44.1% 44.1% 44.2% 44.2% 44.2% 44.2% 44.2% 44.2% 44.2% 44.2% 44.2% 44.2% 44.3% 44.3% 44.3% 44.3% 44.3% 44.3% 44.3% 44.3% 44.3% 44.3% 44.4% 44.4% 44.4% 44.4% 44.4% 44.4% 44.4% 44.4% 44.4% 44.5% 44.5% 
44.5% 44.5% 44.5% 44.5% 44.5% 44.5% 44.5% 44.5% 44.6% 44.6% 44.6% 44.6% 44.6% 44.6% 44.6% 44.6% 44.6% 44.6% 44.7% 44.7% 44.7% 44.7% 44.7% 44.7% 44.7% 44.7% 44.7% 44.8% 44.8% 44.8% 44.8% 44.8% 44.8% 44.8% 44.8% 44.8% 44.8% 44.9% 44.9% 44.9% 44.9% 44.9% 44.9% 44.9% 44.9% 44.9% 44.9% 45.0% 45.0% 45.0% 45.0% 45.0% 45.0% 45.0% 45.0% 45.0% 45.1% 45.1% 45.1% 45.1% 45.1% 45.1% 45.1% 45.1% 45.1% 45.1% 45.2% 45.2% 45.2% 45.2% 45.2% 45.2% 45.2% 45.2% 45.2% 45.2% 45.3% 45.3% 45.3% 45.3% 45.3% 45.3% 45.3% 45.3% 45.3% 45.4% 45.4% 45.4% 45.4% 45.4% 45.4% 45.4% 45.4% 45.4% 45.4% 45.5% 45.5% 45.5% 45.5% 45.5% 45.5% 45.5% 45.5% 45.5% 45.5% 45.6% 45.6% 45.6% 45.6% 45.6% 45.6% 45.6% 45.6% 45.6% 45.7% 45.7% 45.7% 45.7% 45.7% 45.7% 45.7% 45.7% 45.7% 45.7% 45.8% 45.8% 45.8% 45.8% 45.8% 45.8% 45.8% 45.8% 45.8% 45.8% 45.9% 45.9% 45.9% 45.9% 45.9% 45.9% 45.9% 45.9% 45.9% 45.9% 46.0% 46.0% 46.0% 46.0% 46.0% 46.0% 46.0% 46.0% 46.0% 46.1% 46.1% 46.1% 46.1% 46.1% 46.1% 46.1% 46.1% 46.1% 46.1% 46.2% 46.2% 46.2% 46.2% 46.2% 46.2% 46.2% 46.2% 46.2% 46.2% 46.3% 46.3% 46.3% 46.3% 46.3% 46.3% 46.3% 46.3% 46.3% 46.4% 46.4% 46.4% 46.4% 46.4% 46.4% 46.4% 46.4% 46.4% 46.4% 46.5% 46.5% 46.5% 46.5% 46.5% 46.5% 46.5% 46.5% 46.5% 46.5% 46.6% 46.6% 46.6% 46.6% 46.6% 46.6% 46.6% 46.6% 46.6% 46.7% 46.7% 46.7% 46.7% 46.7% 46.7% 46.7% 46.7% 46.7% 46.7% 46.8% 46.8% 46.8% 46.8% 46.8% 46.8% 46.8% 46.8% 46.8% 46.8% 46.9% 46.9% 46.9% 46.9% 46.9% 46.9% 46.9% 46.9% 46.9% 47.0% 47.0% 47.0% 47.0% 47.0% 47.0% 47.0% 47.0% 47.0% 47.0% 47.1% 47.1% 47.1% 47.1% 47.1% 47.1% 47.1% 47.1% 47.1% 47.1% 47.2% 47.2% 47.2% 47.2% 47.2% 47.2% 47.2% 47.2% 47.2% 47.3% 47.3% 47.3% 47.3% 47.3% 47.3% 47.3% 47.3% 47.3% 47.3% 47.4% 47.4% 47.4% 47.4% 47.4% 47.4% 47.4% 47.4% 47.4% 47.4% 47.5% 47.5% 47.5% 47.5% 47.5% 47.5% 47.5% 47.5% 47.5% 47.6% 47.6% 47.6% 47.6% 47.6% 47.6% 47.6% 47.6% 47.6% 47.6% 47.7% 47.7% 47.7% 47.7% 47.7% 47.7% 47.7% 47.7% 47.7% 47.7% 47.8% 47.8% 47.8% 47.8% 47.8% 47.8% 47.8% 47.8% 47.8% 47.9% 47.9% 47.9% 47.9% 47.9% 47.9% 47.9% 47.9% 47.9% 47.9% 48.0% 48.0% 48.0% 48.0% 48.0% 48.0% 48.0% 48.0% 48.0% 48.0% 48.1% 48.1% 48.1% 48.1% 48.1% 48.1% 48.1% 48.1% 48.1% 48.2% 48.2% 48.2% 48.2% 48.2% 48.2% 48.2% 48.2% 48.2% 48.2% 48.3% 48.3% 48.3% 48.3% 48.3% 48.3% 48.3% 48.3% 48.3% 48.3% 48.4% 48.4% 48.4% 48.4% 48.4% 48.4% 48.4% 48.4% 48.4% 48.4% 48.5% 48.5% 48.5% 48.5% 48.5% 48.5% 48.5% 48.5% 48.5% 48.6% 48.6% 48.6% 48.6% 48.6% 48.6% 48.6% 48.6% 48.6% 48.6% 48.7% 48.7% 48.7% 48.7% 48.7% 48.7% 48.7% 48.7% 48.7% 48.7% 48.8% 48.8% 48.8% 48.8% 48.8% 48.8% 48.8% 48.8% 48.8% 48.9% 48.9% 48.9% 48.9% 48.9% 48.9% 48.9% 48.9% 48.9% 48.9% 49.0% 49.0% 49.0% 49.0% 49.0% 49.0% 49.0% 49.0% 49.0% 49.0% 49.1% 49.1% 49.1% 49.1% 49.1% 49.1% 49.1% 49.1% 49.1% 49.2% 49.2% 49.2% 49.2% 49.2% 49.2% 49.2% 49.2% 49.2% 49.2% 49.3% 49.3% 49.3% 49.3% 49.3% 49.3% 49.3% 49.3% 49.3% 49.3% 49.4% 49.4% 49.4% 49.4% 49.4% 49.4% 49.4% 49.4% 49.4% 49.5% 49.5% 49.5% 49.5% 49.5% 49.5% 49.5% 49.5% 49.5% 49.5% 49.6% 49.6% 49.6% 49.6% 49.6% 49.6% 49.6% 49.6% 49.6% 49.6% 49.7% 49.7% 49.7% 49.7% 49.7% 49.7% 49.7% 49.7% 49.7% 49.8% 49.8% 49.8% 49.8% 49.8% 49.8% 49.8% 49.8% 49.8% 49.8% 49.9% 49.9% 49.9% 49.9% 49.9% 49.9% 49.9% 49.9% 49.9% 49.9% 50.0% 50.0% 50.0% 50.0% 50.0% 50.0% 50.0% 50.0% 50.0% 50.1% 50.1% 50.1% 50.1% 50.1% 50.1% 50.1% 50.1% 50.1% 50.1% 50.2% 50.2% 50.2% 50.2% 50.2% 50.2% 50.2% 50.2% 50.2% 50.2% 50.3% 50.3% 50.3% 50.3% 50.3% 50.3% 50.3% 50.3% 50.3% 50.4% 50.4% 50.4% 50.4% 50.4% 50.4% 50.4% 50.4% 50.4% 50.4% 50.5% 50.5% 50.5% 50.5% 50.5% 50.5% 50.5% 50.5% 50.5% 50.5% 50.6% 50.6% 50.6% 
50.6% 50.6% 50.6% 50.6% 50.6% 50.6% 50.7% 50.7% 50.7% 50.7% 50.7% 50.7% 50.7% 50.7% 50.7% 50.7% 50.8% 50.8% 50.8% 50.8% 50.8% 50.8% 50.8% 50.8% 50.8% 50.8% 50.9% 50.9% 50.9% 50.9% 50.9% 50.9% 50.9% 50.9% 50.9% 50.9% 51.0% 51.0% 51.0% 51.0% 51.0% 51.0% 51.0% 51.0% 51.0% 51.1% 51.1% 51.1% 51.1% 51.1% 51.1% 51.1% 51.1% 51.1% 51.1% 51.2% 51.2% 51.2% 51.2% 51.2% 51.2% 51.2% 51.2% 51.2% 51.2% 51.3% 51.3% 51.3% 51.3% 51.3% 51.3% 51.3% 51.3% 51.3% 51.4% 51.4% 51.4% 51.4% 51.4% 51.4% 51.4% 51.4% 51.4% 51.4% 51.5% 51.5% 51.5% 51.5% 51.5% 51.5% 51.5% 51.5% 51.5% 51.5% 51.6% 51.6% 51.6% 51.6% 51.6% 51.6% 51.6% 51.6% 51.6% 51.7% 51.7% 51.7% 51.7% 51.7% 51.7% 51.7% 51.7% 51.7% 51.7% 51.8% 51.8% 51.8% 51.8% 51.8% 51.8% 51.8% 51.8% 51.8% 51.8% 51.9% 51.9% 51.9% 51.9% 51.9% 51.9% 51.9% 51.9% 51.9% 52.0% 52.0% 52.0% 52.0% 52.0% 52.0% 52.0% 52.0% 52.0% 52.0% 52.1% 52.1% 52.1% 52.1% 52.1% 52.1% 52.1% 52.1% 52.1% 52.1% 52.2% 52.2% 52.2% 52.2% 52.2% 52.2% 52.2% 52.2% 52.2% 52.3% 52.3% 52.3% 52.3% 52.3% 52.3% 52.3% 52.3% 52.3% 52.3% 52.4% 52.4% 52.4% 52.4% 52.4% 52.4% 52.4% 52.4% 52.4% 52.4% 52.5% 52.5% 52.5% 52.5% 52.5% 52.5% 52.5% 52.5% 52.5% 52.6% 52.6% 52.6% 52.6% 52.6% 52.6% 52.6% 52.6% 52.6% 52.6% 52.7% 52.7% 52.7% 52.7% 52.7% 52.7% 52.7% 52.7% 52.7% 52.7% 52.8% 52.8% 52.8% 52.8% 52.8% 52.8% 52.8% 52.8% 52.8% 52.9% 52.9% 52.9% 52.9% 52.9% 52.9% 52.9% 52.9% 52.9% 52.9% 53.0% 53.0% 53.0% 53.0% 53.0% 53.0% 53.0% 53.0% 53.0% 53.0% 53.1% 53.1% 53.1% 53.1% 53.1% 53.1% 53.1% 53.1% 53.1% 53.2% 53.2% 53.2% 53.2% 53.2% 53.2% 53.2% 53.2% 53.2% 53.2% 53.3% 53.3% 53.3% 53.3% 53.3% 53.3% 53.3% 53.3% 53.3% 53.3% 53.4% 53.4% 53.4% 53.4% 53.4% 53.4% 53.4% 53.4% 53.4% 53.4% 53.5% 53.5% 53.5% 53.5% 53.5% 53.5% 53.5% 53.5% 53.5% 53.6% 53.6% 53.6% 53.6% 53.6% 53.6% 53.6% 53.6% 53.6% 53.6% 53.7% 53.7% 53.7% 53.7% 53.7% 53.7% 53.7% 53.7% 53.7% 53.7% 53.8% 53.8% 53.8% 53.8% 53.8% 53.8% 53.8% 53.8% 53.8% 53.9% 53.9% 53.9% 53.9% 53.9% 53.9% 53.9% 53.9% 53.9% 53.9% 54.0% 54.0% 54.0% 54.0% 54.0% 54.0% 54.0% 54.0% 54.0% 54.0% 54.1% 54.1% 54.1% 54.1% 54.1% 54.1% 54.1% 54.1% 54.1% 54.2% 54.2% 54.2% 54.2% 54.2% 54.2% 54.2% 54.2% 54.2% 54.2% 54.3% 54.3% 54.3% 54.3% 54.3% 54.3% 54.3% 54.3% 54.3% 54.3% 54.4% 54.4% 54.4% 54.4% 54.4% 54.4% 54.4% 54.4% 54.4% 54.5% 54.5% 54.5% 54.5% 54.5% 54.5% 54.5% 54.5% 54.5% 54.5% 54.6% 54.6% 54.6% 54.6% 54.6% 54.6% 54.6% 54.6% 54.6% 54.6% 54.7% 54.7% 54.7% 54.7% 54.7% 54.7% 54.7% 54.7% 54.7% 54.8% 54.8% 54.8% 54.8% 54.8% 54.8% 54.8% 54.8% 54.8% 54.8% 54.9% 54.9% 54.9% 54.9% 54.9% 54.9% 54.9% 54.9% 54.9% 54.9% 55.0% 55.0% 55.0% 55.0% 55.0% 55.0% 55.0% 55.0% 55.0% 55.1% 55.1% 55.1% 55.1% 55.1% 55.1% 55.1% 55.1% 55.1% 55.1% 55.2% 55.2% 55.2% 55.2% 55.2% 55.2% 55.2% 55.2% 55.2% 55.2% 55.3% 55.3% 55.3% 55.3% 55.3% 55.3% 55.3% 55.3% 55.3% 55.4% 55.4% 55.4% 55.4% 55.4% 55.4% 55.4% 55.4% 55.4% 55.4% 55.5% 55.5% 55.5% 55.5% 55.5% 55.5% 55.5% 55.5% 55.5% 55.5% 55.6% 55.6% 55.6% 55.6% 55.6% 55.6% 55.6% 55.6% 55.6% 55.7% 55.7% 55.7% 55.7% 55.7% 55.7% 55.7% 55.7% 55.7% 55.7% 55.8% 55.8% 55.8% 55.8% 55.8% 55.8% 55.8% 55.8% 55.8% 55.8% 55.9% 55.9% 55.9% 55.9% 55.9% 55.9% 55.9% 55.9% 55.9% 55.9% 56.0% 56.0% 56.0% 56.0% 56.0% 56.0% 56.0% 56.0% 56.0% 56.1% 56.1% 56.1% 56.1% 56.1% 56.1% 56.1% 56.1% 56.1% 56.1% 56.2% 56.2% 56.2% 56.2% 56.2% 56.2% 56.2% 56.2% 56.2% 56.2% 56.3% 56.3% 56.3% 56.3% 56.3% 56.3% 56.3% 56.3% 56.3% 56.4% 56.4% 56.4% 56.4% 56.4% 56.4% 56.4% 56.4% 56.4% 56.4% 56.5% 56.5% 56.5% 56.5% 56.5% 56.5% 56.5% 56.5% 56.5% 56.5% 56.6% 56.6% 56.6% 56.6% 56.6% 56.6% 56.6% 56.6% 56.6% 56.7% 56.7% 56.7% 56.7% 56.7% 
56.7% 56.7% 56.7% 56.7% 56.7% 56.8% 56.8% 56.8% 56.8% 56.8% 56.8% 56.8% 56.8% 56.8% 56.8% 56.9% 56.9% 56.9% 56.9% 56.9% 56.9% 56.9% 56.9% 56.9% 57.0% 57.0% 57.0% 57.0% 57.0% 57.0% 57.0% 57.0% 57.0% 57.0% 57.1% 57.1% 57.1% 57.1% 57.1% 57.1% 57.1% 57.1% 57.1% 57.1% 57.2% 57.2% 57.2% 57.2% 57.2% 57.2% 57.2% 57.2% 57.2% 57.3% 57.3% 57.3% 57.3% 57.3% 57.3% 57.3% 57.3% 57.3% 57.3% 57.4% 57.4% 57.4% 57.4% 57.4% 57.4% 57.4% 57.4% 57.4% 57.4% 57.5% 57.5% 57.5% 57.5% 57.5% 57.5% 57.5% 57.5% 57.5% 57.6% 57.6% 57.6% 57.6% 57.6% 57.6% 57.6% 57.6% 57.6% 57.6% 57.7% 57.7% 57.7% 57.7% 57.7% 57.7% 57.7% 57.7% 57.7% 57.7% 57.8% 57.8% 57.8% 57.8% 57.8% 57.8% 57.8% 57.8% 57.8% 57.9% 57.9% 57.9% 57.9% 57.9% 57.9% 57.9% 57.9% 57.9% 57.9% 58.0% 58.0% 58.0% 58.0% 58.0% 58.0% 58.0% 58.0% 58.0% 58.0% 58.1% 58.1% 58.1% 58.1% 58.1% 58.1% 58.1% 58.1% 58.1% 58.2% 58.2% 58.2% 58.2% 58.2% 58.2% 58.2% 58.2% 58.2% 58.2% 58.3% 58.3% 58.3% 58.3% 58.3% 58.3% 58.3% 58.3% 58.3% 58.3% 58.4% 58.4% 58.4% 58.4% 58.4% 58.4% 58.4% 58.4% 58.4% 58.4% 58.5% 58.5% 58.5% 58.5% 58.5% 58.5% 58.5% 58.5% 58.5% 58.6% 58.6% 58.6% 58.6% 58.6% 58.6% 58.6% 58.6% 58.6% 58.6% 58.7% 58.7% 58.7% 58.7% 58.7% 58.7% 58.7% 58.7% 58.7% 58.7% 58.8% 58.8% 58.8% 58.8% 58.8% 58.8% 58.8% 58.8% 58.8% 58.9% 58.9% 58.9% 58.9% 58.9% 58.9% 58.9% 58.9% 58.9% 58.9% 59.0% 59.0% 59.0% 59.0% 59.0% 59.0% 59.0% 59.0% 59.0% 59.0% 59.1% 59.1% 59.1% 59.1% 59.1% 59.1% 59.1% 59.1% 59.1% 59.2% 59.2% 59.2% 59.2% 59.2% 59.2% 59.2% 59.2% 59.2% 59.2% 59.3% 59.3% 59.3% 59.3% 59.3% 59.3% 59.3% 59.3% 59.3% 59.3% 59.4% 59.4% 59.4% 59.4% 59.4% 59.4% 59.4% 59.4% 59.4% 59.5% 59.5% 59.5% 59.5% 59.5% 59.5% 59.5% 59.5% 59.5% 59.5% 59.6% 59.6% 59.6% 59.6% 59.6% 59.6% 59.6% 59.6% 59.6% 59.6% 59.7% 59.7% 59.7% 59.7% 59.7% 59.7% 59.7% 59.7% 59.7% 59.8% 59.8% 59.8% 59.8% 59.8% 59.8% 59.8% 59.8% 59.8% 59.8% 59.9% 59.9% 59.9% 59.9% 59.9% 59.9% 59.9% 59.9% 59.9% 59.9% 60.0% 60.0% 60.0% 60.0% 60.0% 60.0% 60.0% 60.0% 60.0% 60.1% 60.1% 60.1% 60.1% 60.1% 60.1% 60.1% 60.1% 60.1% 60.1% 60.2% 60.2% 60.2% 60.2% 60.2% 60.2% 60.2% 60.2% 60.2% 60.2% 60.3% 60.3% 60.3% 60.3% 60.3% 60.3% 60.3% 60.3% 60.3% 60.4% 60.4% 60.4% 60.4% 60.4% 60.4% 60.4% 60.4% 60.4% 60.4% 60.5% 60.5% 60.5% 60.5% 60.5% 60.5% 60.5% 60.5% 60.5% 60.5% 60.6% 60.6% 60.6% 60.6% 60.6% 60.6% 60.6% 60.6% 60.6% 60.7% 60.7% 60.7% 60.7% 60.7% 60.7% 60.7% 60.7% 60.7% 60.7% 60.8% 60.8% 60.8% 60.8% 60.8% 60.8% 60.8% 60.8% 60.8% 60.8% 60.9% 60.9% 60.9% 60.9% 60.9% 60.9% 60.9% 60.9% 60.9% 60.9% 61.0% 61.0% 61.0% 61.0% 61.0% 61.0% 61.0% 61.0% 61.0% 61.1% 61.1% 61.1% 61.1% 61.1% 61.1% 61.1% 61.1% 61.1% 61.1% 61.2% 61.2% 61.2% 61.2% 61.2% 61.2% 61.2% 61.2% 61.2% 61.2% 61.3% 61.3% 61.3% 61.3% 61.3% 61.3% 61.3% 61.3% 61.3% 61.4% 61.4% 61.4% 61.4% 61.4% 61.4% 61.4% 61.4% 61.4% 61.4% 61.5% 61.5% 61.5% 61.5% 61.5% 61.5% 61.5% 61.5% 61.5% 61.5% 61.6% 61.6% 61.6% 61.6% 61.6% 61.6% 61.6% 61.6% 61.6% 61.7% 61.7% 61.7% 61.7% 61.7% 61.7% 61.7% 61.7% 61.7% 61.7% 61.8% 61.8% 61.8% 61.8% 61.8% 61.8% 61.8% 61.8% 61.8% 61.8% 61.9% 61.9% 61.9% 61.9% 61.9% 61.9% 61.9% 61.9% 61.9% 62.0% 62.0% 62.0% 62.0% 62.0% 62.0% 62.0% 62.0% 62.0% 62.0% 62.1% 62.1% 62.1% 62.1% 62.1% 62.1% 62.1% 62.1% 62.1% 62.1% 62.2% 62.2% 62.2% 62.2% 62.2% 62.2% 62.2% 62.2% 62.2% 62.3% 62.3% 62.3% 62.3% 62.3% 62.3% 62.3% 62.3% 62.3% 62.3% 62.4% 62.4% 62.4% 62.4% 62.4% 62.4% 62.4% 62.4% 62.4% 62.4% 62.5% 62.5% 62.5% 62.5% 62.5% 62.5% 62.5% 62.5% 62.5% 62.6% 62.6% 62.6% 62.6% 62.6% 62.6% 62.6% 62.6% 62.6% 62.6% 62.7% 62.7% 62.7% 62.7% 62.7% 62.7% 62.7% 62.7% 62.7% 62.7% 62.8% 62.8% 62.8% 62.8% 62.8% 62.8% 
62.8% 62.8% 62.8% 62.9% 62.9% 62.9% 62.9% 62.9% 62.9% 62.9% 62.9% 62.9% 62.9% 63.0% 63.0% 63.0% 63.0% 63.0% 63.0% 63.0% 63.0% 63.0% 63.0% 63.1% 63.1% 63.1% 63.1% 63.1% 63.1% 63.1% 63.1% 63.1% 63.2% 63.2% 63.2% 63.2% 63.2% 63.2% 63.2% 63.2% 63.2% 63.2% 63.3% 63.3% 63.3% 63.3% 63.3% 63.3% 63.3% 63.3% 63.3% 63.3% 63.4% 63.4% 63.4% 63.4% 63.4% 63.4% 63.4% 63.4% 63.4% 63.4% 63.5% 63.5% 63.5% 63.5% 63.5% 63.5% 63.5% 63.5% 63.5% 63.6% 63.6% 63.6% 63.6% 63.6% 63.6% 63.6% 63.6% 63.6% 63.6% 63.7% 63.7% 63.7% 63.7% 63.7% 63.7% 63.7% 63.7% 63.7% 63.7% 63.8% 63.8% 63.8% 63.8% 63.8% 63.8% 63.8% 63.8% 63.8% 63.9% 63.9% 63.9% 63.9% 63.9% 63.9% 63.9% 63.9% 63.9% 63.9% 64.0% 64.0% 64.0% 64.0% 64.0% 64.0% 64.0% 64.0% 64.0% 64.0% 64.1% 64.1% 64.1% 64.1% 64.1% 64.1% 64.1% 64.1% 64.1% 64.2% 64.2% 64.2% 64.2% 64.2% 64.2% 64.2% 64.2% 64.2% 64.2% 64.3% 64.3% 64.3% 64.3% 64.3% 64.3% 64.3% 64.3% 64.3% 64.3% 64.4% 64.4% 64.4% 64.4% 64.4% 64.4% 64.4% 64.4% 64.4% 64.5% 64.5% 64.5% 64.5% 64.5% 64.5% 64.5% 64.5% 64.5% 64.5% 64.6% 64.6% 64.6% 64.6% 64.6% 64.6% 64.6% 64.6% 64.6% 64.6% 64.7% 64.7% 64.7% 64.7% 64.7% 64.7% 64.7% 64.7% 64.7% 64.8% 64.8% 64.8% 64.8% 64.8% 64.8% 64.8% 64.8% 64.8% 64.8% 64.9% 64.9% 64.9% 64.9% 64.9% 64.9% 64.9% 64.9% 64.9% 64.9% 65.0% 65.0% 65.0% 65.0% 65.0% 65.0% 65.0% 65.0% 65.0% 65.1% 65.1% 65.1% 65.1% 65.1% 65.1% 65.1% 65.1% 65.1% 65.1% 65.2% 65.2% 65.2% 65.2% 65.2% 65.2% 65.2% 65.2% 65.2% 65.2% 65.3% 65.3% 65.3% 65.3% 65.3% 65.3% 65.3% 65.3% 65.3% 65.4% 65.4% 65.4% 65.4% 65.4% 65.4% 65.4% 65.4% 65.4% 65.4% 65.5% 65.5% 65.5% 65.5% 65.5% 65.5% 65.5% 65.5% 65.5% 65.5% 65.6% 65.6% 65.6% 65.6% 65.6% 65.6% 65.6% 65.6% 65.6% 65.7% 65.7% 65.7% 65.7% 65.7% 65.7% 65.7% 65.7% 65.7% 65.7% 65.8% 65.8% 65.8% 65.8% 65.8% 65.8% 65.8% 65.8% 65.8% 65.8% 65.9% 65.9% 65.9% 65.9% 65.9% 65.9% 65.9% 65.9% 65.9% 65.9% 66.0% 66.0% 66.0% 66.0% 66.0% 66.0% 66.0% 66.0% 66.0% 66.1% 66.1% 66.1% 66.1% 66.1% 66.1% 66.1% 66.1% 66.1% 66.1% 66.2% 66.2% 66.2% 66.2% 66.2% 66.2% 66.2% 66.2% 66.2% 66.2% 66.3% 66.3% 66.3% 66.3% 66.3% 66.3% 66.3% 66.3% 66.3% 66.4% 66.4% 66.4% 66.4% 66.4% 66.4% 66.4% 66.4% 66.4% 66.4% 66.5% 66.5% 66.5% 66.5% 66.5% 66.5% 66.5% 66.5% 66.5% 66.5% 66.6% 66.6% 66.6% 66.6% 66.6% 66.6% 66.6% 66.6% 66.6% 66.7% 66.7% 66.7% 66.7% 66.7% 66.7% 66.7% 66.7% 66.7% 66.7% 66.8% 66.8% 66.8% 66.8% 66.8% 66.8% 66.8% 66.8% 66.8% 66.8% 66.9% 66.9% 66.9% 66.9% 66.9% 66.9% 66.9% 66.9% 66.9% 67.0% 67.0% 67.0% 67.0% 67.0% 67.0% 67.0% 67.0% 67.0% 67.0% 67.1% 67.1% 67.1% 67.1% 67.1% 67.1% 67.1% 67.1% 67.1% 67.1% 67.2% 67.2% 67.2% 67.2% 67.2% 67.2% 67.2% 67.2% 67.2% 67.3% 67.3% 67.3% 67.3% 67.3% 67.3% 67.3% 67.3% 67.3% 67.3% 67.4% 67.4% 67.4% 67.4% 67.4% 67.4% 67.4% 67.4% 67.4% 67.4% 67.5% 67.5% 67.5% 67.5% 67.5% 67.5% 67.5% 67.5% 67.5% 67.6% 67.6% 67.6% 67.6% 67.6% 67.6% 67.6% 67.6% 67.6% 67.6% 67.7% 67.7% 67.7% 67.7% 67.7% 67.7% 67.7% 67.7% 67.7% 67.7% 67.8% 67.8% 67.8% 67.8% 67.8% 67.8% 67.8% 67.8% 67.8% 67.9% 67.9% 67.9% 67.9% 67.9% 67.9% 67.9% 67.9% 67.9% 67.9% 68.0% 68.0% 68.0% 68.0% 68.0% 68.0% 68.0% 68.0% 68.0% 68.0% 68.1% 68.1% 68.1% 68.1% 68.1% 68.1% 68.1% 68.1% 68.1% 68.2% 68.2% 68.2% 68.2% 68.2% 68.2% 68.2% 68.2% 68.2% 68.2% 68.3% 68.3% 68.3% 68.3% 68.3% 68.3% 68.3% 68.3% 68.3% 68.3% 68.4% 68.4% 68.4% 68.4% 68.4% 68.4% 68.4% 68.4% 68.4% 68.4% 68.5% 68.5% 68.5% 68.5% 68.5% 68.5% 68.5% 68.5% 68.5% 68.6% 68.6% 68.6% 68.6% 68.6% 68.6% 68.6% 68.6% 68.6% 68.6% 68.7% 68.7% 68.7% 68.7% 68.7% 68.7% 68.7% 68.7% 68.7% 68.7% 68.8% 68.8% 68.8% 68.8% 68.8% 68.8% 68.8% 68.8% 68.8% 68.9% 68.9% 68.9% 68.9% 68.9% 68.9% 68.9% 68.9% 
68.9% 68.9% 69.0% 69.0% 69.0% 69.0% 69.0% 69.0% 69.0% 69.0% 69.0% 69.0% 69.1% 69.1% 69.1% 69.1% 69.1% 69.1% 69.1% 69.1% 69.1% 69.2% 69.2% 69.2% 69.2% 69.2% 69.2% 69.2% 69.2% 69.2% 69.2% 69.3% 69.3% 69.3% 69.3% 69.3% 69.3% 69.3% 69.3% 69.3% 69.3% 69.4% 69.4% 69.4% 69.4% 69.4% 69.4% 69.4% 69.4% 69.4% 69.5% 69.5% 69.5% 69.5% 69.5% 69.5% 69.5% 69.5% 69.5% 69.5% 69.6% 69.6% 69.6% 69.6% 69.6% 69.6% 69.6% 69.6% 69.6% 69.6% 69.7% 69.7% 69.7% 69.7% 69.7% 69.7% 69.7% 69.7% 69.7% 69.8% 69.8% 69.8% 69.8% 69.8% 69.8% 69.8% 69.8% 69.8% 69.8% 69.9% 69.9% 69.9% 69.9% 69.9% 69.9% 69.9% 69.9% 69.9% 69.9% 70.0% 70.0% 70.0% 70.0% 70.0% 70.0% 70.0% 70.0% 70.0% 70.1% 70.1% 70.1% 70.1% 70.1% 70.1% 70.1% 70.1% 70.1% 70.1% 70.2% 70.2% 70.2% 70.2% 70.2% 70.2% 70.2% 70.2% 70.2% 70.2% 70.3% 70.3% 70.3% 70.3% 70.3% 70.3% 70.3% 70.3% 70.3% 70.4% 70.4% 70.4% 70.4% 70.4% 70.4% 70.4% 70.4% 70.4% 70.4% 70.5% 70.5% 70.5% 70.5% 70.5% 70.5% 70.5% 70.5% 70.5% 70.5% 70.6% 70.6% 70.6% 70.6% 70.6% 70.6% 70.6% 70.6% 70.6% 70.7% 70.7% 70.7% 70.7% 70.7% 70.7% 70.7% 70.7% 70.7% 70.7% 70.8% 70.8% 70.8% 70.8% 70.8% 70.8% 70.8% 70.8% 70.8% 70.8% 70.9% 70.9% 70.9% 70.9% 70.9% 70.9% 70.9% 70.9% 70.9% 70.9% 71.0% 71.0% 71.0% 71.0% 71.0% 71.0% 71.0% 71.0% 71.0% 71.1% 71.1% 71.1% 71.1% 71.1% 71.1% 71.1% 71.1% 71.1% 71.1% 71.2% 71.2% 71.2% 71.2% 71.2% 71.2% 71.2% 71.2% 71.2% 71.2% 71.3% 71.3% 71.3% 71.3% 71.3% 71.3% 71.3% 71.3% 71.3% 71.4% 71.4% 71.4% 71.4% 71.4% 71.4% 71.4% 71.4% 71.4% 71.4% 71.5% 71.5% 71.5% 71.5% 71.5% 71.5% 71.5% 71.5% 71.5% 71.5% 71.6% 71.6% 71.6% 71.6% 71.6% 71.6% 71.6% 71.6% 71.6% 71.7% 71.7% 71.7% 71.7% 71.7% 71.7% 71.7% 71.7% 71.7% 71.7% 71.8% 71.8% 71.8% 71.8% 71.8% 71.8% 71.8% 71.8% 71.8% 71.8% 71.9% 71.9% 71.9% 71.9% 71.9% 71.9% 71.9% 71.9% 71.9% 72.0% 72.0% 72.0% 72.0% 72.0% 72.0% 72.0% 72.0% 72.0% 72.0% 72.1% 72.1% 72.1% 72.1% 72.1% 72.1% 72.1% 72.1% 72.1% 72.1% 72.2% 72.2% 72.2% 72.2% 72.2% 72.2% 72.2% 72.2% 72.2% 72.3% 72.3% 72.3% 72.3% 72.3% 72.3% 72.3% 72.3% 72.3% 72.3% 72.4% 72.4% 72.4% 72.4% 72.4% 72.4% 72.4% 72.4% 72.4% 72.4% 72.5% 72.5% 72.5% 72.5% 72.5% 72.5% 72.5% 72.5% 72.5% 72.6% 72.6% 72.6% 72.6% 72.6% 72.6% 72.6% 72.6% 72.6% 72.6% 72.7% 72.7% 72.7% 72.7% 72.7% 72.7% 72.7% 72.7% 72.7% 72.7% 72.8% 72.8% 72.8% 72.8% 72.8% 72.8% 72.8% 72.8% 72.8% 72.9% 72.9% 72.9% 72.9% 72.9% 72.9% 72.9% 72.9% 72.9% 72.9% 73.0% 73.0% 73.0% 73.0% 73.0% 73.0% 73.0% 73.0% 73.0% 73.0% 73.1% 73.1% 73.1% 73.1% 73.1% 73.1% 73.1% 73.1% 73.1% 73.2% 73.2% 73.2% 73.2% 73.2% 73.2% 73.2% 73.2% 73.2% 73.2% 73.3% 73.3% 73.3% 73.3% 73.3% 73.3% 73.3% 73.3% 73.3% 73.3% 73.4% 73.4% 73.4% 73.4% 73.4% 73.4% 73.4% 73.4% 73.4% 73.4% 73.5% 73.5% 73.5% 73.5% 73.5% 73.5% 73.5% 73.5% 73.5% 73.6% 73.6% 73.6% 73.6% 73.6% 73.6% 73.6% 73.6% 73.6% 73.6% 73.7% 73.7% 73.7% 73.7% 73.7% 73.7% 73.7% 73.7% 73.7% 73.7% 73.8% 73.8% 73.8% 73.8% 73.8% 73.8% 73.8% 73.8% 73.8% 73.9% 73.9% 73.9% 73.9% 73.9% 73.9% 73.9% 73.9% 73.9% 73.9% 74.0% 74.0% 74.0% 74.0% 74.0% 74.0% 74.0% 74.0% 74.0% 74.0% 74.1% 74.1% 74.1% 74.1% 74.1% 74.1% 74.1% 74.1% 74.1% 74.2% 74.2% 74.2% 74.2% 74.2% 74.2% 74.2% 74.2% 74.2% 74.2% 74.3% 74.3% 74.3% 74.3% 74.3% 74.3% 74.3% 74.3% 74.3% 74.3% 74.4% 74.4% 74.4% 74.4% 74.4% 74.4% 74.4% 74.4% 74.4% 74.5% 74.5% 74.5% 74.5% 74.5% 74.5% 74.5% 74.5% 74.5% 74.5% 74.6% 74.6% 74.6% 74.6% 74.6% 74.6% 74.6% 74.6% 74.6% 74.6% 74.7% 74.7% 74.7% 74.7% 74.7% 74.7% 74.7% 74.7% 74.7% 74.8% 74.8% 74.8% 74.8% 74.8% 74.8% 74.8% 74.8% 74.8% 74.8% 74.9% 74.9% 74.9% 74.9% 74.9% 74.9% 74.9% 74.9% 74.9% 74.9% 75.0% 75.0% 75.0% 75.0% 75.0% 75.0% 75.0% 75.0% 75.0% 
75.1% 75.1% 75.1% 75.1% 75.1% 75.1% 75.1% 75.1% 75.1% 75.1% 75.2% 75.2% 75.2% 75.2% 75.2% 75.2% 75.2% 75.2% 75.2% 75.2% 75.3% 75.3% 75.3% 75.3% 75.3% 75.3% 75.3% 75.3% 75.3% 75.4% 75.4% 75.4% 75.4% 75.4% 75.4% 75.4% 75.4% 75.4% 75.4% 75.5% 75.5% 75.5% 75.5% 75.5% 75.5% 75.5% 75.5% 75.5% 75.5% 75.6% 75.6% 75.6% 75.6% 75.6% 75.6% 75.6% 75.6% 75.6% 75.7% 75.7% 75.7% 75.7% 75.7% 75.7% 75.7% 75.7% 75.7% 75.7% 75.8% 75.8% 75.8% 75.8% 75.8% 75.8% 75.8% 75.8% 75.8% 75.8% 75.9% 75.9% 75.9% 75.9% 75.9% 75.9% 75.9% 75.9% 75.9% 75.9% 76.0% 76.0% 76.0% 76.0% 76.0% 76.0% 76.0% 76.0% 76.0% 76.1% 76.1% 76.1% 76.1% 76.1% 76.1% 76.1% 76.1% 76.1% 76.1% 76.2% 76.2% 76.2% 76.2% 76.2% 76.2% 76.2% 76.2% 76.2% 76.2% 76.3% 76.3% 76.3% 76.3% 76.3% 76.3% 76.3% 76.3% 76.3% 76.4% 76.4% 76.4% 76.4% 76.4% 76.4% 76.4% 76.4% 76.4% 76.4% 76.5% 76.5% 76.5% 76.5% 76.5% 76.5% 76.5% 76.5% 76.5% 76.5% 76.6% 76.6% 76.6% 76.6% 76.6% 76.6% 76.6% 76.6% 76.6% 76.7% 76.7% 76.7% 76.7% 76.7% 76.7% 76.7% 76.7% 76.7% 76.7% 76.8% 76.8% 76.8% 76.8% 76.8% 76.8% 76.8% 76.8% 76.8% 76.8% 76.9% 76.9% 76.9% 76.9% 76.9% 76.9% 76.9% 76.9% 76.9% 77.0% 77.0% 77.0% 77.0% 77.0% 77.0% 77.0% 77.0% 77.0% 77.0% 77.1% 77.1% 77.1% 77.1% 77.1% 77.1% 77.1% 77.1% 77.1% 77.1% 77.2% 77.2% 77.2% 77.2% 77.2% 77.2% 77.2% 77.2% 77.2% 77.3% 77.3% 77.3% 77.3% 77.3% 77.3% 77.3% 77.3% 77.3% 77.3% 77.4% 77.4% 77.4% 77.4% 77.4% 77.4% 77.4% 77.4% 77.4% 77.4% 77.5% 77.5% 77.5% 77.5% 77.5% 77.5% 77.5% 77.5% 77.5% 77.6% 77.6% 77.6% 77.6% 77.6% 77.6% 77.6% 77.6% 77.6% 77.6% 77.7% 77.7% 77.7% 77.7% 77.7% 77.7% 77.7% 77.7% 77.7% 77.7% 77.8% 77.8% 77.8% 77.8% 77.8% 77.8% 77.8% 77.8% 77.8% 77.9% 77.9% 77.9% 77.9% 77.9% 77.9% 77.9% 77.9% 77.9% 77.9% 78.0% 78.0% 78.0% 78.0% 78.0% 78.0% 78.0% 78.0% 78.0% 78.0% 78.1% 78.1% 78.1% 78.1% 78.1% 78.1% 78.1% 78.1% 78.1% 78.2% 78.2% 78.2% 78.2% 78.2% 78.2% 78.2% 78.2% 78.2% 78.2% 78.3% 78.3% 78.3% 78.3% 78.3% 78.3% 78.3% 78.3% 78.3% 78.3% 78.4% 78.4% 78.4% 78.4% 78.4% 78.4% 78.4% 78.4% 78.4% 78.4% 78.5% 78.5% 78.5% 78.5% 78.5% 78.5% 78.5% 78.5% 78.5% 78.6% 78.6% 78.6% 78.6% 78.6% 78.6% 78.6% 78.6% 78.6% 78.6% 78.7% 78.7% 78.7% 78.7% 78.7% 78.7% 78.7% 78.7% 78.7% 78.7% 78.8% 78.8% 78.8% 78.8% 78.8% 78.8% 78.8% 78.8% 78.8% 78.9% 78.9% 78.9% 78.9% 78.9% 78.9% 78.9% 78.9% 78.9% 78.9% 79.0% 79.0% 79.0% 79.0% 79.0% 79.0% 79.0% 79.0% 79.0% 79.0% 79.1% 79.1% 79.1% 79.1% 79.1% 79.1% 79.1% 79.1% 79.1% 79.2% 79.2% 79.2% 79.2% 79.2% 79.2% 79.2% 79.2% 79.2% 79.2% 79.3% 79.3% 79.3% 79.3% 79.3% 79.3% 79.3% 79.3% 79.3% 79.3% 79.4% 79.4% 79.4% 79.4% 79.4% 79.4% 79.4% 79.4% 79.4% 79.5% 79.5% 79.5% 79.5% 79.5% 79.5% 79.5% 79.5% 79.5% 79.5% 79.6% 79.6% 79.6% 79.6% 79.6% 79.6% 79.6% 79.6% 79.6% 79.6% 79.7% 79.7% 79.7% 79.7% 79.7% 79.7% 79.7% 79.7% 79.7% 79.8% 79.8% 79.8% 79.8% 79.8% 79.8% 79.8% 79.8% 79.8% 79.8% 79.9% 79.9% 79.9% 79.9% 79.9% 79.9% 79.9% 79.9% 79.9% 79.9% 80.0% 80.0% 80.0% 80.0% 80.0% 80.0% 80.0% 80.0% 80.0% 80.1% 80.1% 80.1% 80.1% 80.1% 80.1% 80.1% 80.1% 80.1% 80.1% 80.2% 80.2% 80.2% 80.2% 80.2% 80.2% 80.2% 80.2% 80.2% 80.2% 80.3% 80.3% 80.3% 80.3% 80.3% 80.3% 80.3% 80.3% 80.3% 80.4% 80.4% 80.4% 80.4% 80.4% 80.4% 80.4% 80.4% 80.4% 80.4% 80.5% 80.5% 80.5% 80.5% 80.5% 80.5% 80.5% 80.5% 80.5% 80.5% 80.6% 80.6% 80.6% 80.6% 80.6% 80.6% 80.6% 80.6% 80.6% 80.6% 80.7% 80.7% 80.7% 80.7% 80.7% 80.7% 80.7% 80.7% 80.7% 80.8% 80.8% 80.8% 80.8% 80.8% 80.8% 80.8% 80.8% 80.8% 80.8% 80.9% 80.9% 80.9% 80.9% 80.9% 80.9% 80.9% 80.9% 80.9% 80.9% 81.0% 81.0% 81.0% 81.0% 81.0% 81.0% 81.0% 81.0% 81.0% 81.1% 81.1% 81.1% 81.1% 81.1% 81.1% 81.1% 81.1% 81.1% 81.1% 81.2% 
81.2% 81.2% 81.2% 81.2% 81.2% 81.2% 81.2% 81.2% 81.2% 81.3% 81.3% 81.3% 81.3% 81.3% 81.3% 81.3% 81.3% 81.3% 81.4% 81.4% 81.4% 81.4% 81.4% 81.4% 81.4% 81.4% 81.4% 81.4% 81.5% 81.5% 81.5% 81.5% 81.5% 81.5% 81.5% 81.5% 81.5% 81.5% 81.6% 81.6% 81.6% 81.6% 81.6% 81.6% 81.6% 81.6% 81.6% 81.7% 81.7% 81.7% 81.7% 81.7% 81.7% 81.7% 81.7% 81.7% 81.7% 81.8% 81.8% 81.8% 81.8% 81.8% 81.8% 81.8% 81.8% 81.8% 81.8% 81.9% 81.9% 81.9% 81.9% 81.9% 81.9% 81.9% 81.9% 81.9% 82.0% 82.0% 82.0% 82.0% 82.0% 82.0% 82.0% 82.0% 82.0% 82.0% 82.1% 82.1% 82.1% 82.1% 82.1% 82.1% 82.1% 82.1% 82.1% 82.1% 82.2% 82.2% 82.2% 82.2% 82.2% 82.2% 82.2% 82.2% 82.2% 82.3% 82.3% 82.3% 82.3% 82.3% 82.3% 82.3% 82.3% 82.3% 82.3% 82.4% 82.4% 82.4% 82.4% 82.4% 82.4% 82.4% 82.4% 82.4% 82.4% 82.5% 82.5% 82.5% 82.5% 82.5% 82.5% 82.5% 82.5% 82.5% 82.6% 82.6% 82.6% 82.6% 82.6% 82.6% 82.6% 82.6% 82.6% 82.6% 82.7% 82.7% 82.7% 82.7% 82.7% 82.7% 82.7% 82.7% 82.7% 82.7% 82.8% 82.8% 82.8% 82.8% 82.8% 82.8% 82.8% 82.8% 82.8% 82.9% 82.9% 82.9% 82.9% 82.9% 82.9% 82.9% 82.9% 82.9% 82.9% 83.0% 83.0% 83.0% 83.0% 83.0% 83.0% 83.0% 83.0% 83.0% 83.0% 83.1% 83.1% 83.1% 83.1% 83.1% 83.1% 83.1% 83.1% 83.1% 83.1% 83.2% 83.2% 83.2% 83.2% 83.2% 83.2% 83.2% 83.2% 83.2% 83.3% 83.3% 83.3% 83.3% 83.3% 83.3% 83.3% 83.3% 83.3% 83.3% 83.4% 83.4% 83.4% 83.4% 83.4% 83.4% 83.4% 83.4% 83.4% 83.4% 83.5% 83.5% 83.5% 83.5% 83.5% 83.5% 83.5% 83.5% 83.5% 83.6% 83.6% 83.6% 83.6% 83.6% 83.6% 83.6% 83.6% 83.6% 83.6% 83.7% 83.7% 83.7% 83.7% 83.7% 83.7% 83.7% 83.7% 83.7% 83.7% 83.8% 83.8% 83.8% 83.8% 83.8% 83.8% 83.8% 83.8% 83.8% 83.9% 83.9% 83.9% 83.9% 83.9% 83.9% 83.9% 83.9% 83.9% 83.9% 84.0% 84.0% 84.0% 84.0% 84.0% 84.0% 84.0% 84.0% 84.0% 84.0% 84.1% 84.1% 84.1% 84.1% 84.1% 84.1% 84.1% 84.1% 84.1% 84.2% 84.2% 84.2% 84.2% 84.2% 84.2% 84.2% 84.2% 84.2% 84.2% 84.3% 84.3% 84.3% 84.3% 84.3% 84.3% 84.3% 84.3% 84.3% 84.3% 84.4% 84.4% 84.4% 84.4% 84.4% 84.4% 84.4% 84.4% 84.4% 84.5% 84.5% 84.5% 84.5% 84.5% 84.5% 84.5% 84.5% 84.5% 84.5% 84.6% 84.6% 84.6% 84.6% 84.6% 84.6% 84.6% 84.6% 84.6% 84.6% 84.7% 84.7% 84.7% 84.7% 84.7% 84.7% 84.7% 84.7% 84.7% 84.8% 84.8% 84.8% 84.8% 84.8% 84.8% 84.8% 84.8% 84.8% 84.8% 84.9% 84.9% 84.9% 84.9% 84.9% 84.9% 84.9% 84.9% 84.9% 84.9% 85.0% 85.0% 85.0% 85.0% 85.0% 85.0% 85.0% 85.0% 85.0% 85.1% 85.1% 85.1% 85.1% 85.1% 85.1% 85.1% 85.1% 85.1% 85.1% 85.2% 85.2% 85.2% 85.2% 85.2% 85.2% 85.2% 85.2% 85.2% 85.2% 85.3% 85.3% 85.3% 85.3% 85.3% 85.3% 85.3% 85.3% 85.3% 85.4% 85.4% 85.4% 85.4% 85.4% 85.4% 85.4% 85.4% 85.4% 85.4% 85.5% 85.5% 85.5% 85.5% 85.5% 85.5% 85.5% 85.5% 85.5% 85.5% 85.6% 85.6% 85.6% 85.6% 85.6% 85.6% 85.6% 85.6% 85.6% 85.6% 85.7% 85.7% 85.7% 85.7% 85.7% 85.7% 85.7% 85.7% 85.7% 85.8% 85.8% 85.8% 85.8% 85.8% 85.8% 85.8% 85.8% 85.8% 85.8% 85.9% 85.9% 85.9% 85.9% 85.9% 85.9% 85.9% 85.9% 85.9% 85.9% 86.0% 86.0% 86.0% 86.0% 86.0% 86.0% 86.0% 86.0% 86.0% 86.1% 86.1% 86.1% 86.1% 86.1% 86.1% 86.1% 86.1% 86.1% 86.1% 86.2% 86.2% 86.2% 86.2% 86.2% 86.2% 86.2% 86.2% 86.2% 86.2% 86.3% 86.3% 86.3% 86.3% 86.3% 86.3% 86.3% 86.3% 86.3% 86.4% 86.4% 86.4% 86.4% 86.4% 86.4% 86.4% 86.4% 86.4% 86.4% 86.5% 86.5% 86.5% 86.5% 86.5% 86.5% 86.5% 86.5% 86.5% 86.5% 86.6% 86.6% 86.6% 86.6% 86.6% 86.6% 86.6% 86.6% 86.6% 86.7% 86.7% 86.7% 86.7% 86.7% 86.7% 86.7% 86.7% 86.7% 86.7% 86.8% 86.8% 86.8% 86.8% 86.8% 86.8% 86.8% 86.8% 86.8% 86.8% 86.9% 86.9% 86.9% 86.9% 86.9% 86.9% 86.9% 86.9% 86.9% 87.0% 87.0% 87.0% 87.0% 87.0% 87.0% 87.0% 87.0% 87.0% 87.0% 87.1% 87.1% 87.1% 87.1% 87.1% 87.1% 87.1% 87.1% 87.1% 87.1% 87.2% 87.2% 87.2% 87.2% 87.2% 87.2% 87.2% 87.2% 87.2% 87.3% 87.3% 87.3% 
87.3% 87.3% 87.3% 87.3% 87.3% 87.3% 87.3% 87.4% 87.4% 87.4% 87.4% 87.4% 87.4% 87.4% 87.4% 87.4% 87.4% 87.5% 87.5% 87.5% 87.5% 87.5% 87.5% 87.5% 87.5% 87.5% 87.6% 87.6% 87.6% 87.6% 87.6% 87.6% 87.6% 87.6% 87.6% 87.6% 87.7% 87.7% 87.7% 87.7% 87.7% 87.7% 87.7% 87.7% 87.7% 87.7% 87.8% 87.8% 87.8% 87.8% 87.8% 87.8% 87.8% 87.8% 87.8% 87.9% 87.9% 87.9% 87.9% 87.9% 87.9% 87.9% 87.9% 87.9% 87.9% 88.0% 88.0% 88.0% 88.0% 88.0% 88.0% 88.0% 88.0% 88.0% 88.0% 88.1% 88.1% 88.1% 88.1% 88.1% 88.1% 88.1% 88.1% 88.1% 88.1% 88.2% 88.2% 88.2% 88.2% 88.2% 88.2% 88.2% 88.2% 88.2% 88.3% 88.3% 88.3% 88.3% 88.3% 88.3% 88.3% 88.3% 88.3% 88.3% 88.4% 88.4% 88.4% 88.4% 88.4% 88.4% 88.4% 88.4% 88.4% 88.4% 88.5% 88.5% 88.5% 88.5% 88.5% 88.5% 88.5% 88.5% 88.5% 88.6% 88.6% 88.6% 88.6% 88.6% 88.6% 88.6% 88.6% 88.6% 88.6% 88.7% 88.7% 88.7% 88.7% 88.7% 88.7% 88.7% 88.7% 88.7% 88.7% 88.8% 88.8% 88.8% 88.8% 88.8% 88.8% 88.8% 88.8% 88.8% 88.9% 88.9% 88.9% 88.9% 88.9% 88.9% 88.9% 88.9% 88.9% 88.9% 89.0% 89.0% 89.0% 89.0% 89.0% 89.0% 89.0% 89.0% 89.0% 89.0% 89.1% 89.1% 89.1% 89.1% 89.1% 89.1% 89.1% 89.1% 89.1% 89.2% 89.2% 89.2% 89.2% 89.2% 89.2% 89.2% 89.2% 89.2% 89.2% 89.3% 89.3% 89.3% 89.3% 89.3% 89.3% 89.3% 89.3% 89.3% 89.3% 89.4% 89.4% 89.4% 89.4% 89.4% 89.4% 89.4% 89.4% 89.4% 89.5% 89.5% 89.5% 89.5% 89.5% 89.5% 89.5% 89.5% 89.5% 89.5% 89.6% 89.6% 89.6% 89.6% 89.6% 89.6% 89.6% 89.6% 89.6% 89.6% 89.7% 89.7% 89.7% 89.7% 89.7% 89.7% 89.7% 89.7% 89.7% 89.8% 89.8% 89.8% 89.8% 89.8% 89.8% 89.8% 89.8% 89.8% 89.8% 89.9% 89.9% 89.9% 89.9% 89.9% 89.9% 89.9% 89.9% 89.9% 89.9% 90.0% 90.0% 90.0% 90.0% 90.0% 90.0% 90.0% 90.0% 90.0% 90.1% 90.1% 90.1% 90.1% 90.1% 90.1% 90.1% 90.1% 90.1% 90.1% 90.2% 90.2% 90.2% 90.2% 90.2% 90.2% 90.2% 90.2% 90.2% 90.2% 90.3% 90.3% 90.3% 90.3% 90.3% 90.3% 90.3% 90.3% 90.3% 90.4% 90.4% 90.4% 90.4% 90.4% 90.4% 90.4% 90.4% 90.4% 90.4% 90.5% 90.5% 90.5% 90.5% 90.5% 90.5% 90.5% 90.5% 90.5% 90.5% 90.6% 90.6% 90.6% 90.6% 90.6% 90.6% 90.6% 90.6% 90.6% 90.6% 90.7% 90.7% 90.7% 90.7% 90.7% 90.7% 90.7% 90.7% 90.7% 90.8% 90.8% 90.8% 90.8% 90.8% 90.8% 90.8% 90.8% 90.8% 90.8% 90.9% 90.9% 90.9% 90.9% 90.9% 90.9% 90.9% 90.9% 90.9% 90.9% 91.0% 91.0% 91.0% 91.0% 91.0% 91.0% 91.0% 91.0% 91.0% 91.1% 91.1% 91.1% 91.1% 91.1% 91.1% 91.1% 91.1% 91.1% 91.1% 91.2% 91.2% 91.2% 91.2% 91.2% 91.2% 91.2% 91.2% 91.2% 91.2% 91.3% 91.3% 91.3% 91.3% 91.3% 91.3% 91.3% 91.3% 91.3% 91.4% 91.4% 91.4% 91.4% 91.4% 91.4% 91.4% 91.4% 91.4% 91.4% 91.5% 91.5% 91.5% 91.5% 91.5% 91.5% 91.5% 91.5% 91.5% 91.5% 91.6% 91.6% 91.6% 91.6% 91.6% 91.6% 91.6% 91.6% 91.6% 91.7% 91.7% 91.7% 91.7% 91.7% 91.7% 91.7% 91.7% 91.7% 91.7% 91.8% 91.8% 91.8% 91.8% 91.8% 91.8% 91.8% 91.8% 91.8% 91.8% 91.9% 91.9% 91.9% 91.9% 91.9% 91.9% 91.9% 91.9% 91.9% 92.0% 92.0% 92.0% 92.0% 92.0% 92.0% 92.0% 92.0% 92.0% 92.0% 92.1% 92.1% 92.1% 92.1% 92.1% 92.1% 92.1% 92.1% 92.1% 92.1% 92.2% 92.2% 92.2% 92.2% 92.2% 92.2% 92.2% 92.2% 92.2% 92.3% 92.3% 92.3% 92.3% 92.3% 92.3% 92.3% 92.3% 92.3% 92.3% 92.4% 92.4% 92.4% 92.4% 92.4% 92.4% 92.4% 92.4% 92.4% 92.4% 92.5% 92.5% 92.5% 92.5% 92.5% 92.5% 92.5% 92.5% 92.5% 92.6% 92.6% 92.6% 92.6% 92.6% 92.6% 92.6% 92.6% 92.6% 92.6% 92.7% 92.7% 92.7% 92.7% 92.7% 92.7% 92.7% 92.7% 92.7% 92.7% 92.8% 92.8% 92.8% 92.8% 92.8% 92.8% 92.8% 92.8% 92.8% 92.9% 92.9% 92.9% 92.9% 92.9% 92.9% 92.9% 92.9% 92.9% 92.9% 93.0% 93.0% 93.0% 93.0% 93.0% 93.0% 93.0% 93.0% 93.0% 93.0% 93.1% 93.1% 93.1% 93.1% 93.1% 93.1% 93.1% 93.1% 93.1% 93.1% 93.2% 93.2% 93.2% 93.2% 93.2% 93.2% 93.2% 93.2% 93.2% 93.3% 93.3% 93.3% 93.3% 93.3% 93.3% 93.3% 93.3% 93.3% 93.3% 93.4% 93.4% 93.4% 93.4% 
93.4% 93.4% 93.4% 93.4% 93.4% 93.4% 93.5% 93.5% 93.5% 93.5% 93.5% 93.5% 93.5% 93.5% 93.5% 93.6% 93.6% 93.6% 93.6% 93.6% 93.6% 93.6% 93.6% 93.6% 93.6% 93.7% 93.7% 93.7% 93.7% 93.7% 93.7% 93.7% 93.7% 93.7% 93.7% 93.8% 93.8% 93.8% 93.8% 93.8% 93.8% 93.8% 93.8% 93.8% 93.9% 93.9% 93.9% 93.9% 93.9% 93.9% 93.9% 93.9% 93.9% 93.9% 94.0% 94.0% 94.0% 94.0% 94.0% 94.0% 94.0% 94.0% 94.0% 94.0% 94.1% 94.1% 94.1% 94.1% 94.1% 94.1% 94.1% 94.1% 94.1% 94.2% 94.2% 94.2% 94.2% 94.2% 94.2% 94.2% 94.2% 94.2% 94.2% 94.3% 94.3% 94.3% 94.3% 94.3% 94.3% 94.3% 94.3% 94.3% 94.3% 94.4% 94.4% 94.4% 94.4% 94.4% 94.4% 94.4% 94.4% 94.4% 94.5% 94.5% 94.5% 94.5% 94.5% 94.5% 94.5% 94.5% 94.5% 94.5% 94.6% 94.6% 94.6% 94.6% 94.6% 94.6% 94.6% 94.6% 94.6% 94.6% 94.7% 94.7% 94.7% 94.7% 94.7% 94.7% 94.7% 94.7% 94.7% 94.8% 94.8% 94.8% 94.8% 94.8% 94.8% 94.8% 94.8% 94.8% 94.8% 94.9% 94.9% 94.9% 94.9% 94.9% 94.9% 94.9% 94.9% 94.9% 94.9% 95.0% 95.0% 95.0% 95.0% 95.0% 95.0% 95.0% 95.0% 95.0% 95.1% 95.1% 95.1% 95.1% 95.1% 95.1% 95.1% 95.1% 95.1% 95.1% 95.2% 95.2% 95.2% 95.2% 95.2% 95.2% 95.2% 95.2% 95.2% 95.2% 95.3% 95.3% 95.3% 95.3% 95.3% 95.3% 95.3% 95.3% 95.3% 95.4% 95.4% 95.4% 95.4% 95.4% 95.4% 95.4% 95.4% 95.4% 95.4% 95.5% 95.5% 95.5% 95.5% 95.5% 95.5% 95.5% 95.5% 95.5% 95.5% 95.6% 95.6% 95.6% 95.6% 95.6% 95.6% 95.6% 95.6% 95.6% 95.6% 95.7% 95.7% 95.7% 95.7% 95.7% 95.7% 95.7% 95.7% 95.7% 95.8% 95.8% 95.8% 95.8% 95.8% 95.8% 95.8% 95.8% 95.8% 95.8% 95.9% 95.9% 95.9% 95.9% 95.9% 95.9% 95.9% 95.9% 95.9% 95.9% 96.0% 96.0% 96.0% 96.0% 96.0% 96.0% 96.0% 96.0% 96.0% 96.1% 96.1% 96.1% 96.1% 96.1% 96.1% 96.1% 96.1% 96.1% 96.1% 96.2% 96.2% 96.2% 96.2% 96.2% 96.2% 96.2% 96.2% 96.2% 96.2% 96.3% 96.3% 96.3% 96.3% 96.3% 96.3% 96.3% 96.3% 96.3% 96.4% 96.4% 96.4% 96.4% 96.4% 96.4% 96.4% 96.4% 96.4% 96.4% 96.5% 96.5% 96.5% 96.5% 96.5% 96.5% 96.5% 96.5% 96.5% 96.5% 96.6% 96.6% 96.6% 96.6% 96.6% 96.6% 96.6% 96.6% 96.6% 96.7% 96.7% 96.7% 96.7% 96.7% 96.7% 96.7% 96.7% 96.7% 96.7% 96.8% 96.8% 96.8% 96.8% 96.8% 96.8% 96.8% 96.8% 96.8% 96.8% 96.9% 96.9% 96.9% 96.9% 96.9% 96.9% 96.9% 96.9% 96.9% 97.0% 97.0% 97.0% 97.0% 97.0% 97.0% 97.0% 97.0% 97.0% 97.0% 97.1% 97.1% 97.1% 97.1% 97.1% 97.1% 97.1% 97.1% 97.1% 97.1% 97.2% 97.2% 97.2% 97.2% 97.2% 97.2% 97.2% 97.2% 97.2% 97.3% 97.3% 97.3% 97.3% 97.3% 97.3% 97.3% 97.3% 97.3% 97.3% 97.4% 97.4% 97.4% 97.4% 97.4% 97.4% 97.4% 97.4% 97.4% 97.4% 97.5% 97.5% 97.5% 97.5% 97.5% 97.5% 97.5% 97.5% 97.5% 97.6% 97.6% 97.6% 97.6% 97.6% 97.6% 97.6% 97.6% 97.6% 97.6% 97.7% 97.7% 97.7% 97.7% 97.7% 97.7% 97.7% 97.7% 97.7% 97.7% 97.8% 97.8% 97.8% 97.8% 97.8% 97.8% 97.8% 97.8% 97.8% 97.9% 97.9% 97.9% 97.9% 97.9% 97.9% 97.9% 97.9% 97.9% 97.9% 98.0% 98.0% 98.0% 98.0% 98.0% 98.0% 98.0% 98.0% 98.0% 98.0% 98.1% 98.1% 98.1% 98.1% 98.1% 98.1% 98.1% 98.1% 98.1% 98.1% 98.2% 98.2% 98.2% 98.2% 98.2% 98.2% 98.2% 98.2% 98.2% 98.3% 98.3% 98.3% 98.3% 98.3% 98.3% 98.3% 98.3% 98.3% 98.3% 98.4% 98.4% 98.4% 98.4% 98.4% 98.4% 98.4% 98.4% 98.4% 98.4% 98.5% 98.5% 98.5% 98.5% 98.5% 98.5% 98.5% 98.5% 98.5% 98.6% 98.6% 98.6% 98.6% 98.6% 98.6% 98.6% 98.6% 98.6% 98.6% 98.7% 98.7% 98.7% 98.7% 98.7% 98.7% 98.7% 98.7% 98.7% 98.7% 98.8% 98.8% 98.8% 98.8% 98.8% 98.8% 98.8% 98.8% 98.8% 98.9% 98.9% 98.9% 98.9% 98.9% 98.9% 98.9% 98.9% 98.9% 98.9% 99.0% 99.0% 99.0% 99.0% 99.0% 99.0% 99.0% 99.0% 99.0% 99.0% 99.1% 99.1% 99.1% 99.1% 99.1% 99.1% 99.1% 99.1% 99.1% 99.2% 99.2% 99.2% 99.2% 99.2% 99.2% 99.2% 99.2% 99.2% 99.2% 99.3% 99.3% 99.3% 99.3% 99.3% 99.3% 99.3% 99.3% 99.3% 99.3% 99.4% 99.4% 99.4% 99.4% 99.4% 99.4% 99.4% 99.4% 99.4% 99.5% 99.5% 99.5% 99.5% 99.5% 99.5% 
99.5% 99.5% 99.5% 99.5% 99.6% 99.6% 99.6% 99.6% 99.6% 99.6% 99.6% 99.6% 99.6% 99.6% 99.7% 99.7% 99.7% 99.7% 99.7% 99.7% 99.7% 99.7% 99.7% 99.8% 99.8% 99.8% 99.8% 99.8% 99.8% 99.8% 99.8% 99.8% 99.8% 99.9% 99.9% 99.9% 99.9% 99.9% 99.9% 99.9% 99.9% 99.9% 99.9% 100.0% 100.0% 100.0% 100.0% 100.0% 100.0%
+Extracting .data/MNIST/raw/train-images-idx3-ubyte.gz to .data/MNIST/raw
+
+Downloading http://yann.lecun.com/exdb/mnist/train-labels-idx1-ubyte.gz
+Downloading http://yann.lecun.com/exdb/mnist/train-labels-idx1-ubyte.gz to .data/MNIST/raw/train-labels-idx1-ubyte.gz
+ 3.5% 7.1% 10.6% 14.2% 17.7% 21.3% 24.8% 28.4% 31.9% 35.5% 39.0% 42.5% 46.1% 49.6% 53.2% 56.7% 60.3% 63.8% 67.4% 70.9% 74.5% 78.0% 81.5% 85.1% 88.6% 92.2% 95.7% 99.3% 102.8%
+Extracting .data/MNIST/raw/train-labels-idx1-ubyte.gz to .data/MNIST/raw
+
+Downloading http://yann.lecun.com/exdb/mnist/t10k-images-idx3-ubyte.gz
+Downloading http://yann.lecun.com/exdb/mnist/t10k-images-idx3-ubyte.gz to .data/MNIST/raw/t10k-images-idx3-ubyte.gz
+ 0.1% … 100.0%  [per-chunk download progress for t10k-images-idx3-ubyte.gz trimmed]
+Extracting .data/MNIST/raw/t10k-images-idx3-ubyte.gz to .data/MNIST/raw
+
+Downloading http://yann.lecun.com/exdb/mnist/t10k-labels-idx1-ubyte.gz
+Downloading http://yann.lecun.com/exdb/mnist/t10k-labels-idx1-ubyte.gz to .data/MNIST/raw/t10k-labels-idx1-ubyte.gz
+ 22.5% 45.1% 67.6% 90.2% 112.7%
+Extracting .data/MNIST/raw/t10k-labels-idx1-ubyte.gz to .data/MNIST/raw
+
+/projects/800fec81-81db-4589-8df3-d839b1d21871/.local/lib/python3.8/site-packages/torchvision/datasets/mnist.py:498: UserWarning: The given NumPy array is not writeable, and PyTorch does not support non-writeable tensors. This means you can write to the underlying (supposedly non-writeable) NumPy array using the tensor. You may want to copy the array to protect its data or make it writeable before converting it to a tensor. This type of warning will be suppressed for the rest of this program. (Triggered internally at /pytorch/torch/csrc/utils/tensor_numpy.cpp:180.)
+ return torch.from_numpy(parsed.astype(m[2], copy=False)).view(*s)
+/projects/800fec81-81db-4589-8df3-d839b1d21871/.local/lib/python3.8/site-packages/torch/nn/functional.py:718: UserWarning: Named tensors and all their associated APIs are an experimental feature and subject to change. Please do not use them for anything important until they are released as stable. (Triggered internally at /pytorch/c10/core/TensorImpl.h:1156.)
+ return torch.max_pool2d(input, kernel_size, stride, padding, dilation, ceil_mode)
+Iteration: 50, Loss: 0.34183579683303833, Accuracy: 93.13999938964844%
+Iteration: 100, Loss: 0.12257936596870422, Accuracy: 92.30999755859375%
+Iteration: 150, Loss: 0.04345181956887245, Accuracy: 95.83999633789062%
+Iteration: 200, Loss: 0.1509581059217453, Accuracy: 96.37999725341797%
+Iteration: 250, Loss: 0.15181449055671692, Accuracy: 96.83999633789062%
+Iteration: 300, Loss: 0.22155368328094482, Accuracy: 96.75%
+^CTraceback (most recent call last):
+ File "mnist.py", line 100, in <module>
+ outputs = model(test)
+ File "/projects/800fec81-81db-4589-8df3-d839b1d21871/.local/lib/python3.8/site-packages/torch/nn/modules/module.py", line 1051, in _call_impl
+ return forward_call(*input, **kwargs)
+ File "mnist.py", line 53, in forward
+ out = self.layer1(x)
+ File "/projects/800fec81-81db-4589-8df3-d839b1d21871/.local/lib/python3.8/site-packages/torch/nn/modules/module.py", line 1051, in _call_impl
+ return forward_call(*input, **kwargs)
+ File "/projects/800fec81-81db-4589-8df3-d839b1d21871/.local/lib/python3.8/site-packages/torch/nn/modules/container.py", line 139, in forward
+ input = module(input)
+ File "/projects/800fec81-81db-4589-8df3-d839b1d21871/.local/lib/python3.8/site-packages/torch/nn/modules/module.py", line 1051, in _call_impl
+ return forward_call(*input, **kwargs)
+ File "/projects/800fec81-81db-4589-8df3-d839b1d21871/.local/lib/python3.8/site-packages/torch/nn/modules/batchnorm.py", line 167, in forward
+ return F.batch_norm(
+ File "/projects/800fec81-81db-4589-8df3-d839b1d21871/.local/lib/python3.8/site-packages/torch/nn/functional.py", line 2281, in batch_norm
+ return torch.batch_norm(
+KeyboardInterrupt
+
+]0;~/PyTorch~/PyTorch$ ichmo./mnist.py
+bash: ./mnist.py: Permission denied
+]0;~/PyTorch~/PyTorch$ chmod +x mnist.py
+]0;~/PyTorch~/PyTorch$ chmod +x mnist.py ./mnist.py
+/projects/800fec81-81db-4589-8df3-d839b1d21871/.local/lib/python3.8/site-packages/torchvision/datasets/mnist.py:498: UserWarning: The given NumPy array is not writeable, and PyTorch does not support non-writeable tensors. This means you can write to the underlying (supposedly non-writeable) NumPy array using the tensor. You may want to copy the array to protect its data or make it writeable before converting it to a tensor. This type of warning will be suppressed for the rest of this program. (Triggered internally at /pytorch/torch/csrc/utils/tensor_numpy.cpp:180.)
+ return torch.from_numpy(parsed.astype(m[2], copy=False)).view(*s)
+/projects/800fec81-81db-4589-8df3-d839b1d21871/.local/lib/python3.8/site-packages/torch/nn/functional.py:718: UserWarning: Named tensors and all their associated APIs are an experimental feature and subject to change. Please do not use them for anything important until they are released as stable. (Triggered internally at /pytorch/c10/core/TensorImpl.h:1156.)
+ return torch.max_pool2d(input, kernel_size, stride, padding, dilation, ceil_mode)
+Iteration: 50, Loss: 0.47505661845207214, Accuracy: 90.05999755859375%
+Iteration: 100, Loss: 0.14079254865646362, Accuracy: 92.63999938964844%
+Iteration: 150, Loss: 0.11672891676425934, Accuracy: 96.7699966430664%
+Iteration: 200, Loss: 0.029581820592284203, Accuracy: 97.48999786376953%
+Iteration: 250, Loss: 0.1358642429113388, Accuracy: 96.41999816894531%
+Iteration: 300, Loss: 0.10600727051496506, Accuracy: 96.94000244140625%
+Iteration: 350, Loss: 0.17078223824501038, Accuracy: 95.8499984741211%
+Iteration: 400, Loss: 0.19208943843841553, Accuracy: 96.83000183105469%
+Iteration: 450, Loss: 0.06547720730304718, Accuracy: 97.08999633789062%
+Iteration: 500, Loss: 0.11516312509775162, Accuracy: 95.72000122070312%
+Iteration: 550, Loss: 0.05065403878688812, Accuracy: 97.6500015258789%
+Iteration: 600, Loss: 0.26968786120414734, Accuracy: 97.29000091552734%
+Iteration: 650, Loss: 0.1269403100013733, Accuracy: 97.73999786376953%
+Iteration: 700, Loss: 0.053299155086278915, Accuracy: 96.3499984741211%
+Iteration: 750, Loss: 0.021617718040943146, Accuracy: 98.37999725341797%
+Iteration: 800, Loss: 0.01600886881351471, Accuracy: 98.04000091552734%
+Iteration: 850, Loss: 0.08215320855379105, Accuracy: 97.7300033569336%
+Iteration: 900, Loss: 0.08939420431852341, Accuracy: 98.43000030517578%
+Iteration: 950, Loss: 0.13746267557144165, Accuracy: 97.7699966430664%
+Iteration: 1000, Loss: 0.1010037511587143, Accuracy: 97.05000305175781%
+Iteration: 1050, Loss: 0.014871266670525074, Accuracy: 97.76000213623047%
+Iteration: 1100, Loss: 0.09214109182357788, Accuracy: 97.86000061035156%
+Iteration: 1150, Loss: 0.02662852220237255, Accuracy: 97.87999725341797%
+Iteration: 1200, Loss: 0.2543987035751343, Accuracy: 98.48999786376953%
+Iteration: 1250, Loss: 0.03473915159702301, Accuracy: 98.18000030517578%
+Iteration: 1300, Loss: 0.03924868628382683, Accuracy: 97.06999969482422%
+Iteration: 1350, Loss: 0.06451206654310226, Accuracy: 98.37999725341797%
+Iteration: 1400, Loss: 0.029841933399438858, Accuracy: 98.36000061035156%
+Iteration: 1450, Loss: 0.07927007973194122, Accuracy: 98.0999984741211%
+Iteration: 1500, Loss: 0.040941592305898666, Accuracy: 98.69000244140625%
+Iteration: 1550, Loss: 0.030843660235404968, Accuracy: 97.8499984741211%
+Iteration: 1600, Loss: 0.09047156572341919, Accuracy: 96.19999694824219%
+Iteration: 1650, Loss: 0.012048683129251003, Accuracy: 98.68000030517578%
+Iteration: 1700, Loss: 0.07744283229112625, Accuracy: 98.61000061035156%
+Iteration: 1750, Loss: 0.022434061393141747, Accuracy: 98.16999816894531%
+Iteration: 1800, Loss: 0.19573909044265747, Accuracy: 98.36000061035156%
+Iteration: 1850, Loss: 0.019937338307499886, Accuracy: 98.19999694824219%
+Iteration: 1900, Loss: 0.010635974816977978, Accuracy: 98.5199966430664%
+Iteration: 1950, Loss: 0.02798471227288246, Accuracy: 98.87000274658203%
+Iteration: 2000, Loss: 0.004890498239547014, Accuracy: 98.5999984741211%
+Iteration: 2050, Loss: 0.07468106597661972, Accuracy: 98.26000213623047%
+Iteration: 2100, Loss: 0.01748150959610939, Accuracy: 98.47000122070312%
+Iteration: 2150, Loss: 0.021531887352466583, Accuracy: 98.51000213623047%
+Iteration: 2200, Loss: 0.08161243796348572, Accuracy: 96.16000366210938%
+Iteration: 2250, Loss: 0.029564548283815384, Accuracy: 98.2699966430664%
+Iteration: 2300, Loss: 0.06717827171087265, Accuracy: 98.5%
+Iteration: 2350, Loss: 0.014128911308944225, Accuracy: 98.62000274658203%
+Iteration: 2400, Loss: 0.1611928790807724, Accuracy: 98.37999725341797%
+Iteration: 2450, Loss: 0.08816828578710556, Accuracy: 98.5199966430664%
+Iteration: 2500, Loss: 0.028569968417286873, Accuracy: 98.69000244140625%
+Iteration: 2550, Loss: 0.020741326734423637, Accuracy: 98.91999816894531%
+Iteration: 2600, Loss: 0.009847820736467838, Accuracy: 98.58000183105469%
+Iteration: 2650, Loss: 0.03623354807496071, Accuracy: 98.30000305175781%
+Iteration: 2700, Loss: 0.004418815020471811, Accuracy: 98.55000305175781%
+Iteration: 2750, Loss: 0.07973592728376389, Accuracy: 98.3499984741211%
+Iteration: 2800, Loss: 0.03026372380554676, Accuracy: 98.41000366210938%
+Iteration: 2850, Loss: 0.003642548806965351, Accuracy: 98.52999877929688%
+Iteration: 2900, Loss: 0.059621091932058334, Accuracy: 98.2699966430664%
+Iteration: 2950, Loss: 0.023448023945093155, Accuracy: 98.54000091552734%
+Iteration: 3000, Loss: 0.2632042467594147, Accuracy: 98.77999877929688%
+Saved PyTorch Model State to model.pth
+]0;~/PyTorch~/PyTorch$ \ No newline at end of file
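Note: the log above never shows mnist.py itself, but its shape can be inferred from the traceback (a `Sequential` block named `layer1` containing `BatchNorm2d` and a `max_pool2d` call) and from the "Iteration / Loss / Accuracy" lines printed every 50 steps before the weights are saved to model.pth. The following is only a minimal sketch under those assumptions; the class name, hyperparameters, and layer sizes are illustrative guesses, not the actual file contents.

```python
# Hypothetical reconstruction of mnist.py -- the real script is not part of this diff.
import torch
import torch.nn as nn
from torch.utils.data import DataLoader
from torchvision import datasets, transforms

train_ds = datasets.MNIST(".data", train=True, download=True, transform=transforms.ToTensor())
test_ds = datasets.MNIST(".data", train=False, download=True, transform=transforms.ToTensor())
train_dl = DataLoader(train_ds, batch_size=100, shuffle=True)
test_dl = DataLoader(test_ds, batch_size=100)

class CNN(nn.Module):
    def __init__(self):
        super().__init__()
        # The traceback shows a Sequential "layer1" with BatchNorm2d inside,
        # so a Conv/BatchNorm/ReLU/MaxPool block of this kind is assumed.
        self.layer1 = nn.Sequential(
            nn.Conv2d(1, 16, kernel_size=5, padding=2),
            nn.BatchNorm2d(16),
            nn.ReLU(),
            nn.MaxPool2d(2),
        )
        self.fc = nn.Linear(16 * 14 * 14, 10)

    def forward(self, x):
        out = self.layer1(x)
        return self.fc(out.view(out.size(0), -1))

model = CNN()
loss_fn = nn.CrossEntropyLoss()
opt = torch.optim.SGD(model.parameters(), lr=0.01)

it = 0
for epoch in range(5):
    for images, labels in train_dl:
        loss = loss_fn(model(images), labels)
        opt.zero_grad()
        loss.backward()
        opt.step()
        it += 1
        if it % 50 == 0:
            # Evaluate on the test set, mirroring the "Iteration/Loss/Accuracy" lines in the log.
            correct = total = 0
            with torch.no_grad():
                for test, test_labels in test_dl:
                    predicted = model(test).argmax(dim=1)
                    correct += (predicted == test_labels).sum().item()
                    total += test_labels.size(0)
            print(f"Iteration: {it}, Loss: {loss.item()}, Accuracy: {100 * correct / total}%")

torch.save(model.state_dict(), "model.pth")
print("Saved PyTorch Model State to model.pth")
```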
diff --git a/.nn_tutorial.ipynb.sage-jupyter2 b/.nn_tutorial.ipynb.sage-jupyter2
new file mode 100644
index 0000000..62bfee9
--- /dev/null
+++ b/.nn_tutorial.ipynb.sage-jupyter2
@@ -0,0 +1,103 @@
+{"backend_state":"running","connection_file":"/projects/800fec81-81db-4589-8df3-d839b1d21871/.local/share/jupyter/runtime/kernel-06bdf45a-6d6c-4ca4-a462-f80adeba05ab.json","kernel":"python3-ubuntu","kernel_error":"","kernel_state":"idle","kernel_usage":{"cpu":0,"memory":0},"metadata":{"language_info":{"codemirror_mode":{"name":"ipython","version":3},"file_extension":".py","mimetype":"text/x-python","name":"python","nbconvert_exporter":"python","pygments_lexer":"ipython3","version":"3.9.6"}},"trust":true,"type":"settings"}
+{"cell_type":"code","end":1631142385589,"exec_count":1,"id":"d98d2f","input":"from pathlib import Path\nimport requests\n\nDATA_PATH = Path(\"data\")\nPATH = DATA_PATH / \"mnist\"\n\nPATH.mkdir(parents=True, exist_ok=True)\n\nURL = \"https://github.com/pytorch/tutorials/raw/master/_static/\"\nFILENAME = \"mnist.pkl.gz\"\n\nif not (PATH / FILENAME).exists():\n content = requests.get(URL + FILENAME).content\n (PATH / FILENAME).open(\"wb\").write(content)","kernel":"python3-ubuntu","metadata":{"jupyter":{}},"pos":3,"start":1631142385566,"state":"done","type":"cell"}
+{"cell_type":"code","end":1631142391724,"exec_count":2,"id":"ea520d","input":"import pickle\nimport gzip\n\nwith gzip.open((PATH / FILENAME).as_posix(), \"rb\") as f:\n ((x_train, y_train), (x_valid, y_valid), _) = pickle.load(f, encoding=\"latin-1\")","kernel":"python3-ubuntu","metadata":{"jupyter":{}},"pos":5,"start":1631142391204,"state":"done","type":"cell"}
+{"cell_type":"code","end":1631142394859,"exec_count":3,"id":"d0427c","input":"print(x_valid)","kernel":"python3-ubuntu","output":{"0":{"name":"stdout","text":"[[0. 0. 0. ... 0. 0. 0.]\n [0. 0. 0. ... 0. 0. 0.]\n [0. 0. 0. ... 0. 0. 0.]\n ...\n [0. 0. 0. ... 0. 0. 0.]\n [0. 0. 0. ... 0. 0. 0.]\n [0. 0. 0. ... 0. 0. 0.]]\n"}},"pos":6,"start":1631142394853,"state":"done","type":"cell"}
+{"cell_type":"code","end":1631142400094,"exec_count":4,"id":"1334ae","input":"from matplotlib import pyplot\nimport numpy as np\n\npyplot.imshow(x_train[0].reshape((28, 28)), cmap=\"gray\")\nprint(x_train.shape)","kernel":"python3-ubuntu","metadata":{"jupyter":{}},"output":{"0":{"name":"stdout","text":"(50000, 784)\n"},"1":{"data":{"image/png":"b47b60f61738c85903c970a7f1240828ce556776","text/plain":"<Figure size 864x504 with 1 Axes>"},"metadata":{"image/png":{"height":411,"width":414},"needs_background":"light"}}},"pos":8,"start":1631142399901,"state":"done","type":"cell"}
+{"cell_type":"code","end":1631142407955,"exec_count":5,"id":"06a83f","input":"import torch\n\nx_train, y_train, x_valid, y_valid = map(\n torch.tensor, (x_train, y_train, x_valid, y_valid)\n)\nn, c = x_train.shape\nprint(x_train, y_train)\nprint(x_train.shape)\nprint(y_train.min(), y_train.max())","kernel":"python3-ubuntu","metadata":{"jupyter":{}},"output":{"0":{"name":"stdout","text":"tensor([[0., 0., 0., ..., 0., 0., 0.],\n [0., 0., 0., ..., 0., 0., 0.],\n [0., 0., 0., ..., 0., 0., 0.],\n ...,\n [0., 0., 0., ..., 0., 0., 0.],\n [0., 0., 0., ..., 0., 0., 0.],\n [0., 0., 0., ..., 0., 0., 0.]]) tensor([5, 0, 4, ..., 8, 4, 8])\ntorch.Size([50000, 784])\ntensor(0) tensor(9)\n"}},"pos":10,"start":1631142407778,"state":"done","type":"cell"}
+{"cell_type":"code","id":"04818f","input":"class Mnist_Logistic(nn.Module):\n def __init__(self):\n super().__init__()\n self.lin = nn.Linear(784, 10)\n\n def forward(self, xb):\n return self.lin(xb)","metadata":{"jupyter":{"outputs_hidden":false}},"pos":44,"state":"done","type":"cell"}
+{"cell_type":"code","id":"0baf97","input":"print(loss_func(model(xb), yb))","metadata":{"jupyter":{"outputs_hidden":false}},"pos":38,"state":"done","type":"cell"}
+{"cell_type":"code","id":"1112e9","input":"def log_softmax(x):\n return x - x.exp().sum(-1).log().unsqueeze(-1)\n\ndef model(xb):\n return log_softmax(xb @ weights + bias)","metadata":{"jupyter":{"outputs_hidden":false}},"pos":14,"state":"done","type":"cell"}
+{"cell_type":"code","id":"278747","input":"fit(epochs, model, loss_func, opt, train_dl, valid_dl)","metadata":{"jupyter":{"outputs_hidden":false}},"pos":88,"state":"done","type":"cell"}
+{"cell_type":"code","id":"28c6eb","input":"model = nn.Sequential(\n Lambda(preprocess),\n nn.Conv2d(1, 16, kernel_size=3, stride=2, padding=1),\n nn.ReLU(),\n nn.Conv2d(16, 16, kernel_size=3, stride=2, padding=1),\n nn.ReLU(),\n nn.Conv2d(16, 10, kernel_size=3, stride=2, padding=1),\n nn.ReLU(),\n nn.AvgPool2d(4),\n Lambda(lambda x: x.view(x.size(0), -1)),\n)\n\nopt = optim.SGD(model.parameters(), lr=lr, momentum=0.9)\n\nfit(epochs, model, loss_func, opt, train_dl, valid_dl)","metadata":{"jupyter":{"outputs_hidden":false}},"pos":82,"state":"done","type":"cell"}
+{"cell_type":"code","id":"2bd101","input":"%matplotlib inline","metadata":{"jupyter":{"outputs_hidden":false}},"pos":0,"state":"done","type":"cell"}
+{"cell_type":"code","id":"33c506","input":"def accuracy(out, yb):\n preds = torch.argmax(out, dim=1)\n return (preds == yb).float().mean()","metadata":{"jupyter":{"outputs_hidden":false}},"pos":22,"state":"done","type":"cell"}
+{"cell_type":"code","id":"3622f3","input":"print(accuracy(preds, yb))","metadata":{"jupyter":{"outputs_hidden":false}},"pos":24,"state":"done","type":"cell"}
+{"cell_type":"code","id":"3ad235","input":"model = Mnist_CNN()\nopt = optim.SGD(model.parameters(), lr=lr, momentum=0.9)\n\nfit(epochs, model, loss_func, opt, train_dl, valid_dl)","metadata":{"jupyter":{"outputs_hidden":false}},"pos":78,"state":"done","type":"cell"}
+{"cell_type":"code","id":"3b235d","input":"from IPython.core.debugger import set_trace\n\nlr = 0.5 # learning rate\nepochs = 2 # how many epochs to train for\n\nfor epoch in range(epochs):\n for i in range((n - 1) // bs + 1):\n # set_trace()\n start_i = i * bs\n end_i = start_i + bs\n xb = x_train[start_i:end_i]\n yb = y_train[start_i:end_i]\n pred = model(xb)\n loss = loss_func(pred, yb)\n\n loss.backward()\n with torch.no_grad():\n weights -= weights.grad * lr\n bias -= bias.grad * lr\n weights.grad.zero_()\n bias.grad.zero_()","metadata":{"jupyter":{"outputs_hidden":false}},"pos":26,"state":"done","type":"cell"}
+{"cell_type":"code","id":"40bcfc","input":"import numpy as np\n\ndef fit(epochs, model, loss_func, opt, train_dl, valid_dl):\n for epoch in range(epochs):\n model.train()\n for xb, yb in train_dl:\n loss_batch(model, loss_func, xb, yb, opt)\n\n model.eval()\n with torch.no_grad():\n losses, nums = zip(\n *[loss_batch(model, loss_func, xb, yb) for xb, yb in valid_dl]\n )\n val_loss = np.sum(np.multiply(losses, nums)) / np.sum(nums)\n\n print(epoch, val_loss)","metadata":{"jupyter":{"outputs_hidden":false}},"pos":70,"state":"done","type":"cell"}
+{"cell_type":"code","id":"441030","input":"def loss_batch(model, loss_func, xb, yb, opt=None):\n loss = loss_func(model(xb), yb)\n\n if opt is not None:\n loss.backward()\n opt.step()\n opt.zero_grad()\n\n return loss.item(), len(xb)","metadata":{"jupyter":{"outputs_hidden":false}},"pos":68,"state":"done","type":"cell"}
+{"cell_type":"code","id":"450e1d","input":"print(loss_func(model(xb), yb), accuracy(model(xb), yb))","metadata":{"jupyter":{"outputs_hidden":false}},"pos":28,"state":"done","type":"cell"}
+{"cell_type":"code","id":"47c4b8","input":"fit(epochs, model, loss_func, opt, train_dl, valid_dl)","metadata":{"jupyter":{"outputs_hidden":false}},"pos":98,"state":"done","type":"cell"}
+{"cell_type":"code","id":"48c137","input":"print(loss_func(model(xb), yb), accuracy(model(xb), yb))","metadata":{"jupyter":{"outputs_hidden":false}},"pos":32,"state":"done","type":"cell"}
+{"cell_type":"code","id":"5710c8","input":"def nll(input, target):\n return -input[range(target.shape[0]), target].mean()\n\nloss_func = nll","metadata":{"jupyter":{"outputs_hidden":false}},"pos":18,"state":"done","type":"cell"}
+{"cell_type":"code","id":"5a7882","input":"fit()\n\nprint(loss_func(model(xb), yb))","metadata":{"jupyter":{"outputs_hidden":false}},"pos":48,"state":"done","type":"cell"}
+{"cell_type":"code","id":"5aa7dc","input":"print(loss_func(model(xb), yb))","metadata":{"jupyter":{"outputs_hidden":false}},"pos":42,"state":"done","type":"cell"}
+{"cell_type":"code","id":"5c1249","input":"model, opt = get_model()\n\nfor epoch in range(epochs):\n for i in range((n - 1) // bs + 1):\n xb, yb = train_ds[i * bs: i * bs + bs]\n pred = model(xb)\n loss = loss_func(pred, yb)\n\n loss.backward()\n opt.step()\n opt.zero_grad()\n\nprint(loss_func(model(xb), yb))","metadata":{"jupyter":{"outputs_hidden":false}},"pos":58,"state":"done","type":"cell"}
+{"cell_type":"code","id":"638429","input":"model, opt = get_model()\n\nfor epoch in range(epochs):\n model.train()\n for xb, yb in train_dl:\n pred = model(xb)\n loss = loss_func(pred, yb)\n\n loss.backward()\n opt.step()\n opt.zero_grad()\n\n model.eval()\n with torch.no_grad():\n valid_loss = sum(loss_func(model(xb), yb) for xb, yb in valid_dl)\n\n print(epoch, valid_loss / len(valid_dl))","metadata":{"jupyter":{"outputs_hidden":false}},"pos":66,"state":"done","type":"cell"}
+{"cell_type":"code","id":"63e25b","input":"def preprocess(x, y):\n return x.view(-1, 1, 28, 28).to(dev), y.to(dev)\n\n\ntrain_dl, valid_dl = get_data(train_ds, valid_ds, bs)\ntrain_dl = WrappedDataLoader(train_dl, preprocess)\nvalid_dl = WrappedDataLoader(valid_dl, preprocess)","metadata":{"jupyter":{"outputs_hidden":false}},"pos":94,"state":"done","type":"cell"}
+{"cell_type":"code","id":"684c65","input":"def get_model():\n model = Mnist_Logistic()\n return model, optim.SGD(model.parameters(), lr=lr)\n\nmodel, opt = get_model()\nprint(loss_func(model(xb), yb))\n\nfor epoch in range(epochs):\n for i in range((n - 1) // bs + 1):\n start_i = i * bs\n end_i = start_i + bs\n xb = x_train[start_i:end_i]\n yb = y_train[start_i:end_i]\n pred = model(xb)\n loss = loss_func(pred, yb)\n\n loss.backward()\n opt.step()\n opt.zero_grad()\n\nprint(loss_func(model(xb), yb))","metadata":{"jupyter":{"outputs_hidden":false}},"pos":52,"state":"done","type":"cell"}
+{"cell_type":"code","id":"7f399f","input":"class Lambda(nn.Module):\n def __init__(self, func):\n super().__init__()\n self.func = func\n\n def forward(self, x):\n return self.func(x)\n\n\ndef preprocess(x):\n return x.view(-1, 1, 28, 28)","metadata":{"jupyter":{"outputs_hidden":false}},"pos":80,"state":"done","type":"cell"}
+{"cell_type":"code","id":"7f80d5","input":"model = nn.Sequential(\n nn.Conv2d(1, 16, kernel_size=3, stride=2, padding=1),\n nn.ReLU(),\n nn.Conv2d(16, 16, kernel_size=3, stride=2, padding=1),\n nn.ReLU(),\n nn.Conv2d(16, 10, kernel_size=3, stride=2, padding=1),\n nn.ReLU(),\n nn.AdaptiveAvgPool2d(1),\n Lambda(lambda x: x.view(x.size(0), -1)),\n)\n\nopt = optim.SGD(model.parameters(), lr=lr, momentum=0.9)","metadata":{"jupyter":{"outputs_hidden":false}},"pos":86,"state":"done","type":"cell"}
+{"cell_type":"code","id":"82e1af","input":"from torch import nn\n\nclass Mnist_Logistic(nn.Module):\n def __init__(self):\n super().__init__()\n self.weights = nn.Parameter(torch.randn(784, 10) / math.sqrt(784))\n self.bias = nn.Parameter(torch.zeros(10))\n\n def forward(self, xb):\n return xb @ self.weights + self.bias","metadata":{"jupyter":{"outputs_hidden":false}},"pos":34,"state":"done","type":"cell"}
+{"cell_type":"code","id":"86e2dc","input":"train_dl, valid_dl = get_data(train_ds, valid_ds, bs)\nmodel, opt = get_model()\nfit(epochs, model, loss_func, opt, train_dl, valid_dl)","metadata":{"jupyter":{"outputs_hidden":false}},"pos":74,"state":"done","type":"cell"}
+{"cell_type":"code","id":"8b69e0","input":"from torch import optim","metadata":{"jupyter":{"outputs_hidden":false}},"pos":50,"state":"done","type":"cell"}
+{"cell_type":"code","id":"a0d1d7","input":"from torch.utils.data import TensorDataset","metadata":{"jupyter":{"outputs_hidden":false}},"pos":54,"state":"done","type":"cell"}
+{"cell_type":"code","id":"a24d3e","input":"from torch.utils.data import DataLoader\n\ntrain_ds = TensorDataset(x_train, y_train)\ntrain_dl = DataLoader(train_ds, batch_size=bs)","metadata":{"jupyter":{"outputs_hidden":false}},"pos":60,"state":"done","type":"cell"}
+{"cell_type":"code","id":"a53ec7","input":"model = Mnist_Logistic()\nprint(loss_func(model(xb), yb))","metadata":{"jupyter":{"outputs_hidden":false}},"pos":46,"state":"done","type":"cell"}
+{"cell_type":"code","id":"abe186","input":"bs = 64 # batch size\n\nxb = x_train[0:bs] # a mini-batch from x\npreds = model(xb) # predictions\npreds[0], preds.shape\nprint(preds[0], preds.shape)","metadata":{"jupyter":{"outputs_hidden":false}},"pos":16,"state":"done","type":"cell"}
+{"cell_type":"code","id":"aedfb9","input":"import math\n\nweights = torch.randn(784, 10) / math.sqrt(784)\nweights.requires_grad_()\nbias = torch.zeros(10, requires_grad=True)","metadata":{"jupyter":{"outputs_hidden":false}},"pos":12,"state":"done","type":"cell"}
+{"cell_type":"code","id":"af0899","input":"print(torch.cuda.is_available())","metadata":{"jupyter":{"outputs_hidden":false}},"pos":90,"state":"done","type":"cell"}
+{"cell_type":"code","id":"b07f3b","input":"model.to(dev)\nopt = optim.SGD(model.parameters(), lr=lr, momentum=0.9)","metadata":{"jupyter":{"outputs_hidden":false}},"pos":96,"state":"done","type":"cell"}
+{"cell_type":"code","id":"b18555","input":"def preprocess(x, y):\n return x.view(-1, 1, 28, 28), y\n\n\nclass WrappedDataLoader:\n def __init__(self, dl, func):\n self.dl = dl\n self.func = func\n\n def __len__(self):\n return len(self.dl)\n\n def __iter__(self):\n batches = iter(self.dl)\n for b in batches:\n yield (self.func(*b))\n\ntrain_dl, valid_dl = get_data(train_ds, valid_ds, bs)\ntrain_dl = WrappedDataLoader(train_dl, preprocess)\nvalid_dl = WrappedDataLoader(valid_dl, preprocess)","metadata":{"jupyter":{"outputs_hidden":false}},"pos":84,"state":"done","type":"cell"}
+{"cell_type":"code","id":"bf0aa3","input":"yb = y_train[0:bs]\nprint(loss_func(preds, yb))","metadata":{"jupyter":{"outputs_hidden":false}},"pos":20,"state":"done","type":"cell"}
+{"cell_type":"code","id":"cf1a79","input":"model, opt = get_model()\n\nfor epoch in range(epochs):\n for xb, yb in train_dl:\n pred = model(xb)\n loss = loss_func(pred, yb)\n\n loss.backward()\n opt.step()\n opt.zero_grad()\n\nprint(loss_func(model(xb), yb))","metadata":{"jupyter":{"outputs_hidden":false}},"pos":62,"state":"done","type":"cell"}
+{"cell_type":"code","id":"d24f32","input":"class Mnist_CNN(nn.Module):\n def __init__(self):\n super().__init__()\n self.conv1 = nn.Conv2d(1, 16, kernel_size=3, stride=2, padding=1)\n self.conv2 = nn.Conv2d(16, 16, kernel_size=3, stride=2, padding=1)\n self.conv3 = nn.Conv2d(16, 10, kernel_size=3, stride=2, padding=1)\n\n def forward(self, xb):\n xb = xb.view(-1, 1, 28, 28)\n xb = F.relu(self.conv1(xb))\n xb = F.relu(self.conv2(xb))\n xb = F.relu(self.conv3(xb))\n xb = F.avg_pool2d(xb, 4)\n return xb.view(-1, xb.size(1))\n\nlr = 0.1","metadata":{"jupyter":{"outputs_hidden":false}},"pos":76,"state":"done","type":"cell"}
+{"cell_type":"code","id":"d6d84e","input":"model = Mnist_Logistic()","metadata":{"jupyter":{"outputs_hidden":false}},"pos":36,"state":"done","type":"cell"}
+{"cell_type":"code","id":"e6f29c","input":"train_ds = TensorDataset(x_train, y_train)","metadata":{"jupyter":{"outputs_hidden":false}},"pos":56,"state":"done","type":"cell"}
+{"cell_type":"code","id":"ed46a9","input":"import torch.nn.functional as F\n\nloss_func = F.cross_entropy\n\ndef model(xb):\n return xb @ weights + bias","metadata":{"jupyter":{"outputs_hidden":false}},"pos":30,"state":"done","type":"cell"}
+{"cell_type":"code","id":"ed8dbd","input":"def fit():\n for epoch in range(epochs):\n for i in range((n - 1) // bs + 1):\n start_i = i * bs\n end_i = start_i + bs\n xb = x_train[start_i:end_i]\n yb = y_train[start_i:end_i]\n pred = model(xb)\n loss = loss_func(pred, yb)\n\n loss.backward()\n with torch.no_grad():\n for p in model.parameters():\n p -= p.grad * lr\n model.zero_grad()\n\nfit()","metadata":{"jupyter":{"outputs_hidden":false}},"pos":40,"state":"done","type":"cell"}
+{"cell_type":"code","id":"f054c0","input":"train_ds = TensorDataset(x_train, y_train)\ntrain_dl = DataLoader(train_ds, batch_size=bs, shuffle=True)\n\nvalid_ds = TensorDataset(x_valid, y_valid)\nvalid_dl = DataLoader(valid_ds, batch_size=bs * 2)","metadata":{"jupyter":{"outputs_hidden":false}},"pos":64,"state":"done","type":"cell"}
+{"cell_type":"code","id":"f60dd8","input":"def get_data(train_ds, valid_ds, bs):\n return (\n DataLoader(train_ds, batch_size=bs, shuffle=True),\n DataLoader(valid_ds, batch_size=bs * 2),\n )","metadata":{"jupyter":{"outputs_hidden":false}},"pos":72,"state":"done","type":"cell"}
+{"cell_type":"code","id":"f88b82","input":"dev = torch.device(\n \"cuda\") if torch.cuda.is_available() else torch.device(\"cpu\")","metadata":{"jupyter":{"outputs_hidden":false}},"pos":92,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"06792d","input":"And then create a device object for it:\n\n","pos":91,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"0d02a9","input":"Using torch.nn.functional\n------------------------------\n\nWe will now refactor our code, so that it does the same thing as before, only\nwe'll start taking advantage of PyTorch's ``nn`` classes to make it more concise\nand flexible. At each step from here, we should be making our code one or more\nof: shorter, more understandable, and/or more flexible.\n\nThe first and easiest step is to make our code shorter by replacing our\nhand-written activation and loss functions with those from ``torch.nn.functional``\n(which is generally imported into the namespace ``F`` by convention). This module\ncontains all the functions in the ``torch.nn`` library (whereas other parts of the\nlibrary contain classes). As well as a wide range of loss and activation\nfunctions, you'll also find here some convenient functions for creating neural\nnets, such as pooling functions. (There are also functions for doing convolutions,\nlinear layers, etc, but as we'll see, these are usually better handled using\nother parts of the library.)\n\nIf you're using negative log likelihood loss and log softmax activation,\nthen Pytorch provides a single function ``F.cross_entropy`` that combines\nthe two. So we can even remove the activation function from our model.\n\n","pos":29,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"13d2e9","input":"In the above, the ``@`` stands for the dot product operation. We will call\nour function on one batch of data (in this case, 64 images). This is\none *forward pass*. Note that our predictions won't be any better than\nrandom at this stage, since we start with random weights.\n\n","pos":15,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"144069","input":"`Momentum <https://cs231n.github.io/neural-networks-3/#sgd>`_ is a variation on\nstochastic gradient descent that takes previous updates into account as well\nand generally leads to faster training.\n\n","pos":77,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"1ec075","input":"Let's update ``preprocess`` to move batches to the GPU:\n\n","pos":93,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"24a7ad","input":"We will calculate and print the validation loss at the end of each epoch.\n\n(Note that we always call ``model.train()`` before training, and ``model.eval()``\nbefore inference, because these are used by layers such as ``nn.BatchNorm2d``\nand ``nn.Dropout`` to ensure appropriate behaviour for these different phases.)\n\n","pos":65,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"2665fd","input":"We can now run a training loop. For each iteration, we will:\n\n- select a mini-batch of data (of size ``bs``)\n- use the model to make predictions\n- calculate the loss\n- ``loss.backward()`` updates the gradients of the model, in this case, ``weights``\n and ``bias``.\n\nWe now use these gradients to update the weights and bias. We do this\nwithin the ``torch.no_grad()`` context manager, because we do not want these\nactions to be recorded for our next calculation of the gradient. You can read\nmore about how PyTorch's Autograd records operations\n`here <https://pytorch.org/docs/stable/notes/autograd.html>`_.\n\nWe then set the\ngradients to zero, so that we are ready for the next loop.\nOtherwise, our gradients would record a running tally of all the operations\nthat had happened (i.e. ``loss.backward()`` *adds* the gradients to whatever is\nalready stored, rather than replacing them).\n\n.. tip:: You can use the standard python debugger to step through PyTorch\n code, allowing you to check the various variable values at each step.\n Uncomment ``set_trace()`` below to try it out.\n\n\n","pos":25,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"26ffdc","input":"The model created with ``Sequential`` is simply:\n\n","pos":81,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"298978","input":"We recommend running this tutorial as a notebook, not a script. To download the notebook (.ipynb) file,\nclick the link at the top of the page.\n\nPyTorch provides the elegantly designed modules and classes `torch.nn <https://pytorch.org/docs/stable/nn.html>`_ ,\n`torch.optim <https://pytorch.org/docs/stable/optim.html>`_ ,\n`Dataset <https://pytorch.org/docs/stable/data.html?highlight=dataset#torch.utils.data.Dataset>`_ ,\nand `DataLoader <https://pytorch.org/docs/stable/data.html?highlight=dataloader#torch.utils.data.DataLoader>`_\nto help you create and train neural networks.\nIn order to fully utilize their power and customize\nthem for your problem, you need to really understand exactly what they're\ndoing. To develop this understanding, we will first train basic neural net\non the MNIST data set without using any features from these models; we will\ninitially only use the most basic PyTorch tensor functionality. Then, we will\nincrementally add one feature from ``torch.nn``, ``torch.optim``, ``Dataset``, or\n``DataLoader`` at a time, showing exactly what each piece does, and how it\nworks to make the code either more concise, or more flexible.\n\n**This tutorial assumes you already have PyTorch installed, and are familiar\nwith the basics of tensor operations.** (If you're familiar with Numpy array\noperations, you'll find the PyTorch tensor operations used here nearly identical).\n\nMNIST data setup\n----------------\n\nWe will use the classic `MNIST <http://deeplearning.net/data/mnist/>`_ dataset,\nwhich consists of black-and-white images of hand-drawn digits (between 0 and 9).\n\nWe will use `pathlib <https://docs.python.org/3/library/pathlib.html>`_\nfor dealing with paths (part of the Python 3 standard library), and will\ndownload the dataset using\n`requests <http://docs.python-requests.org/en/master/>`_. We will only\nimport modules when we use them, so you can see exactly what's being\nused at each point.\n\n","pos":2,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"2a0f71","input":"You should find it runs faster now:\n\n","pos":97,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"2bc0e0","input":"Let's double-check that our loss has gone down:\n\n","pos":41,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"2f876e","input":"Let's try it out:\n\n","pos":87,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"3152f1","input":"Refactor using Dataset\n------------------------------\n\nPyTorch has an abstract Dataset class. A Dataset can be anything that has\na ``__len__`` function (called by Python's standard ``len`` function) and\na ``__getitem__`` function as a way of indexing into it.\n`This tutorial <https://pytorch.org/tutorials/beginner/data_loading_tutorial.html>`_\nwalks through a nice example of creating a custom ``FacialLandmarkDataset`` class\nas a subclass of ``Dataset``.\n\nPyTorch's `TensorDataset <https://pytorch.org/docs/stable/_modules/torch/utils/data/dataset.html#TensorDataset>`_\nis a Dataset wrapping tensors. By defining a length and way of indexing,\nthis also gives us a way to iterate, index, and slice along the first\ndimension of a tensor. This will make it easier to access both the\nindependent and dependent variables in the same line as we train.\n\n","pos":53,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"32d67f","input":"We are still able to use our same ``fit`` method as before.\n\n","pos":47,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"39260e","input":"PyTorch uses ``torch.tensor``, rather than numpy arrays, so we need to\nconvert our data.\n\n","pos":9,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"411394","input":"``fit`` runs the necessary operations to train our model and compute the\ntraining and validation losses for each epoch.\n\n","pos":69,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"4b1a4f","input":"Now, our whole process of obtaining the data loaders and fitting the\nmodel can be run in 3 lines of code:\n\n","pos":73,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"4fe9bb","input":"Since we're now using an object instead of just using a function, we\nfirst have to instantiate our model:\n\n","pos":35,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"53d8e0","input":"nn.Sequential\n------------------------\n\n``torch.nn`` has another handy class we can use to simplify our code:\n`Sequential <https://pytorch.org/docs/stable/nn.html#torch.nn.Sequential>`_ .\nA ``Sequential`` object runs each of the modules contained within it, in a\nsequential manner. This is a simpler way of writing our neural network.\n\nTo take advantage of this, we need to be able to easily define a\n**custom layer** from a given function. For instance, PyTorch doesn't\nhave a `view` layer, and we need to create one for our network. ``Lambda``\nwill create a layer that we can then use when defining a network with\n``Sequential``.\n\n","pos":79,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"5618be","input":"``get_data`` returns dataloaders for the training and validation sets.\n\n","pos":71,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"59aaf4","input":"Let's check our loss with our random model, so we can see if we improve\nafter a backprop pass later.\n\n","pos":19,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"5e3759","input":"\nWhat is `torch.nn` *really*?\n============================\nby Jeremy Howard, `fast.ai <https://www.fast.ai>`_. Thanks to Rachel Thomas and Francisco Ingham.\n\n","pos":1,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"5f9e23","input":"Now we can calculate the loss in the same way as before. Note that\n``nn.Module`` objects are used as if they are functions (i.e they are\n*callable*), but behind the scenes Pytorch will call our ``forward``\nmethod automatically.\n\n","pos":37,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"610c24","input":"Note that we no longer call ``log_softmax`` in the ``model`` function. Let's\nconfirm that our loss and accuracy are the same as before:\n\n","pos":31,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"67a0eb","input":"Let's check the accuracy of our random model, so we can see if our\naccuracy improves as our loss improves.\n\n","pos":23,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"682fa6","input":"Wrapping DataLoader\n-----------------------------\n\nOur CNN is fairly concise, but it only works with MNIST, because:\n - It assumes the input is a 28\\*28 long vector\n - It assumes that the final CNN grid size is 4\\*4 (since that's the average\npooling kernel size we used)\n\nLet's get rid of these two assumptions, so our model works with any 2d\nsingle channel image. First, we can remove the initial Lambda layer by\nmoving the data preprocessing into a generator:\n\n","pos":83,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"69e5bd","input":"Finally, we can move our model to the GPU.\n\n","pos":95,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"70be5f","input":"Refactor using nn.Linear\n-------------------------\n\nWe continue to refactor our code. Instead of manually defining and\ninitializing ``self.weights`` and ``self.bias``, and calculating ``xb @\nself.weights + self.bias``, we will instead use the Pytorch class\n`nn.Linear <https://pytorch.org/docs/stable/nn.html#linear-layers>`_ for a\nlinear layer, which does all that for us. Pytorch has many types of\npredefined layers that can greatly simplify our code, and often makes it\nfaster too.\n\n","pos":43,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"711a8d","input":"Neural net from scratch (no torch.nn)\n---------------------------------------------\n\nLet's first create a model using nothing but PyTorch tensor operations. We're assuming\nyou're already familiar with the basics of neural networks. (If you're not, you can\nlearn them at `course.fast.ai <https://course.fast.ai>`_).\n\nPyTorch provides methods to create random or zero-filled tensors, which we will\nuse to create our weights and bias for a simple linear model. These are just regular\ntensors, with one very special addition: we tell PyTorch that they require a\ngradient. This causes PyTorch to record all of the operations done on the tensor,\nso that it can calculate the gradient during back-propagation *automatically*!\n\nFor the weights, we set ``requires_grad`` **after** the initialization, since we\ndon't want that step included in the gradient. (Note that a trailing ``_`` in\nPyTorch signifies that the operation is performed in-place.)\n\n<div class=\"alert alert-info\"><h4>Note</h4><p>We are initializing the weights here with\n `Xavier initialisation <http://proceedings.mlr.press/v9/glorot10a/glorot10a.pdf>`_\n (by multiplying with 1/sqrt(n)).</p></div>\n\n","pos":11,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"74be6e","input":"Thanks to Pytorch's ``nn.Module``, ``nn.Parameter``, ``Dataset``, and ``DataLoader``,\nour training loop is now dramatically smaller and easier to understand. Let's\nnow try to add the basic features necessary to create effective models in practice.\n\nAdd validation\n-----------------------\n\nIn section 1, we were just trying to get a reasonable training loop set up for\nuse on our training data. In reality, you **always** should also have\na `validation set <https://www.fast.ai/2017/11/13/validation-sets/>`_, in order\nto identify if you are overfitting.\n\nShuffling the training data is\n`important <https://www.quora.com/Does-the-order-of-training-data-matter-when-training-neural-networks>`_\nto prevent correlation between batches and overfitting. On the other hand, the\nvalidation loss will be identical whether we shuffle the validation set or not.\nSince shuffling takes extra time, it makes no sense to shuffle the validation data.\n\nWe'll use a batch size for the validation set that is twice as large as\nthat for the training set. This is because the validation set does not\nneed backpropagation and thus takes less memory (it doesn't need to\nstore the gradients). We take advantage of this to use a larger batch\nsize and compute the loss more quickly.\n\n","pos":63,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"77b646","input":"Refactor using optim\n------------------------------\n\nPytorch also has a package with various optimization algorithms, ``torch.optim``.\nWe can use the ``step`` method from our optimizer to take a forward step, instead\nof manually updating each parameter.\n\nThis will let us replace our previous manually coded optimization step:\n::\n with torch.no_grad():\n for p in model.parameters(): p -= p.grad * lr\n model.zero_grad()\n\nand instead use just:\n::\n opt.step()\n opt.zero_grad()\n\n(``optim.zero_grad()`` resets the gradient to 0 and we need to call it before\ncomputing the gradient for the next minibatch.)\n\n","pos":49,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"841c6f","input":"As you see, the ``preds`` tensor contains not only the tensor values, but also a\ngradient function. We'll use this later to do backprop.\n\nLet's implement negative log-likelihood to use as the loss function\n(again, we can just use standard Python):\n\n","pos":17,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"a0d8c3","input":"Previously for our training loop we had to update the values for each parameter\nby name, and manually zero out the grads for each parameter separately, like this:\n::\n with torch.no_grad():\n weights -= weights.grad * lr\n bias -= bias.grad * lr\n weights.grad.zero_()\n bias.grad.zero_()\n\n\nNow we can take advantage of model.parameters() and model.zero_grad() (which\nare both defined by PyTorch for ``nn.Module``) to make those steps more concise\nand less prone to the error of forgetting some of our parameters, particularly\nif we had a more complicated model:\n::\n with torch.no_grad():\n for p in model.parameters(): p -= p.grad * lr\n model.zero_grad()\n\n\nWe'll wrap our little training loop in a ``fit`` function so we can run it\nagain later.\n\n","pos":39,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"ae0d92","input":"Next, we can replace ``nn.AvgPool2d`` with ``nn.AdaptiveAvgPool2d``, which\nallows us to define the size of the *output* tensor we want, rather than\nthe *input* tensor we have. As a result, our model will work with any\nsize input.\n\n","pos":85,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"ae716c","input":"Thanks to PyTorch's ability to calculate gradients automatically, we can\nuse any standard Python function (or callable object) as a model! So\nlet's just write a plain matrix multiplication and broadcasted addition\nto create a simple linear model. We also need an activation function, so\nwe'll write `log_softmax` and use it. Remember: although PyTorch\nprovides lots of pre-written loss functions, activation functions, and\nso forth, you can easily write your own using plain python. PyTorch will\neven create fast GPU or vectorized CPU code for your function\nautomatically.\n\n","pos":13,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"aebae7","input":"We'll define a little function to create our model and optimizer so we\ncan reuse it in the future.\n\n","pos":51,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"b0fd86","input":"We instantiate our model and calculate the loss in the same way as before:\n\n","pos":45,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"b3c5e0","input":"Previously, our loop iterated over batches (xb, yb) like this:\n::\n for i in range((n-1)//bs + 1):\n xb,yb = train_ds[i*bs : i*bs+bs]\n pred = model(xb)\n\nNow, our loop is much cleaner, as (xb, yb) are loaded automatically from the data loader:\n::\n for xb,yb in train_dl:\n pred = model(xb)\n\n","pos":61,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"b85c97","input":"Refactor using DataLoader\n------------------------------\n\nPytorch's ``DataLoader`` is responsible for managing batches. You can\ncreate a ``DataLoader`` from any ``Dataset``. ``DataLoader`` makes it easier\nto iterate over batches. Rather than having to use ``train_ds[i*bs : i*bs+bs]``,\nthe DataLoader gives us each minibatch automatically.\n\n","pos":59,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"bba2b6","input":"Each image is 28 x 28, and is being stored as a flattened row of length\n784 (=28x28). Let's take a look at one; we need to reshape it to 2d\nfirst.\n\n","pos":7,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"cadeff","input":"Let's also implement a function to calculate the accuracy of our model.\nFor each prediction, if the index with the largest value matches the\ntarget value, then the prediction was correct.\n\n","pos":21,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"cf6a2b","input":"Closing thoughts\n-----------------\n\nWe now have a general data pipeline and training loop which you can use for\ntraining many types of models using Pytorch. To see how simple training a model\ncan now be, take a look at the `mnist_sample` sample notebook.\n\nOf course, there are many things you'll want to add, such as data augmentation,\nhyperparameter tuning, monitoring training, transfer learning, and so forth.\nThese features are available in the fastai library, which has been developed\nusing the same design approach shown in this tutorial, providing a natural\nnext step for practitioners looking to take their models further.\n\nWe promised at the start of this tutorial we'd explain through example each of\n``torch.nn``, ``torch.optim``, ``Dataset``, and ``DataLoader``. So let's summarize\nwhat we've seen:\n\n - **torch.nn**\n\n + ``Module``: creates a callable which behaves like a function, but can also\n contain state(such as neural net layer weights). It knows what ``Parameter`` (s) it\n contains and can zero all their gradients, loop through them for weight updates, etc.\n + ``Parameter``: a wrapper for a tensor that tells a ``Module`` that it has weights\n that need updating during backprop. Only tensors with the `requires_grad` attribute set are updated\n + ``functional``: a module(usually imported into the ``F`` namespace by convention)\n which contains activation functions, loss functions, etc, as well as non-stateful\n versions of layers such as convolutional and linear layers.\n - ``torch.optim``: Contains optimizers such as ``SGD``, which update the weights\n of ``Parameter`` during the backward step\n - ``Dataset``: An abstract interface of objects with a ``__len__`` and a ``__getitem__``,\n including classes provided with Pytorch such as ``TensorDataset``\n - ``DataLoader``: Takes any ``Dataset`` and creates an iterator which returns batches of data.\n\n","pos":99,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"d0346c","input":"You can use these basic 3 lines of code to train a wide variety of models.\nLet's see if we can use them to train a convolutional neural network (CNN)!\n\nSwitch to CNN\n-------------\n\nWe are now going to build our neural network with three convolutional layers.\nBecause none of the functions in the previous section assume anything about\nthe model form, we'll be able to use them to train a CNN without any modification.\n\nWe will use Pytorch's predefined\n`Conv2d <https://pytorch.org/docs/stable/nn.html#torch.nn.Conv2d>`_ class\nas our convolutional layer. We define a CNN with 3 convolutional layers.\nEach convolution is followed by a ReLU. At the end, we perform an\naverage pooling. (Note that ``view`` is PyTorch's version of numpy's\n``reshape``)\n\n","pos":75,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"d10549","input":"Create fit() and get_data()\n----------------------------------\n\nWe'll now do a little refactoring of our own. Since we go through a similar\nprocess twice of calculating the loss for both the training set and the\nvalidation set, let's make that into its own function, ``loss_batch``, which\ncomputes the loss for one batch.\n\nWe pass an optimizer in for the training set, and use it to perform\nbackprop. For the validation set, we don't pass an optimizer, so the\nmethod doesn't perform backprop.\n\n","pos":67,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"d398a1","input":"That's it: we've created and trained a minimal neural network (in this case, a\nlogistic regression, since we have no hidden layers) entirely from scratch!\n\nLet's check the loss and accuracy and compare those to what we got\nearlier. We expect that the loss will have decreased and accuracy to\nhave increased, and they have.\n\n","pos":27,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"e02480","input":"Refactor using nn.Module\n-----------------------------\nNext up, we'll use ``nn.Module`` and ``nn.Parameter``, for a clearer and more\nconcise training loop. We subclass ``nn.Module`` (which itself is a class and\nable to keep track of state). In this case, we want to create a class that\nholds our weights, bias, and method for the forward step. ``nn.Module`` has a\nnumber of attributes and methods (such as ``.parameters()`` and ``.zero_grad()``)\nwhich we will be using.\n\n<div class=\"alert alert-info\"><h4>Note</h4><p>``nn.Module`` (uppercase M) is a PyTorch specific concept, and is a\n class we'll be using a lot. ``nn.Module`` is not to be confused with the Python\n concept of a (lowercase ``m``) `module <https://docs.python.org/3/tutorial/modules.html>`_,\n which is a file of Python code that can be imported.</p></div>\n\n","pos":33,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"e51cd0","input":"Both ``x_train`` and ``y_train`` can be combined in a single ``TensorDataset``,\nwhich will be easier to iterate over and slice.\n\n","pos":55,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"f1ddaa","input":"This dataset is in numpy array format, and has been stored using pickle,\na python-specific format for serializing data.\n\n","pos":4,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"f3f1d1","input":"Previously, we had to iterate through minibatches of x and y values separately:\n::\n xb = x_train[start_i:end_i]\n yb = y_train[start_i:end_i]\n\n\nNow, we can do these two steps together:\n::\n xb,yb = train_ds[i*bs : i*bs+bs]\n\n\n","pos":57,"state":"done","type":"cell"}
+{"cell_type":"markdown","id":"f73650","input":"Using your GPU\n---------------\n\nIf you're lucky enough to have access to a CUDA-capable GPU (you can\nrent one for about $0.50/hour from most cloud providers) you can\nuse it to speed up your code. First check that your GPU is working in\nPytorch:\n\n","pos":89,"state":"done","type":"cell"}
+{"id":0,"time":1631142246776,"type":"user"}
+{"last_load":1631142247936,"type":"file"} \ No newline at end of file
diff --git a/accuracy.png b/accuracy.png
index 4d04a64..3ca17c5 100644
--- a/accuracy.png
+++ b/accuracy.png
Binary files differ
diff --git a/loss.png b/loss.png
index 8a9eca4..d0d9362 100644
--- a/loss.png
+++ b/loss.png
Binary files differ
diff --git a/mnist.py b/mnist.py
index 8ab8614..ea99633 100644..100755
--- a/mnist.py
+++ b/mnist.py
@@ -1,3 +1,4 @@
+#!/usr/bin/python3
import torch
from torch import nn
from torch.autograd import Variable
diff --git a/model.pth b/model.pth
index d0a1381..dae829a 100644
--- a/model.pth
+++ b/model.pth
Binary files differ
diff --git a/nn_tutorial.ipynb b/nn_tutorial.ipynb
index 9cd501a..05f6f72 100644
--- a/nn_tutorial.ipynb
+++ b/nn_tutorial.ipynb
@@ -2,21 +2,24 @@
"cells": [
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
},
- "outputs": [],
+ "outputs": [
+ ],
"source": [
"%matplotlib inline"
]
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"\n",
"What is `torch.nn` *really*?\n",
@@ -27,7 +30,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"We recommend running this tutorial as a notebook, not a script. To download the notebook (.ipynb) file,\n",
"click the link at the top of the page.\n",
@@ -67,14 +72,14 @@
},
{
"cell_type": "code",
- "execution_count": 4,
+ "execution_count": 1,
"metadata": {
"collapsed": false,
"jupyter": {
- "outputs_hidden": false
}
},
- "outputs": [],
+ "outputs": [
+ ],
"source": [
"from pathlib import Path\n",
"import requests\n",
@@ -94,7 +99,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"This dataset is in numpy array format, and has been stored using pickle,\n",
"a python-specific format for serializing data.\n",
@@ -103,14 +110,14 @@
},
{
"cell_type": "code",
- "execution_count": 6,
+ "execution_count": 2,
"metadata": {
"collapsed": false,
"jupyter": {
- "outputs_hidden": false
}
},
- "outputs": [],
+ "outputs": [
+ ],
"source": [
"import pickle\n",
"import gzip\n",
@@ -121,8 +128,10 @@
},
{
"cell_type": "code",
- "execution_count": 11,
- "metadata": {},
+ "execution_count": 3,
+ "metadata": {
+ "collapsed": false
+ },
"outputs": [
{
"name": "stdout",
@@ -144,7 +153,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"Each image is 28 x 28, and is being stored as a flattened row of length\n",
"784 (=28x28). Let's take a look at one; we need to reshape it to 2d\n",
@@ -154,11 +165,10 @@
},
{
"cell_type": "code",
- "execution_count": 3,
+ "execution_count": 4,
"metadata": {
"collapsed": false,
"jupyter": {
- "outputs_hidden": false
}
},
"outputs": [
@@ -171,15 +181,20 @@
},
{
"data": {
- "image/png": "iVBORw0KGgoAAAANSUhEUgAAAPsAAAD4CAYAAAAq5pAIAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjQuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/MnkTPAAAACXBIWXMAAAsTAAALEwEAmpwYAAAN8klEQVR4nO3df6jVdZ7H8ddrbfojxzI39iZOrWOEUdE6i9nSyjYRTj8o7FYMIzQ0JDl/JDSwyIb7xxSLIVu6rBSDDtXYMus0UJHFMNVm5S6BdDMrs21qoxjlphtmmv1a9b1/3K9xp+75nOs53/PD+34+4HDO+b7P93zffPHl99f53o8jQgAmvj/rdQMAuoOwA0kQdiAJwg4kQdiBJE7o5sJsc+of6LCI8FjT29qy277C9lu237F9ezvfBaCz3Op1dtuTJP1B0gJJOyW9JGlRROwozMOWHeiwTmzZ50l6JyLejYgvJf1G0sI2vg9AB7UT9hmS/jjq/c5q2p+wvcT2kO2hNpYFoE0dP0EXEeskrZPYjQd6qZ0t+y5JZ4x6/51qGoA+1E7YX5J0tu3v2j5R0o8kbaynLQB1a3k3PiIO2V4q6SlJkyQ9EBFv1NYZgFq1fOmtpYVxzA50XEd+VAPg+EHYgSQIO5AEYQeSIOxAEoQdSIKwA0kQdiAJwg4kQdiBJAg7kARhB5Ig7EAShB1IgrADSRB2IAnCDiRB2IEkCDuQBGEHkiDsQBKEHUiCsANJEHYgCcIOJEHYgSQIO5AEYQeSIOxAEi0P2Yzjw6RJk4r1U045paPLX7p0acPaSSedVJx39uzZxfqtt95arN9zzz0Na4sWLSrO+/nnnxfrK1euLNbvvPPOYr0X2gq77fckHZB0WNKhiJhbR1MA6lfHlv3SiPiwhu8B0EEcswNJtBv2kPS07ZdtLxnrA7aX2B6yPdTmsgC0od3d+PkRscv2X0h6xvZ/R8Tm0R+IiHWS1kmS7WhzeQBa1NaWPSJ2Vc97JD0maV4dTQGoX8thtz3Z9pSjryX9QNL2uhoDUK92duMHJD1m++j3/HtE/L6WriaYM888s1g/8cQTi/WLL764WJ8/f37D2tSpU4vzXn/99cV6L+3cubNYX7NmTbE+ODjYsHbgwIHivK+++mqx/sILLxTr/ajlsEfEu5L+qsZeAHQQl96AJAg7kARhB5Ig7EAShB1IwhHd+1HbRP0F3Zw5c4r1TZs2Feudvs20Xx05cqRYv/nmm4v1Tz75pOVlDw8PF+sfffRRsf7WW2+1vOxOiwiPNZ0tO5AEYQeSIOxAEoQdSIKwA0kQdiAJwg4kwXX2GkybNq1Y37JlS7E+a9asOtupVbPe9+3bV6xfeumlDWtffvllcd6svz9oF9fZgeQIO5AEYQeSIOxAEoQdSIKwA0kQdiAJhmyuwd69e4v1ZcuWFetXX311sf7KK68U683+pHLJtm3bivUFCxYU6wcPHizWzzvvvIa12267rTgv6sWWHUiCsANJEHYgCcIOJEHYgSQIO5AEYQeS4H72PnDyyScX682GF167dm3D2uLFi4vz3njjjcX6hg0binX0n5bvZ7f9gO09trePmjbN9jO2366eT62zWQD1G89u/K8kXfG1abdLejYizpb0bPUeQB9rGvaI2Czp678HXShpffV6vaRr620LQN1a/W38QEQcHSzrA0kDjT5oe4mkJS0uB0BN2r4RJiKidOItItZJWidxgg7opVYvve22PV2Squc99bUEoBNaDftGSTdVr2+S9Hg97QDolKa78bY3SPq+pNNs75T0c0krJf3W9mJJ70v6YSebnOj279/f1vwff/xxy/PecsstxfrDDz9crDcbYx39o2nYI2JRg9JlNfcCoIP4uSyQBGEHkiDsQBKEHUiCsANJcIvrBDB58uSGtSeeeKI47yWXXFKsX3nllcX6008/Xayj+xiyGUiOsANJEHYgCcIOJEHYgSQIO5AEYQeS4Dr7BHfWWWcV61u3bi3W9+3bV6w/99xzxfrQ0FDD2n333Vect5v/NicSrrMDyRF2IAnCDiRB2IEkCDuQBGEHkiDsQBJcZ09ucHCwWH/wwQeL9SlTprS87OXLlxfrDz30ULE+PDxcrGfFdXYgOcIOJEHYgSQIO5AEYQeSIOxAEoQdSILr7Cg6//zzi/XVq1cX65dd1vpgv2vXri3WV6xYUazv2rWr5WUfz1q+zm77Adt7bG8fNe0O27tsb6seV9XZLID6jWc3/leSrhhj+r9ExJzq8bt62wJQt6Zhj4jNkvZ2oRcAHdTOCbqltl+rdvNPbfQh20tsD9lu/MfIAHRcq2H/haSzJM2RNCxpVaMPRsS6iJgbEXNbXBaAGrQU9ojYHRGHI+KIpF9KmldvWwDq1lLYbU8f9XZQ0vZGnwXQH5peZ7e9QdL3JZ0mabekn1fv50gKSe9J+mlENL25mOvsE8/UqVOL9WuuuaZhrdm98vaYl4u/smnTpmJ9wYIFxfpE1eg6+wnjmHHRGJPvb7sjAF3Fz2WBJAg7kARhB5Ig7EAShB1Igltc0TNffPFFsX7CCeWLRYcOHSrWL7/88oa1559/vjjv8Yw/JQ0kR9iBJAg7kARhB5Ig7EAShB1IgrADSTS96w25XXDBBcX6DTfcUKxfeOGFDWvNrqM3s2PHjmJ98+bNbX3/RMOWHUiCsANJEHYgCcIOJEHYgSQIO5AEYQeS4Dr7BDd79uxifenSpcX6ddddV6yffvrpx9zTeB0+fLhYHx4u//XyI0eO1NnOcY8tO5AEYQeSIOxAEoQdSIKwA0kQdiAJwg4kwXX240Cza9mLFo010O6IZtfRZ86c2UpLtRgaGirWV6xYUaxv3LixznYmvKZbdttn2H7O9g7bb9i+rZo+zfYztt+unk/tfLsAWjWe3fhDkv4+Is6V9DeSbrV9rqTbJT0bEWdLerZ6D6BPNQ17RAxHxNbq9QFJb0qaIWmhpPXVx9ZLurZDPQKowTEds9ueKel7krZIGoiIoz9O/kDSQIN5lkha0kaPAGow7rPxtr8t6RFJP4uI/aNrMTI65JiDNkbEuoiYGxFz2+oUQFvGFXbb39JI0H8dEY9Wk3fbnl7Vp0va05kWAdSh6W68bUu6X9KbEbF6VGmjpJskrayeH+9IhxPAwMCYRzhfOffcc4v1e++9t1g/55xzjrmnumzZsqVYv/vuuxvWHn+8/E+GW1TrNZ5j9r+V9GNJr9veVk1brpGQ/9b2YknvS/phRzoEUIumYY+I/5I05uDuki6rtx0AncLPZYEkCDuQBGEHkiDsQBKEHUiCW1zHadq0aQ1ra9euLc47Z86cYn3WrFmttFSLF198sVhftWpVsf7UU08V65999tkx94TOYMsOJEHYgSQIO5AEYQeSIOxAEoQdSIKwA0mkuc5+0UUXFevLli0r1ufNm9ewNmPGjJZ6qsunn37asLZmzZrivHfddVexfvDgwZZ6Qv9hyw4kQdiBJAg7kARhB5Ig7EAShB1IgrADSaS5zj44ONhWvR07duwo1p988sli/dChQ8V66Z7
zffv2FedFHmzZgSQIO5AEYQeSIOxAEoQdSIKwA0kQdiAJR0T5A/YZkh6SNCApJK2LiH+1fYekWyT9b/XR5RHxuybfVV4YgLZFxJijLo8n7NMlTY+IrbanSHpZ0rUaGY/9k4i4Z7xNEHag8xqFfTzjsw9LGq5eH7D9pqTe/mkWAMfsmI7Zbc+U9D1JW6pJS22/ZvsB26c2mGeJ7SHbQ+21CqAdTXfjv/qg/W1JL0haERGP2h6Q9KFGjuP/SSO7+jc3+Q5244EOa/mYXZJsf0vSk5KeiojVY9RnSnoyIs5v8j2EHeiwRmFvuhtv25Lul/Tm6KBXJ+6OGpS0vd0mAXTOeM7Gz5f0n5Jel3Skmrxc0iJJczSyG/+epJ9WJ/NK38WWHeiwtnbj60LYgc5reTcewMRA2IEkCDuQBGEHkiDsQBKEHUiCsANJEHYgCcIOJEHYgSQIO5AEYQeSIOxAEoQdSKLbQzZ/KOn9Ue9Pq6b1o37trV/7kuitVXX29peNCl29n/0bC7eHImJuzxoo6Nfe+rUvid5a1a3e2I0HkiDsQBK9Dvu6Hi+/pF9769e+JHprVVd66+kxO4Du6fWWHUCXEHYgiZ6E3fYVtt+y/Y7t23vRQyO237P9uu1tvR6frhpDb4/t7aOmTbP9jO23q+cxx9jrUW932N5Vrbtttq/qUW9n2H7O9g7bb9i+rZre03VX6Ksr663rx+y2J0n6g6QFknZKeknSoojY0dVGGrD9nqS5EdHzH2DY/jtJn0h66OjQWrb/WdLeiFhZ/Ud5akT8Q5/0doeOcRjvDvXWaJjxn6iH667O4c9b0Yst+zxJ70TEuxHxpaTfSFrYgz76XkRslrT3a5MXSlpfvV6vkX8sXdegt74QEcMRsbV6fUDS0WHGe7ruCn11RS/CPkPSH0e936n+Gu89JD1t+2XbS3rdzBgGRg2z9YGkgV42M4amw3h309eGGe+bddfK8Oft4gTdN82PiL+WdKWkW6vd1b4UI8dg/XTt9BeSztLIGIDDklb1splqmPFHJP0sIvaPrvVy3Y3RV1fWWy/CvkvSGaPef6ea1hciYlf1vEfSYxo57Ognu4+OoFs97+lxP1+JiN0RcTgijkj6pXq47qphxh+R9OuIeLSa3PN1N1Zf3VpvvQj7S5LOtv1d2ydK+pGkjT3o4xtsT65OnMj2ZEk/UP8NRb1R0k3V65skPd7DXv5Evwzj3WiYcfV43fV8+POI6PpD0lUaOSP/P5L+sRc9NOhrlqRXq8cbve5N0gaN7Nb9n0bObSyW9OeSnpX0tqT/kDStj3r7N40M7f2aRoI1vUe9zdfILvprkrZVj6t6ve4KfXVlvfFzWSAJTtABSRB2IAnCDiRB2IEkCDuQBGEHkiDsQBL/DyJ7caZa7LphAAAAAElFTkSuQmCC\n",
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAz0AAAM2CAYAAADcr+22AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAAWJQAAFiUBSVIk8AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8li6FKAAAgAElEQVR4nO3de6yldX3v8c/3MFWORAb1VEkvFuFUSLFKQUWhkVtQaasVhRNNbDktmraBKFZNG6sttLWxab0hHiW1hSOeFBtNtbZUTQQExdowFonBGxVKaFELlPvFDvM7f+w17XTce5g9s+ZZm+9+vZKdZ/azLt9fdOVh3vOs9awaYwQAAKCr/7boBQAAAOxJogcAAGhN9AAAAK2JHgAAoDXRAwAAtCZ6AACA1kQPAADQmugBAABaEz0AAEBrogcAAGhN9AAAAK2JHgAAoLUNi17AnlBVNyTZN8mNC14KAAAwHwckuWuM8ZTVPrDGGPNfzoJV1W1JHr/odQAAAPM1xqjVPqbr29tuXPQCAACAtaFr9AAAACQRPQAAQHOiBwAAaE30AAAArYkeAACgtYVGT1X9SFX9WVX9S1U9WFU3VtW7qupxi1wXAADQx8K+nLSqDkpyVZInJvl4kq8leXaS1yZ5YVUdPca4bVHrAwAAeljkmZ7/k6Xgec0Y4yVjjN8cYxyf5J1JDk7y1gWuDQAAaKLGGNMPrTowyT9m6UtEDxpjbNnmtscmuSVJJXniGOPeXXj+TUkOn89qAQCAtWKMUat9zKLO9Bw/23562+BJkjHG3Uk+n+QxSZ4z9cIAAIBeFvWZnoNn22+scPs3kzw/yVOTfGalJ5md0VnOIbu+NAAAoJNFnenZONveucLtW/fvN8FaAACAxhZ29baHsfV9ejv8wNEY44hlH+wzPQAAwMyizvRsPZOzcYXb993ufgAAALtkUdHz9dn2qSvc/uOz7Uqf+QEAANgpi4qey2bb51fVf1nD7JLVRye5P8nfTb0wAACgl4VEzxjjH5N8OskBSc7Y7uZzkuyT5IO78h09AAAA21rIl5MmSVUdlOSqJE9M8vEkX01yZJLjsvS2tqPGGLft4nO7kAEAADT0SPpy0q1ne56Z5MIsxc7rkxyU5Nwkz93V4AEAANjWws707EnO9AAAQE+PqDM9AAAAUxA9AABAa6IHAABoTfQAAACtiR4AAKA10QMAALQmegAAgNZEDwAA0JroAQAAWhM9AABAa6IHAABoTfQAAACtiR4AAKA10QMAALQmegAAgNZEDwAA0JroAQAAWhM9AABAa6IHAABoTfQAAACtiR4AAKA10QMAALQmegAAgNZEDwAA0JroAQAAWhM9AABAa6IHAABoTfQAAACtiR4AAKA10QMAALQmegAAgNZEDwAA0JroAQAAWhM9AABAa6IHAABoTfQAAACtiR4AAKA10QMAALQmegAAgNZEDwAA0JroAQAAWhM9AABAa6IHAABoTfQAAACtiR4AAKA10QMAALQmegAAgNZEDwAA0JroAQAAWhM9AABAa6IHAABoTfQAAACtiR4AAKA10QMAALQmegAAgNZEDwAA0JroAQAAWhM9AABAa6IHAABoTfQAAACtiR4AAKA10QMAALQmegAAgNZEDwAA0JroAQAAWhM9AABAa6IHAABoTfQAAACtiR4AAKA10QMAALQmegAAgNZEDwAA0JroAQAAWhM9AABAa6IHAABoTfQAAACtiR4AAKA10QMAALQmegAAgNZEDwAA0JroAQAAWhM9AABAa6IHAABoTfQAAACtiR4AAKA10QMAALQmegAAgNZEDwAA0JroAQAAWhM9AABAa6IHAABoTfQAAACtiR4AAKA10QMAALQmegAAgNZEDwAA0JroAQAAWhM9AABAa6IHAABoTfQAAACtiR4AAKA10QMAALQmegAAgNZEDwAA0JroAQAAWhM9AABAa6IHAABoTfQAAACtiR4AAKA10QMAALQmegAAgNZEDwAA0JroAQAAWtuw6AUAMI299tpr0nkbN26cdF5HZ5555qTzHvOYx0w26+CDD55s1hlnnDHZrD/+4z+ebNYrXvGKyWYlyQMPPDDZrLe97W2TzTrnnHMmm8XiONMDAAC0JnoAAIDWRA8AANCa6AEAAFoTPQAAQGuiBwAAaE30AAAArS0seqrqxqoaK/x8e1HrAgAAeln0l5PemeRdy+y/Z+qFAAAAPS06eu4YY5y94DUAAACN+UwPAADQ2qLP9Dy6ql6Z5MlJ7k1ybZIrxhgPLXZZAABAF4uOnv2TXLTdvhuq6pfGGJ99uAdX1aYVbjpkt1cGAAC0sMi3t12Q5IQshc8+SX4yyflJDkjyt1X1jMUtDQAA6GJhZ3rGGOdst+srSX61qu5J8vokZyc5+WGe44jl9s/OAB0+h2UCAACPcGvxQgbvn22ft9BVAAAALazF6PnubLvPQlcBAAC0sBaj57mz7bcWugoAAKCFhURPVR1aVY9fZv+PJTlv9uuHpl0VAADQ0aIuZHBqkt+sqsuS3JDk7iQHJfnZJHsnuSTJHy9obQAAQCOLip7Lkhyc5Key9Ha2fZLckeRzWfrenovGGGNBawMAABpZSPTMvnj0Yb98FAAAYHetxQsZAAAAzI3oAQAAWhM9AABAa6IHAABobVFXbwMaePKTnzzZrEc96lGTzTrqqKMmm/XTP/3Tk83ab7/9JpuVJC972csmnccjy8033zzZrHPPPXeyWSeffPJks+6+++7JZiXJl7/85clmffazrnfFfDnTAwAAtCZ6AACA1kQPAADQmugBAABaEz0AAEBrogcAAGhN9AAAAK2JHgAAoDXRAwAAtCZ6AACA1kQPAADQmugBAABaEz0AAEBrogcAAGhN9AAAAK2JHgAAoDXRAwAAtCZ6AACA1kQPAADQmugBAABaEz0AAEBrogcAAGhN9AAAAK2JHgAAoDXRAwAAtCZ6AACA1mqMseg1zF1VbUpy+KLXAVM77LDDJp136aWXTjZr48aNk82C9WrLli2TzfrlX/7lyWbdc889k82a0i233DLpvH/7t3+bbNbXv/71yWbxyDPGqNU+xpkeAACgNdEDAAC0JnoAAIDWRA8AANCa6AEAAFoTPQAAQGuiBwAAaE30AAAArYkeAACgNdEDAAC0JnoAAIDWRA8AANCa6AEAAFoTPQAAQGuiBwAAaE30AAAArYkeAACgNdEDAAC0JnoAAIDWRA8AANCa6AEAAFoTPQAAQGuiBwAAaE30AAAArYkeAACgNdEDAAC0JnoAAIDWNix6AcD83HTTTZPOu+222yabtXHjxslm8cjzxS9+cbJZd9xxx2SzjjvuuMlmJcn3vve9yWZddNFFk80CcKYHAABoTfQAAACtiR4AAKA10QMAALQmegAAgNZEDwAA0JroAQAAWhM9AABAa6IHAABoTfQAAACtiR4AAKA10QMAALQmegAAgNZEDwAA0JroAQAAWhM9AABAa6IHAABoTfQAAACtiR4AAKA10QMAALQmegAAgNZ
EDwAA0JroAQAAWhM9AABAa6IHAABoTfQAAACtbVj0AoD5uf322yed98Y3vnGyWT/3cz832ax/+Id/mGzWueeeO9msqV1zzTWTzTrxxBMnm3XvvfdONuvQQw+dbFaSvPa1r510HsBUnOkBAABaEz0AAEBrogcAAGhN9AAAAK2JHgAAoDXRAwAAtCZ6AACA1kQPAADQmugBAABaEz0AAEBrogcAAGhN9AAAAK2JHgAAoDXRAwAAtCZ6AACA1kQPAADQmugBAABaEz0AAEBrogcAAGhN9AAAAK2JHgAAoDXRAwAAtCZ6AACA1kQPAADQmugBAABaEz0AAEBrNcZY9Brmrqo2JTl80esA5mffffedbNbdd9892azzzz9/slmnn376ZLOS5JWvfOVks/78z/98slkALNYYo1b7GGd6AACA1kQPAADQmugBAABaEz0AAEBrogcAAGhN9AAAAK2JHgAAoLW5RE9VnVJV76mqK6vqrqoaVfWhh3nMUVV1SVXdXlX3VdW1VXVWVe01jzUBAAAkyYY5Pc+bkzwjyT1Jbk5yyI7uXFU/n+SjSR5I8uEktyd5UZJ3Jjk6yalzWhcAALDOzevtba9L8tQk+yb5tR3dsar2TfInSR5KcuwY4/QxxhuTHJbkC0lOqaqXz2ldAADAOjeX6BljXDbG+OYYY+zE3U9J8oNJLh5jXL3NczyQpTNGycOEEwAAwM5axIUMjp9tP7nMbVckuS/JUVX16OmWBAAAdDWvz/SsxsGz7Te2v2GMsbmqbkhyaJIDk3x1R09UVZtWuGmHnykCAADWj0Wc6dk42965wu1b9+83wVoAAIDmFnGm5+HUbPuwnw8aYxyx7BMsnQE6fJ6LAgAAHpkWcaZn65mcjSvcvu929wMAANhli4ier8+2T93+hqrakOQpSTYn+daUiwIAAHpaRPRcOtu+cJnbnpfkMUmuGmM8ON2SAACArhYRPR9JcmuSl1fVM7furKq9k/z+7Nf3LWBdAABAQ3O5kEFVvSTJS2a/7j/bPreqLpz9+dYxxhuSZIxxV1W9Okvxc3lVXZzk9iQvztLlrD+S5MPzWBcAAMC8rt52WJLTttt34OwnSf4pyRu23jDG+FhVHZPkt5K8LMneSa5P8utJzh1jPOyV2wAAAHbGXKJnjHF2krNX+ZjPJ/mZecwHAABYySI+0wMAADAZ0QMAALQmegAAgNZEDwAA0Nq8rt4GsEfdddddi17CHnHnnXcuegl7zKtf/erJZn34w9N908GWLVsmmwXAfDjTAwAAtCZ6AACA1kQPAADQmugBAABaEz0AAEBrogcAAGhN9AAAAK2JHgAAoDXRAwAAtCZ6AACA1kQPAADQmugBAABaEz0AAEBrogcAAGhN9AAAAK2JHgAAoDXRAwAAtCZ6AACA1kQPAADQmugBAABaEz0AAEBrogcAAGhN9AAAAK2JHgAAoDXRAwAAtCZ6AACA1kQPAADQWo0xFr2GuauqTUkOX/Q6AB7OPvvsM9msT3ziE5PNSpJjjjlmslknnXTSZLM+/elPTzYLgO83xqjVPsaZHgAAoDXRAwAAtCZ6AACA1kQPAADQmugBAABaEz0AAEBrogcAAGhN9AAAAK2JHgAAoDXRAwAAtCZ6AACA1kQPAADQmugBAABaEz0AAEBrogcAAGhN9AAAAK2JHgAAoDXRAwAAtCZ6AACA1kQPAADQmugBAABaEz0AAEBrogcAAGhN9AAAAK2JHgAAoDXRAwAAtFZjjEWvYe6qalOSwxe9DoC15KCDDpp03pe+9KXJZt1xxx2Tzbrssssmm3X11VdPNitJ3vve9042q+PfP4BpjDFqtY9xpgcAAGhN9AAAAK2JHgAAoDXRAwAAtCZ6AACA1kQPAADQmugBAABaEz0AAEBrogcAAGhN9AAAAK2JHgAAoDXRAwAAtCZ6AACA1kQPAADQmugBAABaEz0AAEBrogcAAGhN9AAAAK2JHgAAoDXRAwAAtCZ6AACA1kQPAADQmugBAABaEz0AAEBrogcAAGhN9AAAAK3VGGPRa5i7qtqU5PBFrwNgPTv55JMnm3XBBRdMNuuxj33sZLOm9qY3vWmyWR/84Acnm3XLLbdMNgvY88YYtdrHONMDAAC0JnoAAIDWRA8AANCa6AEAAFoTPQAAQGuiBwAAaE30AAAArYkeAACgNdEDAAC0JnoAAIDWRA8AANCa6AEAAFoTPQAAQGuiBwAAaE30AAAArYkeAACgNdEDAAC0JnoAAIDWRA8AANCa6AEAAFoTPQAAQGuiBwAAaE30AAAArYkeAACgNdEDAAC0JnoAAIDWaoyx6DXMXVVtSnL4otcBwDSe9rSnTTbrHe94x2SzTjjhhMlmTe3888+fbNZb3/rWyWb98z//82SzYL0aY9RqH+NMDwAA0JroAQAAWhM9AABAa6IHAABoTfQAAACtiR4AAKA10QMAALQ2l+ipqlOq6j1VdWVV3VVVo6o+tMJ9D5jdvtLPxfNYEwAAQJJsmNPzvDnJM5Lck+TmJIfsxGO+nORjy+z/ypzWBAAAMLfoeV2WYuf6JMckuWwnHnPNGOPsOc0HAABY1lyiZ4zxH5FTVfN4SgAAgLmY15meXfFDVfUrSZ6Q5LYkXxhjXLvA9QAAAA0tMnpOnP38h6q6PMlpY4ybduYJqmrTCjftzGeKAACAdWARl6y+L8nvJTkiyeNmP1s/B3Rsks9U1T4LWBcAANDQ5Gd6xhjfTfLb2+2+oqqen+RzSY5M8qok796J5zpiuf2zM0CH7+ZSAQCABtbMl5OOMTYn+cDs1+ctci0AAEAfayZ6Zv51tvX2NgAAYC7WWvQ8Z7b91kJXAQAAtDF59FTVkVX1qGX2H5+lLzlNkg9NuyoAAKCruVzIoKpekuQls1/3n22fW1UXzv586xjjDbM//2GSQ2eXp755tu/pSY6f/fktY4yr5rEuAACAeV297bAkp22378DZT5L8U5Kt0XNRkpOTPCvJSUl+IMl3kvxFkvPGGFfOaU0AAADziZ4xxtlJzt7J+/5pkj+dx1wAAICHs9YuZAAAADBXogcAAGhN9AAAAK2JHgAAoLUaYyx6DXNXVZuSHL7odQDQz3777TfZrBe96EWTzUqSCy64YLJZVTXZrEsvvXSyWSeeeOJks2C9GmOs+gDiTA8AANCa6AEAAFoTPQAAQGuiBwAAaE30AAAArYkeAACgNdEDAAC0JnoAAIDWRA8AANCa6AEAAFoTPQAAQGuiBwAAaE30AAAArYkeAACgNdEDAAC0JnoAAIDWRA8AANCa6AEAAFoTPQAAQGuiBwAAaE30AAAArYkeAACgNdEDAAC0JnoAAIDWRA8AANCa6AEAAFoTPQAAQGs1xlj0GuauqjYlOXzR6wCAR5IHH3xwslkbNmyYbNbmzZsnm/WCF7xgslmXX375ZLNgLRlj1Gof40wPAADQmugBAABaEz0AAEBrogcAAGhN9AAAAK2JHgAAoDXRAwAAtCZ6AACA1kQPAADQmugBAABaEz0AAEBrogcAAGhN9AAAAK2JHgAAoDXRAwAAtCZ6AA
CA1kQPAADQmugBAABaEz0AAEBrogcAAGhN9AAAAK2JHgAAoDXRAwAAtCZ6AACA1kQPAADQmugBAABa27DoBQDA7nr6058+2axTTjllslnPetazJpuVJBs29PxrwXXXXTfZrCuuuGKyWcDOc6YHAABoTfQAAACtiR4AAKA10QMAALQmegAAgNZEDwAA0JroAQAAWhM9AABAa6IHAABoTfQAAACtiR4AAKA10QMAALQmegAAgNZEDwAA0JroAQAAWhM9AABAa6IHAABoTfQAAACtiR4AAKA10QMAALQmegAAgNZEDwAA0JroAQAAWhM9AABAa6IHAABoTfQAAACtbVj0AgCYxsEHHzzpvDPPPHOyWS996Usnm7X//vtPNquzhx56aLJZt9xyy2SztmzZMtksYOc50wMAALQmegAAgNZEDwAA0JroAQAAWhM9AABAa6IHAABoTfQAAACtiR4AAKA10QMAALQmegAAgNZEDwAA0JroAQAAWhM9AABAa6IHAABoTfQAAACtiR4AAKA10QMAALQmegAAgNZEDwAA0JroAQAAWhM9AABAa6IHAABoTfQAAACtiR4AAKA10QMAALQmegAAgNY2LHoBAGvN/vvvP9msV7ziFZPNOvPMMyeblSQHHHDApPPYfVdfffVks9761rdONuuv/uqvJpsFrE3O9AAAAK2JHgAAoDXRAwAAtCZ6AACA1kQPAADQmugBAABaEz0AAEBrux09VfWEqnpVVf1lVV1fVfdX1Z1V9bmqOr2qlp1RVUdV1SVVdXtV3VdV11bVWVW11+6uCQAAYKt5fDnpqUnel+SWJJcluSnJk5K8NMkHkpxUVaeOMcbWB1TVzyf5aJIHknw4ye1JXpTknUmOnj0nAADAbptH9HwjyYuT/M0YY8vWnVX1piR/n+RlWQqgj87275vkT5I8lOTYMcbVs/1vSXJpklOq6uVjjIvnsDYAAGCd2+23t40xLh1jfGLb4Jnt/3aS989+PXabm05J8oNJLt4aPLP7P5DkzbNff2131wUAAJDs+QsZ/Ptsu3mbfcfPtp9c5v5XJLkvyVFV9eg9uTAAAGB9mMfb25ZVVRuS/OLs120D5+DZ9hvbP2aMsbmqbkhyaJIDk3z1YWZsWuGmQ1a3WgAAoKs9eabnbUmeluSSMcanttm/cba9c4XHbd2/355aGAAAsH7skTM9VfWaJK9P8rUkv7Dah8+2Y4f3SjLGOGKF+ZuSHL7KuQAAQENzP9NTVWckeXeS65IcN8a4fbu7bD2TszHL23e7+wEAAOyyuUZPVZ2V5LwkX8lS8Hx7mbt9fbZ96jKP35DkKVm68MG35rk2AABgfZpb9FTVb2Tpy0WvyVLwfHeFu146275wmduel+QxSa4aYzw4r7UBAADr11yiZ/bFom9LsinJCWOMW3dw948kuTXJy6vqmds8x95Jfn/26/vmsS4AAIDdvpBBVZ2W5HeTPJTkyiSvqart73bjGOPCJBlj3FVVr85S/FxeVRcnuT3Ji7N0OeuPJPnw7q4LAAAgmc/V254y2+6V5KwV7vPZJBdu/WWM8bGqOibJbyV5WZK9k1yf5NeTnDvGeNgrtwEAAOyM3Y6eMcbZSc7ehcd9PsnP7O58AACAHdmTX04KAACwcKIHAABoTfQAAACtiR4AAKC1eVy9DVinnvSkJ0026yd+4icmm3XeeedNNuuQQw6ZbBbz8cUvfnGyWX/0R3802awk+fjHPz7ZrC1btkw2C8CZHgAAoDXRAwAAtCZ6AACA1kQPAADQmugBAABaEz0AAEBrogcAAGhN9AAAAK2JHgAAoDXRAwAAtCZ6AACA1kQPAADQmugBAABaEz0AAEBrogcAAGhN9AAAAK2JHgAAoDXRAwAAtCZ6AACA1kQPAADQmugBAABaEz0AAEBrogcAAGhN9AAAAK2JHgAAoDXRAwAAtCZ6AACA1jYsegHQ3eMf//jJZp1//vmTzUqSww47bLJZBx544GSzmI+rrrpqsllvf/vbJ5v1qU99arJZ999//2SzADpzpgcAAGhN9AAAAK2JHgAAoDXRAwAAtCZ6AACA1kQPAADQmugBAABaEz0AAEBrogcAAGhN9AAAAK2JHgAAoDXRAwAAtCZ6AACA1kQPAADQmugBAABaEz0AAEBrogcAAGhN9AAAAK2JHgAAoDXRAwAAtCZ6AACA1kQPAADQmugBAABaEz0AAEBrogcAAGhN9AAAAK1tWPQCYKsjjzxysllvfOMbJ5v17Gc/e7JZP/zDPzzZLObjvvvum2zWueeeO9msJPmDP/iDyWbde++9k80C4JHHmR4AAKA10QMAALQmegAAgNZEDwAA0JroAQAAWhM9AABAa6IHAABoTfQAAACtiR4AAKA10QMAALQmegAAgNZEDwAA0JroAQAAWhM9AABAa6IHAABoTfQAAACtiR4AAKA10QMAALQmegAAgNZEDwAA0JroAQAAWhM9AABAa6IHAABoTfQAAACtiR4AAKA10QMAALS2YdELgK1OPvnklrM6u+666yab9dd//deTzdq8efNks97+9rdPNuuOO+6YbBYArCXO9AAAAK2JHgAAoDXRAwAAtCZ6AACA1kQPAADQmugBAABaEz0AAEBrogcAAGhN9AAAAK2JHgAAoDXRAwAAtCZ6AACA1kQPAADQmugBAABaEz0AAEBrogcAAGhN9AAAAK2JHgAAoDXRAwAAtCZ6AACA1kQPAADQmugBAABaEz0AAEBrogcAAGhN9AAAAK2JHgAAoLUaYyx6DXNXVZuSHL7odQAAAPM1xqjVPsaZHgAAoDXRAwAAtCZ6AACA1kQPAADQmugBAABaEz0AAEBrogcAAGhtt6Onqp5QVa+qqr+squur6v6qurOqPldVp1fVf9vu/gdU1djBz8W7uyYAAICtNszhOU5N8r4ktyS5LMlNSZ6U5KVJPpDkpKo6dXz/t6B+OcnHlnm+r8xhTQAAAEnmEz3fSPLiJH8zxtiydWdVvSnJ3yd5WZYC6KPbPe6aMcbZc5gPAACwot1+e9sY49Ixxie2DZ7Z/m8nef/s12N3dw4AAMCumMeZnh3599l28zK3/VBV/UqSJyS5LckXxhjX7uH1AAAA68wei56q2pDkF2e/fnKZu5w4+9n2MZcnOW2McdNOzti0wk2H7OQyAQCA5vbkJavfluRpSS4ZY3xqm/33Jfm9JEckedzs55gsXQTh2CSfqap99uC6AACAdaS+/6Jqc3jSqtckeXeSryU5eoxx+048ZkOSzyU5MslZY4x378b8TUkO39XHAwAAa9MYo1b7mLmf6amqM7IUPNclOW5ngidJxhibs3SJ6yR53rzXBQAArE9zjZ6qOivJeVn6rp3jZldwW41/nW29vQ0AAJiLuUVPVf1GkncmuSZLwfPdXXia58y235rXugAAgPVtLtFTVW/J0oULNiU5YYxx6w7ue2RVPWqZ/ccned3s1w/NY10AAAC7fcnqqjotye8meSjJlUleU/V9ny26cYxx4ezPf5jk0NnlqW+e7Xt6kuNnf37LGOOq3V0XAABAMp/v6
XnKbLtXkrNWuM9nk1w4+/NFSU5O8qwkJyX5gSTfSfIXSc4bY1w5hzUBAAAk2UOXrF40l6wGAICe1sQlqwEAANYS0QMAALQmegAAgNZEDwAA0JroAQAAWhM9AABAa6IHAABoTfQAAACtiR4AAKA10QMAALQmegAAgNZEDwAA0JroAQAAWhM9AABAa6IHAABoTfQAAACtiR4AAKA10QMAALQmegAAgNZEDwAA0JroAQAAWhM9AABAa6IHAABoTfQAAACtiR4AAKA10QMAALQmegAAgNZEDwAA0JroAQAAWhM9AABAa6IHAABoTfQAAACtiR4AAKA10QMAALQmegAAgNZEDwAA0JroAQAAWhM9AABAa6IHAABoTfQAAACtiR4AAKA10QMAALQmegAAgNZEDwAA0JroAQAAWhM9AABAa6IHAABoTfQAAACtiR4AAKA10QMAALQmegAAgNZEDwAA0Co+k40AAAd+SURBVJroAQAAWhM9AABAa6IHAABoTfQAAACtiR4AAKA10QMAALTWNXoOWPQCAACAtWHDohewh9w12964isccMtt+bb5LoQmvD3bE64Md8fpgR7w+2BGvj//qgPzn3/NXpcYY813KI1RVbUqSMcYRi14La4/XBzvi9cGOeH2wI14f7IjXx/x0fXsbAABAEtEDAAA0J3oAAIDWRA8AANCa6AEAAFpz9TYAAKA1Z3oAAIDWRA8AANCa6AEAAFoTPQAAQGuiBwAAaE30AAAArYkeAACgtXUfPVX1I1X1Z1X1L1X1YFXdWFXvqqrHLXptLN7s9TBW+Pn2otfHnldVp1TVe6rqyqq6a/b//Yce5jFHVdUlVXV7Vd1XVddW1VlVtddU62Yaq3l9VNUBOziejKq6eOr1s+dU1ROq6lVV9ZdVdX1V3V9Vd1bV56rq9Kpa9u9gjh/rw2pfH44fu2/DohewSFV1UJKrkjwxyceTfC3Js5O8NskLq+roMcZtC1wia8OdSd61zP57pl4IC/HmJM/I0v/fNyc5ZEd3rqqfT/LRJA8k+XCS25O8KMk7kxyd5NQ9uVgmt6rXx8yXk3xsmf1fmeO6WLxTk7wvyS1JLktyU5InJXlpkg8kOamqTh3bfEu848e6surXx4zjxy6q7//fcv2oqk8leX6S14wx3rPN/nckeV2S88cYv7qo9bF4VXVjkowxDljsSliUqjouS3+ZvT7JMVn6j9P/G2O8cpn77ju738YkR48xrp7t3zvJpUmem+QVYwz/ItfEKl8fByS5Icn/HWP87+lWySJU1fFJ9knyN2OMLdvs3z/J3yf50SSnjDE+Otvv+LGO7MLr44A4fuyWdfv2tqo6MEvBc2OS92538+8kuTfJL1TVPhMvDVhDxhiXjTG+ucy/ti3nlCQ/mOTirX9hmT3HA1k6I5Akv7YHlsmCrPL1wToyxrh0jPGJbf9CO9v/7STvn/167DY3OX6sI7vw+mA3ree3tx0/2356mRfc3VX1+SxF0XOSfGbqxbGmPLqqXpnkyVmK4WuTXDHGeGixy2IN2npc+eQyt12R5L4kR1XVo8cYD063LNaYH6qqX0nyhCS3JfnCGOPaBa+Jaf37bLt5m32OH2y13OtjK8ePXbSeo+fg2fYbK9z+zSxFz1Mjeta7/ZNctN2+G6rql8YYn13EglizVjyujDE2V9UNSQ5NcmCSr065MNaUE2c//6GqLk9y2hjjpoWsiMlU1YYkvzj7ddvAcfxgR6+PrRw/dtG6fXtblt4zmyx9SH05W/fvN8FaWLsuSHJClsJnnyQ/meT8JAck+duqesbilsYa5LjCjtyX5PeSHJHkcbOfrZ8DOjbJZ7ylel14W5KnJblkjPGpbfY7fpCs/Ppw/NhN6zl6Hk7Ntt6nvY6NMc6Zve/2O2OM+8YYX5ld3OIdSf57krMXu0IeYRxX1rExxnfHGL89xvjSGOOO2c8VWXpXwReT/M8kr1rsKtmTquo1SV6fpavF/sJqHz7bOn40taPXh+PH7lvP0bP1X0w2rnD7vtvdD7a19UOGz1voKlhrHFdYtTHG5ixdojZxTGmrqs5I8u4k1yU5boxx+3Z3cfxYx3bi9bEsx4+dt56j5+uz7VNXuP3HZ9uVPvPD+vbd2dapZLa14nFl9j7tp2Tpg6nfmnJRPCL862zrmNJQVZ2V5LwsfZfKcbMrdG3P8WOd2snXx444fuyE9Rw9l822z1/mW28fm6UvAbs/yd9NvTAeEZ472/qPD9u6dLZ94TK3PS/JY5Jc5cpLLOM5s61jSjNV9RtZ+nLRa7L0F9rvrnBXx491aBWvjx1x/NgJ6zZ6xhj/mOTTWfpA+hnb3XxOlmr5g2OMeydeGmtEVR1aVY9fZv+PZelfZJLkQ9OuijXuI0luTfLyqnrm1p2zLxf8/dmv71vEwli8qjqyqh61zP7js/SF2IljSitV9ZYsfTB9U5ITxhi37uDujh/rzGpeH44fu6/W8/epVdVBSa5K8sQkH8/SJSCPTHJclt7WdtQY47bFrZBFqqqzk/xmls4K3pDk7iQHJfnZJHsnuSTJyWOM7y1qjex5VfWSJC+Z/bp/khdk6V/Trpztu3WM8Ybt7v+RJA8kuTjJ7UlenKXL0X4kyf/yRZZ9rOb1Mbus7KFJLk9y8+z2p+c/v5/lLWOMrX+55RGuqk5LcmGSh5K8J8t/FufGMcaF2zzG8WOdWO3rw/Fj963r6EmSqvrRJL+bpdPJT0hyS5KPJTlnZz9ERk9VdUySX03yU/nPS1bfkaVT0Bcluch/fPqbxe/v7OAu/zTGOGC7xxyd5Ley9DbIvZNcn+TPkpzrS217Wc3ro6pOT3Jyli5H+z+S/ECS7yT5QpLzxhhXrvQkPPLsxGsjST47xjh2u8c5fqwDq319OH7svnUfPQAAQG/r9jM9AADA+iB6AACA1kQPAADQmugBAABaEz0AAEBrogcAAGhN9AAAAK2JHgAAoDXRAwAAtCZ6AACA1kQPAADQmugBAABaEz0AAEBrogcAAGhN9AAAAK2JHgAAoDXRAwAAtPb/AZlgB+Ge770sAAAAAElFTkSuQmCC",
"text/plain": [
- "<Figure size 432x288 with 1 Axes>"
+ "<Figure size 864x504 with 1 Axes>"
]
},
+ "execution_count": 4,
"metadata": {
+ "image/png": {
+ "height": 411,
+ "width": 414
+ },
"needs_background": "light"
},
- "output_type": "display_data"
+ "output_type": "execute_result"
}
],
"source": [
@@ -192,7 +207,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"PyTorch uses ``torch.tensor``, rather than numpy arrays, so we need to\n",
"convert our data.\n",
@@ -201,11 +218,10 @@
},
{
"cell_type": "code",
- "execution_count": 4,
+ "execution_count": 5,
"metadata": {
"collapsed": false,
"jupyter": {
- "outputs_hidden": false
}
},
"outputs": [
@@ -239,7 +255,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"Neural net from scratch (no torch.nn)\n",
"---------------------------------------------\n",
@@ -266,14 +284,15 @@
},
{
"cell_type": "code",
- "execution_count": 5,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
},
- "outputs": [],
+ "outputs": [
+ ],
"source": [
"import math\n",
"\n",
@@ -284,7 +303,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"Thanks to PyTorch's ability to calculate gradients automatically, we can\n",
"use any standard Python function (or callable object) as a model! So\n",
@@ -300,14 +321,15 @@
},
{
"cell_type": "code",
- "execution_count": 6,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
},
- "outputs": [],
+ "outputs": [
+ ],
"source": [
"def log_softmax(x):\n",
" return x - x.exp().sum(-1).log().unsqueeze(-1)\n",
@@ -318,7 +340,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"In the above, the ``@`` stands for the dot product operation. We will call\n",
"our function on one batch of data (in this case, 64 images). This is\n",
@@ -329,7 +353,7 @@
},
{
"cell_type": "code",
- "execution_count": 7,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
@@ -337,14 +361,6 @@
}
},
"outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "tensor([-2.2680, -1.7434, -2.2746, -2.7562, -2.7793, -2.4086, -2.2656, -2.2761,\n",
- " -2.1634, -2.5035], grad_fn=<SelectBackward>) torch.Size([64, 10])\n"
- ]
- }
],
"source": [
"bs = 64 # batch size\n",
@@ -357,7 +373,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"As you see, the ``preds`` tensor contains not only the tensor values, but also a\n",
"gradient function. We'll use this later to do backprop.\n",
@@ -369,14 +387,15 @@
},
{
"cell_type": "code",
- "execution_count": 8,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
},
- "outputs": [],
+ "outputs": [
+ ],
"source": [
"def nll(input, target):\n",
" return -input[range(target.shape[0]), target].mean()\n",
@@ -386,7 +405,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"Let's check our loss with our random model, so we can see if we improve\n",
"after a backprop pass later.\n",
@@ -395,7 +416,7 @@
},
{
"cell_type": "code",
- "execution_count": 9,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
@@ -403,13 +424,6 @@
}
},
"outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "tensor(2.4159, grad_fn=<NegBackward>)\n"
- ]
- }
],
"source": [
"yb = y_train[0:bs]\n",
@@ -418,7 +432,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"Let's also implement a function to calculate the accuracy of our model.\n",
"For each prediction, if the index with the largest value matches the\n",
@@ -428,14 +444,15 @@
},
{
"cell_type": "code",
- "execution_count": 10,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
},
- "outputs": [],
+ "outputs": [
+ ],
"source": [
"def accuracy(out, yb):\n",
" preds = torch.argmax(out, dim=1)\n",
@@ -444,7 +461,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"Let's check the accuracy of our random model, so we can see if our\n",
"accuracy improves as our loss improves.\n",
@@ -453,7 +472,7 @@
},
{
"cell_type": "code",
- "execution_count": 11,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
@@ -461,13 +480,6 @@
}
},
"outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "tensor(0.0781)\n"
- ]
- }
],
"source": [
"print(accuracy(preds, yb))"
@@ -475,7 +487,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"We can now run a training loop. For each iteration, we will:\n",
"\n",
@@ -506,14 +520,15 @@
},
{
"cell_type": "code",
- "execution_count": 12,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
},
- "outputs": [],
+ "outputs": [
+ ],
"source": [
"from IPython.core.debugger import set_trace\n",
"\n",
@@ -540,7 +555,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"That's it: we've created and trained a minimal neural network (in this case, a\n",
"logistic regression, since we have no hidden layers) entirely from scratch!\n",
@@ -553,7 +570,7 @@
},
{
"cell_type": "code",
- "execution_count": 13,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
@@ -561,13 +578,6 @@
}
},
"outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "tensor(0.0825, grad_fn=<NegBackward>) tensor(1.)\n"
- ]
- }
],
"source": [
"print(loss_func(model(xb), yb), accuracy(model(xb), yb))"
@@ -575,7 +585,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"Using torch.nn.functional\n",
"------------------------------\n",
@@ -603,14 +615,15 @@
},
{
"cell_type": "code",
- "execution_count": 14,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
},
- "outputs": [],
+ "outputs": [
+ ],
"source": [
"import torch.nn.functional as F\n",
"\n",
@@ -622,7 +635,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"Note that we no longer call ``log_softmax`` in the ``model`` function. Let's\n",
"confirm that our loss and accuracy are the same as before:\n",
@@ -631,7 +646,7 @@
},
{
"cell_type": "code",
- "execution_count": 15,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
@@ -639,13 +654,6 @@
}
},
"outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "tensor(0.0825, grad_fn=<NllLossBackward>) tensor(1.)\n"
- ]
- }
],
"source": [
"print(loss_func(model(xb), yb), accuracy(model(xb), yb))"
@@ -653,7 +661,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"Refactor using nn.Module\n",
"-----------------------------\n",
@@ -673,14 +683,15 @@
},
{
"cell_type": "code",
- "execution_count": 16,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
},
- "outputs": [],
+ "outputs": [
+ ],
"source": [
"from torch import nn\n",
"\n",
@@ -696,7 +707,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"Since we're now using an object instead of just using a function, we\n",
"first have to instantiate our model:\n",
@@ -705,21 +718,24 @@
},
{
"cell_type": "code",
- "execution_count": 17,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
},
- "outputs": [],
+ "outputs": [
+ ],
"source": [
"model = Mnist_Logistic()"
]
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"Now we can calculate the loss in the same way as before. Note that\n",
"``nn.Module`` objects are used as if they are functions (i.e they are\n",
@@ -730,7 +746,7 @@
},
{
"cell_type": "code",
- "execution_count": 18,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
@@ -738,13 +754,6 @@
}
},
"outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "tensor(2.1396, grad_fn=<NllLossBackward>)\n"
- ]
- }
],
"source": [
"print(loss_func(model(xb), yb))"
@@ -752,7 +761,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"Previously for our training loop we had to update the values for each parameter\n",
"by name, and manually zero out the grads for each parameter separately, like this:\n",
@@ -781,14 +792,15 @@
},
{
"cell_type": "code",
- "execution_count": 19,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
},
- "outputs": [],
+ "outputs": [
+ ],
"source": [
"def fit():\n",
" for epoch in range(epochs):\n",
@@ -811,7 +823,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"Let's double-check that our loss has gone down:\n",
"\n"
@@ -819,7 +833,7 @@
},
{
"cell_type": "code",
- "execution_count": 20,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
@@ -827,13 +841,6 @@
}
},
"outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "tensor(0.0820, grad_fn=<NllLossBackward>)\n"
- ]
- }
],
"source": [
"print(loss_func(model(xb), yb))"
@@ -841,7 +848,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"Refactor using nn.Linear\n",
"-------------------------\n",
@@ -858,14 +867,15 @@
},
{
"cell_type": "code",
- "execution_count": 21,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
},
- "outputs": [],
+ "outputs": [
+ ],
"source": [
"class Mnist_Logistic(nn.Module):\n",
" def __init__(self):\n",
@@ -878,7 +888,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"We instantiate our model and calculate the loss in the same way as before:\n",
"\n"
@@ -886,7 +898,7 @@
},
{
"cell_type": "code",
- "execution_count": 22,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
@@ -894,13 +906,6 @@
}
},
"outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "tensor(2.2840, grad_fn=<NllLossBackward>)\n"
- ]
- }
],
"source": [
"model = Mnist_Logistic()\n",
@@ -909,7 +914,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"We are still able to use our same ``fit`` method as before.\n",
"\n"
@@ -917,7 +924,7 @@
},
{
"cell_type": "code",
- "execution_count": 23,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
@@ -925,13 +932,6 @@
}
},
"outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "tensor(0.0798, grad_fn=<NllLossBackward>)\n"
- ]
- }
],
"source": [
"fit()\n",
@@ -941,7 +941,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"Refactor using optim\n",
"------------------------------\n",
@@ -968,21 +970,24 @@
},
{
"cell_type": "code",
- "execution_count": 24,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
},
- "outputs": [],
+ "outputs": [
+ ],
"source": [
"from torch import optim"
]
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"We'll define a little function to create our model and optimizer so we\n",
"can reuse it in the future.\n",
@@ -991,7 +996,7 @@
},
{
"cell_type": "code",
- "execution_count": 25,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
@@ -999,14 +1004,6 @@
}
},
"outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "tensor(2.2706, grad_fn=<NllLossBackward>)\n",
- "tensor(0.0798, grad_fn=<NllLossBackward>)\n"
- ]
- }
],
"source": [
"def get_model():\n",
@@ -1034,7 +1031,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"Refactor using Dataset\n",
"------------------------------\n",
@@ -1056,21 +1055,24 @@
},
{
"cell_type": "code",
- "execution_count": 26,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
},
- "outputs": [],
+ "outputs": [
+ ],
"source": [
"from torch.utils.data import TensorDataset"
]
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"Both ``x_train`` and ``y_train`` can be combined in a single ``TensorDataset``,\n",
"which will be easier to iterate over and slice.\n",
@@ -1079,21 +1081,24 @@
},
{
"cell_type": "code",
- "execution_count": 27,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
},
- "outputs": [],
+ "outputs": [
+ ],
"source": [
"train_ds = TensorDataset(x_train, y_train)"
]
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"Previously, we had to iterate through minibatches of x and y values separately:\n",
"::\n",
@@ -1110,7 +1115,7 @@
},
{
"cell_type": "code",
- "execution_count": 28,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
@@ -1118,13 +1123,6 @@
}
},
"outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "tensor(0.0817, grad_fn=<NllLossBackward>)\n"
- ]
- }
],
"source": [
"model, opt = get_model()\n",
@@ -1144,7 +1142,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"Refactor using DataLoader\n",
"------------------------------\n",
@@ -1158,14 +1158,15 @@
},
{
"cell_type": "code",
- "execution_count": 29,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
},
- "outputs": [],
+ "outputs": [
+ ],
"source": [
"from torch.utils.data import DataLoader\n",
"\n",
@@ -1175,7 +1176,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"Previously, our loop iterated over batches (xb, yb) like this:\n",
"::\n",
@@ -1192,7 +1195,7 @@
},
{
"cell_type": "code",
- "execution_count": 30,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
@@ -1200,13 +1203,6 @@
}
},
"outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "tensor(0.0815, grad_fn=<NllLossBackward>)\n"
- ]
- }
],
"source": [
"model, opt = get_model()\n",
@@ -1225,7 +1221,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"Thanks to Pytorch's ``nn.Module``, ``nn.Parameter``, ``Dataset``, and ``DataLoader``,\n",
"our training loop is now dramatically smaller and easier to understand. Let's\n",
@@ -1255,14 +1253,15 @@
},
{
"cell_type": "code",
- "execution_count": 31,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
},
- "outputs": [],
+ "outputs": [
+ ],
"source": [
"train_ds = TensorDataset(x_train, y_train)\n",
"train_dl = DataLoader(train_ds, batch_size=bs, shuffle=True)\n",
@@ -1273,7 +1272,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"We will calculate and print the validation loss at the end of each epoch.\n",
"\n",
@@ -1285,7 +1286,7 @@
},
{
"cell_type": "code",
- "execution_count": 32,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
@@ -1293,14 +1294,6 @@
}
},
"outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "0 tensor(0.3232)\n",
- "1 tensor(0.2736)\n"
- ]
- }
],
"source": [
"model, opt = get_model()\n",
@@ -1324,7 +1317,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"Create fit() and get_data()\n",
"----------------------------------\n",
@@ -1342,14 +1337,15 @@
},
{
"cell_type": "code",
- "execution_count": 33,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
},
- "outputs": [],
+ "outputs": [
+ ],
"source": [
"def loss_batch(model, loss_func, xb, yb, opt=None):\n",
" loss = loss_func(model(xb), yb)\n",
@@ -1364,7 +1360,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"``fit`` runs the necessary operations to train our model and compute the\n",
"training and validation losses for each epoch.\n",
@@ -1373,14 +1371,15 @@
},
{
"cell_type": "code",
- "execution_count": 34,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
},
- "outputs": [],
+ "outputs": [
+ ],
"source": [
"import numpy as np\n",
"\n",
@@ -1402,7 +1401,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"``get_data`` returns dataloaders for the training and validation sets.\n",
"\n"
@@ -1410,14 +1411,15 @@
},
{
"cell_type": "code",
- "execution_count": 35,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
},
- "outputs": [],
+ "outputs": [
+ ],
"source": [
"def get_data(train_ds, valid_ds, bs):\n",
" return (\n",
@@ -1428,7 +1430,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"Now, our whole process of obtaining the data loaders and fitting the\n",
"model can be run in 3 lines of code:\n",
@@ -1437,7 +1441,7 @@
},
{
"cell_type": "code",
- "execution_count": 36,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
@@ -1445,14 +1449,6 @@
}
},
"outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "0 0.36182342684268953\n",
- "1 0.3086622476875782\n"
- ]
- }
],
"source": [
"train_dl, valid_dl = get_data(train_ds, valid_ds, bs)\n",
@@ -1462,7 +1458,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"You can use these basic 3 lines of code to train a wide variety of models.\n",
"Let's see if we can use them to train a convolutional neural network (CNN)!\n",
@@ -1485,14 +1483,15 @@
},
{
"cell_type": "code",
- "execution_count": 38,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
},
- "outputs": [],
+ "outputs": [
+ ],
"source": [
"class Mnist_CNN(nn.Module):\n",
" def __init__(self):\n",
@@ -1514,7 +1513,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"`Momentum <https://cs231n.github.io/neural-networks-3/#sgd>`_ is a variation on\n",
"stochastic gradient descent that takes previous updates into account as well\n",
@@ -1524,7 +1525,7 @@
},
{
"cell_type": "code",
- "execution_count": 39,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
@@ -1532,14 +1533,6 @@
}
},
"outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "0 0.30878638651371004\n",
- "1 0.25200295938253403\n"
- ]
- }
],
"source": [
"model = Mnist_CNN()\n",
@@ -1550,7 +1543,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"nn.Sequential\n",
"------------------------\n",
@@ -1570,14 +1565,15 @@
},
{
"cell_type": "code",
- "execution_count": 40,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
},
- "outputs": [],
+ "outputs": [
+ ],
"source": [
"class Lambda(nn.Module):\n",
" def __init__(self, func):\n",
@@ -1594,7 +1590,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"The model created with ``Sequential`` is simply:\n",
"\n"
@@ -1602,7 +1600,7 @@
},
{
"cell_type": "code",
- "execution_count": 41,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
@@ -1610,14 +1608,6 @@
}
},
"outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "0 0.32227418100833893\n",
- "1 0.2695485789179802\n"
- ]
- }
],
"source": [
"model = nn.Sequential(\n",
@@ -1639,7 +1629,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"Wrapping DataLoader\n",
"-----------------------------\n",
@@ -1657,14 +1649,15 @@
},
{
"cell_type": "code",
- "execution_count": 42,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
},
- "outputs": [],
+ "outputs": [
+ ],
"source": [
"def preprocess(x, y):\n",
" return x.view(-1, 1, 28, 28), y\n",
@@ -1690,7 +1683,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"Next, we can replace ``nn.AvgPool2d`` with ``nn.AdaptiveAvgPool2d``, which\n",
"allows us to define the size of the *output* tensor we want, rather than\n",
@@ -1701,14 +1696,15 @@
},
{
"cell_type": "code",
- "execution_count": 43,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
},
- "outputs": [],
+ "outputs": [
+ ],
"source": [
"model = nn.Sequential(\n",
" nn.Conv2d(1, 16, kernel_size=3, stride=2, padding=1),\n",
@@ -1726,7 +1722,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"Let's try it out:\n",
"\n"
@@ -1734,7 +1732,7 @@
},
{
"cell_type": "code",
- "execution_count": 44,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
@@ -1742,14 +1740,6 @@
}
},
"outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "0 0.3791842395067215\n",
- "1 0.26341770286560057\n"
- ]
- }
],
"source": [
"fit(epochs, model, loss_func, opt, train_dl, valid_dl)"
@@ -1757,7 +1747,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"Using your GPU\n",
"---------------\n",
@@ -1771,7 +1763,7 @@
},
{
"cell_type": "code",
- "execution_count": 45,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
@@ -1779,13 +1771,6 @@
}
},
"outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "False\n"
- ]
- }
],
"source": [
"print(torch.cuda.is_available())"
@@ -1793,7 +1778,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"And then create a device object for it:\n",
"\n"
@@ -1801,14 +1788,15 @@
},
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
},
- "outputs": [],
+ "outputs": [
+ ],
"source": [
"dev = torch.device(\n",
" \"cuda\") if torch.cuda.is_available() else torch.device(\"cpu\")"
@@ -1816,7 +1804,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"Let's update ``preprocess`` to move batches to the GPU:\n",
"\n"
@@ -1824,14 +1814,15 @@
},
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
},
- "outputs": [],
+ "outputs": [
+ ],
"source": [
"def preprocess(x, y):\n",
" return x.view(-1, 1, 28, 28).to(dev), y.to(dev)\n",
@@ -1844,7 +1835,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"Finally, we can move our model to the GPU.\n",
"\n"
@@ -1852,14 +1845,15 @@
},
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
},
- "outputs": [],
+ "outputs": [
+ ],
"source": [
"model.to(dev)\n",
"opt = optim.SGD(model.parameters(), lr=lr, momentum=0.9)"
@@ -1867,7 +1861,9 @@
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"You should find it runs faster now:\n",
"\n"
@@ -1875,21 +1871,24 @@
},
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 0,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
},
- "outputs": [],
+ "outputs": [
+ ],
"source": [
"fit(epochs, model, loss_func, opt, train_dl, valid_dl)"
]
},
{
"cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+ "collapsed": false
+ },
"source": [
"Closing thoughts\n",
"-----------------\n",
@@ -1929,9 +1928,10 @@
],
"metadata": {
"kernelspec": {
- "display_name": "PyTorch",
+ "display_name": "Python 3 (Ubuntu Linux)",
"language": "python",
- "name": "pytorch"
+ "name": "python3-ubuntu",
+ "resource_dir": "/usr/local/share/jupyter/kernels/python3-ubuntu"
},
"language_info": {
"codemirror_mode": {
@@ -1943,9 +1943,9 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.6"
+ "version": "3.8.10"
}
},
"nbformat": 4,
"nbformat_minor": 4
-}
+} \ No newline at end of file