add all files
93
.gitignore
vendored
Normal file
@@ -0,0 +1,93 @@
|
||||
.DS_Store
|
||||
.floydexpt
|
||||
.floydignore
|
||||
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
env/
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*,cover
|
||||
.hypothesis/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
local_settings.py
|
||||
|
||||
# Flask stuff:
|
||||
instance/
|
||||
.webassets-cache
|
||||
|
||||
# Scrapy stuff:
|
||||
.scrapy
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
target/
|
||||
|
||||
# IPython Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# pyenv
|
||||
.python-version
|
||||
|
||||
# celery beat schedule file
|
||||
celerybeat-schedule
|
||||
|
||||
# dotenv
|
||||
.env
|
||||
|
||||
# virtualenv
|
||||
venv/
|
||||
ENV/
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
|
||||
# Rope project settings
|
||||
.ropeproject
|
||||
22
.travis.yml
Normal file
@@ -0,0 +1,22 @@
|
||||
dist: xenial
|
||||
language: python
|
||||
python:
|
||||
- "3.7"
|
||||
install:
|
||||
- sudo apt-get update
|
||||
# Install Miniconda so that the test environment can be created
|
||||
# from environment.yml further down in this file.
|
||||
- wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh;
|
||||
- bash miniconda.sh -b -p $HOME/miniconda
|
||||
- export PATH="$HOME/miniconda/bin:$PATH"
|
||||
- hash -r
|
||||
- conda config --set always_yes yes --set changeps1 no
|
||||
- conda update -q conda
|
||||
# Useful for debugging any issues with conda
|
||||
- conda info -a
|
||||
|
||||
- conda env create -q -n test-environment python=$TRAVIS_PYTHON_VERSION -f environment.yml
|
||||
- source activate test-environment
|
||||
|
||||
script:
|
||||
- travis_wait 30 py.test -v
|
||||
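The script above ends by running `py.test -v`, but no test files appear in the portion of the diff shown here. As a hedged illustration only, a minimal pytest module that the CI could collect might check the same environment the course notebooks rely on (the file name `test_environment.py` and its contents are assumptions, not part of this commit):

# test_environment.py -- hypothetical sketch, not part of this commit
import sys


def test_python_version():
    # .travis.yml requests python "3.7"
    assert sys.version_info[:2] == (3, 7)


def test_core_imports():
    # the same packages are imported in course/0_Check_Environment.ipynb
    import numpy, matplotlib, sklearn, scipy, pandas, PIL, seaborn, tensorflow  # noqa: F401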
37
LICENSE
Normal file
@@ -0,0 +1,37 @@
|
||||
COPYRIGHT
|
||||
|
||||
All contributions by Francesco Mosconi:
|
||||
Copyright (c) 2017, Francesco Mosconi.
|
||||
All rights reserved.
|
||||
|
||||
All contributions by Catalit LLC:
|
||||
Copyright (c) 2017, Catalit LLC.
|
||||
All rights reserved.
|
||||
|
||||
All other contributions:
|
||||
Copyright (c) 2015, the respective contributors.
|
||||
All rights reserved.
|
||||
|
||||
Each contributor holds copyright over their respective contributions.
|
||||
The project versioning (Git) records all such contribution source information.
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2017
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
6
Untitled.ipynb
Normal file
@@ -0,0 +1,6 @@
|
||||
{
|
||||
"cells": [],
|
||||
"metadata": {},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
|
||||
206
course/0_Check_Environment.ipynb
Normal file
@@ -0,0 +1,206 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# Check Environment\n",
|
||||
"This notebook checks that you have correctly created the environment and that all packages needed are installed."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Environment\n",
|
||||
"\n",
|
||||
"The next command should return a line like (Mac/Linux):\n",
|
||||
"\n",
|
||||
" /<YOUR-HOME-FOLDER>/anaconda/envs/ztdl/bin/python\n",
|
||||
"\n",
|
||||
"or like (Windows 10):\n",
|
||||
"\n",
|
||||
" C:\\\\<YOUR-HOME-FOLDER>\\\\Anaconda3\\\\envs\\\\ztdl\\\\python.exe\n",
|
||||
"\n",
|
||||
"In particular you should make sure that you are using the python executable from within the course environment.\n",
|
||||
"\n",
|
||||
"If that's not the case do this:\n",
|
||||
"\n",
|
||||
"1. close this notebook\n",
|
||||
"2. go to the terminal and stop jupyter notebook\n",
|
||||
"3. make sure that you have activated the environment; you should see a prompt like:\n",
|
||||
"\n",
|
||||
" (ztdl) $\n",
|
||||
"4. (optional) if you don't see that prompt, activate the environment:\n",
|
||||
" - mac/linux:\n",
|
||||
" \n",
|
||||
" conda activate ztdl\n",
|
||||
"\n",
|
||||
" - windows:\n",
|
||||
"\n",
|
||||
" activate ztdl\n",
|
||||
"5. restart jupyter notebook"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import os\n",
|
||||
"import sys\n",
|
||||
"sys.executable"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Python 3.7\n",
|
||||
"\n",
|
||||
"The next line should say that you're using Python 3.7.x from Anaconda. At the time of publication it looks like this (Mac/Linux):\n",
|
||||
"\n",
|
||||
" Python 3.7.3 (default, Mar 27 2019, 22:11:17)\n",
|
||||
" [GCC 7.3.0] :: Anaconda, Inc. on linux\n",
|
||||
" Type \"help\", \"copyright\", \"credits\" or \"license\" for more information.\n",
|
||||
"\n",
|
||||
"or like this (Windows 10):\n",
|
||||
"\n",
|
||||
" Python 3.7.3 (default, Apr 24 2019, 15:29:51) [MSC v.1915 64 bit (AMD64)] :: Anaconda, Inc. on win32\n",
|
||||
" Type \"help\", \"copyright\", \"credits\" or \"license\" for more information.\n",
|
||||
"\n",
|
||||
"but the date and exact version of GCC may change in the future.\n",
|
||||
"\n",
|
||||
"If you see a different version of python, go back to the previous step and make sure you created and activated the environment correctly."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import sys\n",
|
||||
"sys.version"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Jupyter\n",
|
||||
"\n",
|
||||
"Check that Jupyter is running from within the environment. The next line should look like (Mac/Linux):\n",
|
||||
"\n",
|
||||
"    /<YOUR-HOME-FOLDER>/anaconda/envs/ztdl/lib/python3.7/site-packages/jupyter.py\n",
|
||||
"\n",
|
||||
"or like this (Windows 10):\n",
|
||||
"\n",
|
||||
" C:\\\\Users\\\\<YOUR-USER>\\\\Anaconda3\\\\envs\\\\ztdl\\\\lib\\\\site-packages\\\\jupyter.py"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import jupyter\n",
|
||||
"jupyter.__file__"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Other packages\n",
|
||||
"\n",
|
||||
"Here we will check that all the packages are installed and have the correct versions. If everything is ok you should see:\n",
|
||||
" \n",
|
||||
" Using TensorFlow backend.\n",
|
||||
" \n",
|
||||
" Houston we are go!\n",
|
||||
"\n",
|
||||
"If there's any issue here, please make sure you have checked the previous steps; if it's all good, please send us a question in the Q&A forum."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import pip\n",
|
||||
"import numpy\n",
|
||||
"import jupyter\n",
|
||||
"import matplotlib\n",
|
||||
"import sklearn\n",
|
||||
"import scipy\n",
|
||||
"import pandas\n",
|
||||
"import PIL\n",
|
||||
"import seaborn\n",
|
||||
"import tensorflow\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"def check_version(pkg, version):\n",
|
||||
" actual = pkg.__version__.split('.')\n",
|
||||
" if len(actual) == 3:\n",
|
||||
" actual_major = '.'.join(actual[:2])\n",
|
||||
" elif len(actual) == 2:\n",
|
||||
" actual_major = '.'.join(actual)\n",
|
||||
" else:\n",
|
||||
" raise NotImplementedError(pkg.__name__ +\n",
|
||||
" \"actual version :\"+\n",
|
||||
" pkg.__version__)\n",
|
||||
" try:\n",
|
||||
" assert(actual_major == version)\n",
|
||||
" except Exception as ex:\n",
|
||||
" print(\"{} {}\\t=> {}\".format(pkg.__name__,\n",
|
||||
" version,\n",
|
||||
" pkg.__version__))\n",
|
||||
" raise ex\n",
|
||||
"\n",
|
||||
"check_version(pip, '21.0')\n",
|
||||
"check_version(numpy, '1.19')\n",
|
||||
"check_version(matplotlib, '3.3')\n",
|
||||
"check_version(sklearn, '0.24')\n",
|
||||
"check_version(scipy, '1.6')\n",
|
||||
"check_version(pandas, '1.2')\n",
|
||||
"check_version(PIL, '8.2')\n",
|
||||
"check_version(seaborn, '0.11')\n",
|
||||
"check_version(tensorflow, '2.5')\n",
|
||||
"\n",
|
||||
"print(\"Houston we are go!\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.7.10"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
||||
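The `check_version` cell above only compares the major.minor part of each installed package against a pinned string. The same check can also be run outside Jupyter as a plain script; a minimal sketch of that idea (the pins are copied from the notebook, everything else is illustrative and not part of this commit):

# version_check.py -- sketch of the notebook's version-pinning idea, not part of this commit
import importlib

PINS = {"numpy": "1.19", "matplotlib": "3.3", "sklearn": "0.24", "scipy": "1.6",
        "pandas": "1.2", "PIL": "8.2", "seaborn": "0.11", "tensorflow": "2.5"}

for name, pin in PINS.items():
    module = importlib.import_module(name)
    major_minor = ".".join(module.__version__.split(".")[:2])
    status = "OK" if major_minor == pin else "MISMATCH"
    print(f"{name:<12} expected {pin:<6} got {module.__version__:<12} {status}")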
190
course/1 First Deep Learning Model-Copy1.ipynb
Normal file
@@ -0,0 +1,190 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# First Deep Learning Model"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import numpy as np # import the numpy library and assign the name np to it\n",
|
||||
"# magic function that sets the backend of matplotlib to the inline backend\n",
"%matplotlib inline\n",
|
||||
"import matplotlib.pyplot as plt # import the matplotlib.pyplot and assign the name plt to it"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from sklearn.datasets import make_circles"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X, y = make_circles(n_samples=1000,\n",
|
||||
" noise=0.1,\n",
|
||||
" factor=0.2,\n",
|
||||
" random_state=0)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"plt.figure(figsize=(5, 5))\n",
|
||||
"plt.plot(X[y==0, 0], X[y==0, 1], 'ob', alpha=0.5)\n",
|
||||
"plt.plot(X[y==1, 0], X[y==1, 1], 'xr', alpha=0.5)\n",
|
||||
"plt.xlim(-1.5, 1.5)\n",
|
||||
"plt.ylim(-1.5, 1.5)\n",
|
||||
"plt.legend(['0', '1'])\n",
|
||||
"plt.title(\"Blue circles and Red crosses\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from tensorflow.keras.models import Sequential\n",
|
||||
"from tensorflow.keras.layers import Dense\n",
|
||||
"from tensorflow.keras.optimizers import SGD"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model = Sequential()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.add(Dense(4, input_shape=(2,), activation='tanh'))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.add(Dense(1, activation='sigmoid'))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.compile(SGD(learning_rate=0.5), 'binary_crossentropy', metrics=['accuracy'])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.fit(X, y, epochs=20)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"hticks = np.linspace(-1.5, 1.5, 101)\n",
|
||||
"vticks = np.linspace(-1.5, 1.5, 101)\n",
|
||||
"aa, bb = np.meshgrid(hticks, vticks)\n",
|
||||
"ab = np.c_[aa.ravel(), bb.ravel()]\n",
|
||||
"c = model.predict(ab)\n",
|
||||
"cc = c.reshape(aa.shape)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"plt.figure(figsize=(5, 5))\n",
|
||||
"plt.contourf(aa, bb, cc, cmap='bwr', alpha=0.2)\n",
|
||||
"plt.plot(X[y==0, 0], X[y==0, 1], 'ob', alpha=0.5)\n",
|
||||
"plt.plot(X[y==1, 0], X[y==1, 1], 'xr', alpha=0.5)\n",
|
||||
"plt.xlim(-1.5, 1.5)\n",
|
||||
"plt.ylim(-1.5, 1.5)\n",
|
||||
"plt.legend(['0', '1'])\n",
|
||||
"plt.title(\"Blue circles and Red crosses\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.7.10"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
||||
190
course/1 First Deep Learning Model.ipynb
Normal file
@@ -0,0 +1,190 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# First Deep Learning Model"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import numpy as np\n",
|
||||
"%matplotlib inline\n",
|
||||
"import matplotlib.pyplot as plt"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from sklearn.datasets import make_circles"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X, y = make_circles(n_samples=1000,\n",
|
||||
" noise=0.1,\n",
|
||||
" factor=0.2,\n",
|
||||
" random_state=0)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"plt.figure(figsize=(5, 5))\n",
|
||||
"plt.plot(X[y==0, 0], X[y==0, 1], 'ob', alpha=0.5)\n",
|
||||
"plt.plot(X[y==1, 0], X[y==1, 1], 'xr', alpha=0.5)\n",
|
||||
"plt.xlim(-1.5, 1.5)\n",
|
||||
"plt.ylim(-1.5, 1.5)\n",
|
||||
"plt.legend(['0', '1'])\n",
|
||||
"plt.title(\"Blue circles and Red crosses\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from tensorflow.keras.models import Sequential\n",
|
||||
"from tensorflow.keras.layers import Dense\n",
|
||||
"from tensorflow.keras.optimizers import SGD"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model = Sequential()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.add(Dense(4, input_shape=(2,), activation='tanh'))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.add(Dense(1, activation='sigmoid'))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.compile(SGD(learning_rate=0.5), 'binary_crossentropy', metrics=['accuracy'])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.fit(X, y, epochs=20)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"hticks = np.linspace(-1.5, 1.5, 101)\n",
|
||||
"vticks = np.linspace(-1.5, 1.5, 101)\n",
|
||||
"aa, bb = np.meshgrid(hticks, vticks)\n",
|
||||
"ab = np.c_[aa.ravel(), bb.ravel()]\n",
|
||||
"c = model.predict(ab)\n",
|
||||
"cc = c.reshape(aa.shape)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"plt.figure(figsize=(5, 5))\n",
|
||||
"plt.contourf(aa, bb, cc, cmap='bwr', alpha=0.2)\n",
|
||||
"plt.plot(X[y==0, 0], X[y==0, 1], 'ob', alpha=0.5)\n",
|
||||
"plt.plot(X[y==1, 0], X[y==1, 1], 'xr', alpha=0.5)\n",
|
||||
"plt.xlim(-1.5, 1.5)\n",
|
||||
"plt.ylim(-1.5, 1.5)\n",
|
||||
"plt.legend(['0', '1'])\n",
|
||||
"plt.title(\"Blue circles and Red crosses\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.7.10"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
||||
1892
course/2 Data.ipynb
Normal file
File diff suppressed because one or more lines are too long
1177
course/3 Machine Learning.ipynb
Normal file
File diff suppressed because one or more lines are too long
560
course/4 Deep Learning Intro.ipynb
Normal file
@@ -0,0 +1,560 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# Deep Learning Intro"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"%matplotlib inline\n",
|
||||
"import matplotlib.pyplot as plt\n",
|
||||
"import pandas as pd\n",
|
||||
"import numpy as np"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Shallow and Deep Networks"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from sklearn.datasets import make_moons\n",
|
||||
"\n",
|
||||
"X, y = make_moons(n_samples=1000, noise=0.1, random_state=0)\n",
|
||||
"plt.plot(X[y==0, 0], X[y==0, 1], 'ob', alpha=0.5)\n",
|
||||
"plt.plot(X[y==1, 0], X[y==1, 1], 'xr', alpha=0.5)\n",
|
||||
"plt.legend(['0', '1'])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from sklearn.model_selection import train_test_split"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X_train, X_test, y_train, y_test = train_test_split(X, y,\n",
|
||||
" test_size=0.3,\n",
|
||||
" random_state=42)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from tensorflow.keras.models import Sequential\n",
|
||||
"from tensorflow.keras.layers import Dense\n",
|
||||
"from tensorflow.keras.optimizers import SGD, Adam"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Shallow Model"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model = Sequential()\n",
|
||||
"model.add(Dense(1, input_shape=(2,), activation='sigmoid'))\n",
|
||||
"model.compile(Adam(learning_rate=0.05), 'binary_crossentropy', metrics=['accuracy'])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.fit(X_train, y_train, epochs=200, verbose=0)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"results = model.evaluate(X_test, y_test)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"results"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"print(\"The Accuracy score on the Test set is:\\t{:0.3f}\".format(results[1]))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"def plot_decision_boundary(model, X, y):\n",
|
||||
" amin, bmin = X.min(axis=0) - 0.1\n",
|
||||
" amax, bmax = X.max(axis=0) + 0.1\n",
|
||||
" hticks = np.linspace(amin, amax, 101)\n",
|
||||
" vticks = np.linspace(bmin, bmax, 101)\n",
|
||||
" \n",
|
||||
" aa, bb = np.meshgrid(hticks, vticks)\n",
|
||||
" ab = np.c_[aa.ravel(), bb.ravel()]\n",
|
||||
" \n",
|
||||
" c = model.predict(ab)\n",
|
||||
" cc = c.reshape(aa.shape)\n",
|
||||
"\n",
|
||||
" plt.figure(figsize=(12, 8))\n",
|
||||
" plt.contourf(aa, bb, cc, cmap='bwr', alpha=0.2)\n",
|
||||
" plt.plot(X[y==0, 0], X[y==0, 1], 'ob', alpha=0.5)\n",
|
||||
" plt.plot(X[y==1, 0], X[y==1, 1], 'xr', alpha=0.5)\n",
|
||||
" plt.legend(['0', '1'])\n",
|
||||
" \n",
|
||||
"plot_decision_boundary(model, X, y)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Deep model"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model = Sequential()\n",
|
||||
"model.add(Dense(4, input_shape=(2,), activation='tanh'))\n",
|
||||
"model.add(Dense(2, activation='tanh'))\n",
|
||||
"model.add(Dense(1, activation='sigmoid'))\n",
|
||||
"model.compile(Adam(learning_rate=0.05), 'binary_crossentropy', metrics=['accuracy'])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.fit(X_train, y_train, epochs=100, verbose=0)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.evaluate(X_test, y_test)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from sklearn.metrics import accuracy_score, confusion_matrix"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"y_train_pred = model.predict_classes(X_train)\n",
|
||||
"y_test_pred = model.predict_classes(X_test)\n",
|
||||
"\n",
|
||||
"print(\"The Accuracy score on the Train set is:\\t{:0.3f}\".format(accuracy_score(y_train, y_train_pred)))\n",
|
||||
"print(\"The Accuracy score on the Test set is:\\t{:0.3f}\".format(accuracy_score(y_test, y_test_pred)))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"plot_decision_boundary(model, X, y)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Multiclass classification\n",
|
||||
"\n",
|
||||
"### The Iris dataset"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df = pd.read_csv('../data/iris.csv')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import seaborn as sns\n",
|
||||
"sns.pairplot(df, hue=\"species\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df.head()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X = df.drop('species', axis=1)\n",
|
||||
"X.head()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"target_names = df['species'].unique()\n",
|
||||
"target_names"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"target_dict = {n:i for i, n in enumerate(target_names)}\n",
|
||||
"target_dict"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"y= df['species'].map(target_dict)\n",
|
||||
"y.head()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from tensorflow.keras.utils import to_categorical"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"y_cat = to_categorical(y)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"y_cat[:10]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X_train, X_test, y_train, y_test = train_test_split(X.values, y_cat,\n",
|
||||
" test_size=0.2)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model = Sequential()\n",
|
||||
"model.add(Dense(3, input_shape=(4,), activation='softmax'))\n",
|
||||
"model.compile(Adam(learning_rate=0.1),\n",
|
||||
" loss='categorical_crossentropy',\n",
|
||||
" metrics=['accuracy'])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.fit(X_train, y_train, epochs=20, validation_split=0.1)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"y_pred = model.predict(X_test)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"y_pred[:5]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"y_test_class = np.argmax(y_test, axis=1)\n",
|
||||
"y_pred_class = np.argmax(y_pred, axis=1)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from sklearn.metrics import classification_report"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"print(classification_report(y_test_class, y_pred_class))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"confusion_matrix(y_test_class, y_pred_class)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Exercise 1"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {
|
||||
"collapsed": true
|
||||
},
|
||||
"source": [
|
||||
"The [Pima Indians dataset](https://archive.ics.uci.edu/ml/datasets/diabetes) is a very famous dataset distributed by UCI and originally collected from the National Institute of Diabetes and Digestive and Kidney Diseases. It contains data from clinical exams for women aged 21 and above of Pima Indian origin. The objective is to predict, based on diagnostic measurements, whether a patient has diabetes.\n",
|
||||
"\n",
|
||||
"It has the following features:\n",
|
||||
"\n",
|
||||
"- Pregnancies: Number of times pregnant\n",
|
||||
"- Glucose: Plasma glucose concentration at 2 hours in an oral glucose tolerance test\n",
|
||||
"- BloodPressure: Diastolic blood pressure (mm Hg)\n",
|
||||
"- SkinThickness: Triceps skin fold thickness (mm)\n",
|
||||
"- Insulin: 2-Hour serum insulin (mu U/ml)\n",
|
||||
"- BMI: Body mass index (weight in kg/(height in m)^2)\n",
|
||||
"- DiabetesPedigreeFunction: Diabetes pedigree function\n",
|
||||
"- Age: Age (years)\n",
|
||||
"\n",
|
||||
"The last column is the outcome, and it is a binary variable.\n",
|
||||
"\n",
|
||||
"In this first exercise we will explore it through the following steps:\n",
|
||||
"\n",
|
||||
"1. Load the ../data/diabetes.csv dataset and use pandas to explore the range of each feature\n",
|
||||
"- For each feature draw a histogram. Bonus points if you draw all the histograms in the same figure.\n",
|
||||
"- Explore correlations of features with the outcome column. You can do this in several ways, for example using the `sns.pairplot` we used above or drawing a heatmap of the correlations.\n",
|
||||
"- Do features need standardization? If so, what standardization technique will you use? MinMax? Standard?\n",
|
||||
"- Prepare your final `X` and `y` variables to be used by an ML model. Make sure you define your target variable well. Will you need dummy columns?"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Exercise 2"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {
|
||||
"collapsed": true
|
||||
},
|
||||
"source": [
|
||||
"Build a fully connected NN model that predicts diabetes. Follow these steps:\n",
|
||||
"\n",
|
||||
"1. Split your data in a train/test with a test size of 20% and a `random_state = 22`\n",
|
||||
"- define a sequential model with at least one inner layer. You will have to make choices for the following things:\n",
|
||||
" - what is the size of the input?\n",
|
||||
" - how many nodes will you use in each layer?\n",
|
||||
" - what is the size of the output?\n",
|
||||
" - what activation functions will you use in the inner layers?\n",
|
||||
" - what activation function will you use at output?\n",
|
||||
" - what loss function will you use?\n",
|
||||
" - what optimizer will you use?\n",
|
||||
"- fit your model on the training set, using a validation_split of 0.1\n",
|
||||
"- test your trained model on the test data from the train/test split\n",
|
||||
"- check the accuracy score, the confusion matrix and the classification report"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Exercise 3\n",
|
||||
"Compare your work with the results presented in [this notebook](https://www.kaggle.com/futurist/d/uciml/pima-indians-diabetes-database/pima-data-visualisation-and-machine-learning). Are your Neural Network results better or worse than the results obtained by traditional Machine Learning techniques?\n",
|
||||
"\n",
|
||||
"- Try training a Support Vector Machine or a Random Forest model on the exact same train/test split. Is the performance better or worse?\n",
|
||||
"- Try restricting your features to only 4 features like in the suggested notebook. How does model performance change?"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Exercise 4\n",
|
||||
"\n",
|
||||
"[Tensorflow playground](http://playground.tensorflow.org/) is a web-based neural network demo. It is really useful for developing an intuition about what happens when you change the architecture, activation function or other parameters. Try playing with it for a few minutes. You don't need to understand the meaning of every knob and button on the page; just get a sense of what happens if you change something. In the next chapter we'll explore these things in more detail.\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.7.10"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
||||
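Exercise 2 in the notebook above asks for a fully connected network that predicts diabetes. The following is a hedged sketch of one possible outline, not the course solution; the target column name `Outcome` is an assumption (the exercise only says the last column is the binary outcome), while the split parameters and validation_split follow the exercise text:

# Hedged sketch for Exercise 2 -- one possible outline, not the official solution
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.metrics import accuracy_score, confusion_matrix, classification_report
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from tensorflow.keras.optimizers import Adam

df = pd.read_csv('../data/diabetes.csv')
X = StandardScaler().fit_transform(df.drop('Outcome', axis=1))  # 'Outcome' column name is assumed
y = df['Outcome'].values

# 20% test split with random_state=22, as requested by the exercise
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=22)

model = Sequential()
model.add(Dense(32, input_shape=(X.shape[1],), activation='relu'))  # one inner layer
model.add(Dense(1, activation='sigmoid'))                           # binary output
model.compile(Adam(learning_rate=0.01), 'binary_crossentropy', metrics=['accuracy'])
model.fit(X_train, y_train, epochs=50, validation_split=0.1, verbose=0)

y_pred = (model.predict(X_test) > 0.5).astype(int).ravel()
print(accuracy_score(y_test, y_pred))
print(confusion_matrix(y_test, y_pred))
print(classification_report(y_test, y_pred))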
1055
course/5 Gradient Descent.ipynb
Normal file
File diff suppressed because it is too large
988
course/6 Convolutional Neural Networks.ipynb
Normal file
@@ -0,0 +1,988 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# Convolutional Neural Networks"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Machine learning on images"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import pandas as pd\n",
|
||||
"import numpy as np\n",
|
||||
"%matplotlib inline\n",
|
||||
"import matplotlib.pyplot as plt"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### MNIST"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from tensorflow.keras.datasets import mnist"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"(X_train, y_train), (X_test, y_test) = mnist.load_data('/tmp/mnist.npz')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X_train.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X_test.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X_train[0]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"plt.imshow(X_train[0], cmap='gray')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X_train = X_train.reshape(-1, 28*28)\n",
|
||||
"X_test = X_test.reshape(-1, 28*28)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X_train.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X_train = X_train.astype('float32')\n",
|
||||
"X_test = X_test.astype('float32')\n",
|
||||
"X_train /= 255.0\n",
|
||||
"X_test /= 255.0"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X_train[0]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from tensorflow.keras.utils import to_categorical"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"y_train_cat = to_categorical(y_train)\n",
|
||||
"y_test_cat = to_categorical(y_test)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"y_train[0]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"y_train_cat[0]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"y_train_cat.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"y_test_cat.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Fully connected on images"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from tensorflow.keras.models import Sequential\n",
|
||||
"from tensorflow.keras.layers import Dense\n",
|
||||
"import tensorflow.keras.backend as K\n",
|
||||
"\n",
|
||||
"# K.clear_session()\n",
|
||||
"\n",
|
||||
"model = Sequential()\n",
|
||||
"model.add(Dense(512, input_dim=28*28, activation='relu'))\n",
|
||||
"model.add(Dense(256, activation='relu'))\n",
|
||||
"model.add(Dense(128, activation='relu'))\n",
|
||||
"model.add(Dense(32, activation='relu'))\n",
|
||||
"model.add(Dense(10, activation='softmax'))\n",
|
||||
"model.compile(loss='categorical_crossentropy',\n",
|
||||
" optimizer='rmsprop',\n",
|
||||
" metrics=['accuracy'])\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"h = model.fit(X_train, y_train_cat, batch_size=128, epochs=10, verbose=1, validation_split=0.3)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"plt.plot(h.history['accuracy'])\n",
|
||||
"plt.plot(h.history['val_accuracy'])\n",
|
||||
"plt.legend(['Training', 'Validation'])\n",
|
||||
"plt.title('Accuracy')\n",
|
||||
"plt.xlabel('Epochs');"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"test_accuracy = model.evaluate(X_test, y_test_cat)[1]\n",
|
||||
"test_accuracy"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Tensor Math"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"A = np.random.randint(10, size=(2, 3, 4, 5))\n",
|
||||
"B = np.random.randint(10, size=(2, 3))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"A"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"A[0, 1, 0, 3]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"B"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"#### A random colored image"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"img = np.random.randint(255, size=(4, 4, 3), dtype='uint8')\n",
|
||||
"img"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"plt.figure(figsize=(5, 5))\n",
|
||||
"plt.subplot(221)\n",
|
||||
"plt.imshow(img)\n",
|
||||
"plt.title(\"All Channels combined\")\n",
|
||||
"\n",
|
||||
"plt.subplot(222)\n",
|
||||
"plt.imshow(img[:, : , 0], cmap='Reds')\n",
|
||||
"plt.title(\"Red channel\")\n",
|
||||
"\n",
|
||||
"plt.subplot(223)\n",
|
||||
"plt.imshow(img[:, : , 1], cmap='Greens')\n",
|
||||
"plt.title(\"Green channel\")\n",
|
||||
"\n",
|
||||
"plt.subplot(224)\n",
|
||||
"plt.imshow(img[:, : , 2], cmap='Blues')\n",
|
||||
"plt.title(\"Blue channel\");"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Tensor operations"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"2 * A"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"A + A"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"A.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"B.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"np.tensordot(A, B, axes=([0, 1], [0, 1]))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"np.tensordot(A, B, axes=([0], [0])).shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### 1D convolution"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"a = np.array([0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0], dtype='float32')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"b = np.array([-1, 1], dtype='float32')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"c = np.convolve(a, b)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"a"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"b"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"c"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"plt.subplot(211)\n",
|
||||
"plt.plot(a, 'o-')\n",
|
||||
"\n",
|
||||
"plt.subplot(212)\n",
|
||||
"plt.plot(c, 'o-');"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Image filters with convolutions"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from scipy.ndimage.filters import convolve\n",
|
||||
"from scipy.signal import convolve2d\n",
|
||||
"from scipy import misc"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"img = misc.ascent()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"img.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"plt.imshow(img, cmap='gray');"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"h_kernel = np.array([[ 1, 2, 1],\n",
|
||||
" [ 0, 0, 0],\n",
|
||||
" [-1, -2, -1]])\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"scrolled": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"plt.imshow(h_kernel, cmap='gray');"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"res = convolve2d(img, h_kernel)\n",
|
||||
"\n",
|
||||
"plt.imshow(res, cmap='gray');"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Convolutional neural networks"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from tensorflow.keras.layers import Conv2D"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"img.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"plt.figure(figsize=(5, 5))\n",
|
||||
"plt.imshow(img, cmap='gray');"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"img_tensor = img.reshape((1, 512, 512, 1))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model = Sequential()\n",
|
||||
"model.add(Conv2D(1, (3, 3), strides=(2,1), input_shape=(512, 512, 1)))\n",
|
||||
"model.compile('adam', 'mse')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"img_pred_tensor = model.predict(img_tensor)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"img_pred_tensor.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"img_pred = img_pred_tensor[0, :, :, 0]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"plt.imshow(img_pred, cmap='gray');"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"weights = model.get_weights()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"weights[0].shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"plt.imshow(weights[0][:, :, 0, 0], cmap='gray');"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"weights[0] = np.ones(weights[0].shape)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.set_weights(weights)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"img_pred_tensor = model.predict(img_tensor)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"img_pred = img_pred_tensor[0, :, :, 0]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"plt.imshow(img_pred, cmap='gray');"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model = Sequential()\n",
|
||||
"model.add(Conv2D(1, (3, 3), input_shape=(512, 512, 1), padding='same'))\n",
|
||||
"model.compile('adam', 'mse')\n",
|
||||
"\n",
|
||||
"img_pred_tensor = model.predict(img_tensor)\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"img_pred_tensor.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Pooling layers"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from tensorflow.keras.layers import MaxPool2D, AvgPool2D"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model = Sequential()\n",
|
||||
"model.add(MaxPool2D((5, 5), input_shape=(512, 512, 1)))\n",
|
||||
"model.compile('adam', 'mse')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"img_pred = model.predict(img_tensor)[0, :, :, 0]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"plt.imshow(img_pred, cmap='gray')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model = Sequential()\n",
|
||||
"model.add(AvgPool2D((5, 5), input_shape=(512, 512, 1)))\n",
|
||||
"model.compile('adam', 'mse')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"img_pred = model.predict(img_tensor)[0, :, :, 0]\n",
|
||||
"plt.imshow(img_pred, cmap='gray');"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Final architecture"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X_train = X_train.reshape(-1, 28, 28, 1)\n",
|
||||
"X_test = X_test.reshape(-1, 28, 28, 1)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X_train.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from tensorflow.keras.layers import Flatten, Activation"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"K.clear_session()\n",
|
||||
"\n",
|
||||
"model = Sequential()\n",
|
||||
"\n",
|
||||
"model.add(Conv2D(32, (3, 3), input_shape=(28, 28, 1)))\n",
|
||||
"model.add(MaxPool2D(pool_size=(2, 2)))\n",
|
||||
"model.add(Activation('relu'))\n",
|
||||
"\n",
|
||||
"model.add(Flatten())\n",
|
||||
"\n",
|
||||
"model.add(Dense(128, activation='relu'))\n",
|
||||
"\n",
|
||||
"model.add(Dense(10, activation='softmax'))\n",
|
||||
"\n",
|
||||
"model.compile(loss='categorical_crossentropy',\n",
|
||||
" optimizer='rmsprop',\n",
|
||||
" metrics=['accuracy'])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.summary()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.fit(X_train, y_train_cat, batch_size=128,\n",
|
||||
" epochs=2, verbose=1, validation_split=0.3)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.evaluate(X_test, y_test_cat)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {
|
||||
"collapsed": true
|
||||
},
|
||||
"source": [
|
||||
"### Exercise 1\n",
|
||||
"You've been hired by a shipping company to overhaul the way they route mail, parcels and packages. They want to build an image recognition system capable of recognizing the digits in the zipcode on a package, so that it can be automatically routed to the correct location.\n",
|
||||
"You are tasked to build the digit recognition system. Luckily, you can rely on the MNIST dataset for the intial training of your model!\n",
|
||||
"\n",
|
||||
"Build a deep convolutional neural network with at least two convolutional and two pooling layers before the fully connected layer.\n",
|
||||
"\n",
|
||||
"- Start from the network we have just built\n",
|
||||
"- Insert a `Conv2D` layer after the first `MaxPool2D`, give it 64 filters.\n",
|
||||
"- Insert a `MaxPool2D` after that one\n",
|
||||
"- Insert an `Activation` layer\n",
|
||||
"- retrain the model\n",
|
||||
"- does performance improve?\n",
|
||||
"- how many parameters does this new model have? More or less than the previous model? Why?\n",
|
||||
"- how long did this second model take to train? Longer or shorter than the previous model? Why?\n",
|
||||
"- did it perform better or worse than the previous model?"
|
||||
]
|
||||
},
|
||||
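  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "*A minimal sketch of one possible solution, not the official one: it assumes the reshaped MNIST tensors `X_train`, `y_train_cat`, `X_test` and `y_test_cat` from the cells above are still in memory. The 64-filter size comes from the exercise text; the rest mirrors the network we just built.*"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# sketch: same network as above plus a second Conv2D(64) + MaxPool2D block\n",
    "from tensorflow.keras.models import Sequential\n",
    "from tensorflow.keras.layers import Conv2D, MaxPool2D, Activation, Flatten, Dense\n",
    "import tensorflow.keras.backend as K\n",
    "\n",
    "K.clear_session()\n",
    "\n",
    "model = Sequential()\n",
    "\n",
    "model.add(Conv2D(32, (3, 3), input_shape=(28, 28, 1)))\n",
    "model.add(MaxPool2D(pool_size=(2, 2)))\n",
    "model.add(Activation('relu'))\n",
    "\n",
    "# new block: 64 filters, then another pooling + activation\n",
    "model.add(Conv2D(64, (3, 3)))\n",
    "model.add(MaxPool2D(pool_size=(2, 2)))\n",
    "model.add(Activation('relu'))\n",
    "\n",
    "model.add(Flatten())\n",
    "model.add(Dense(128, activation='relu'))\n",
    "model.add(Dense(10, activation='softmax'))\n",
    "\n",
    "model.compile(loss='categorical_crossentropy',\n",
    "              optimizer='rmsprop',\n",
    "              metrics=['accuracy'])\n",
    "model.summary()\n",
    "\n",
    "model.fit(X_train, y_train_cat, batch_size=128,\n",
    "          epochs=2, verbose=1, validation_split=0.3)\n",
    "model.evaluate(X_test, y_test_cat)"
   ]
  },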
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Exercise 2\n",
|
||||
"\n",
|
||||
"Pleased with your performance with the digits recognition task, your boss decides to challenge you with a harder task. Their online branch allows people to upload images to a website that generates and prints a postcard that is shipped to destination. Your boss would like to know what images people are loading on the site in order to provide targeted advertising on the same page, so he asks you to build an image recognition system capable of recognizing a few objects. Luckily for you, there's a dataset ready made with a collection of labeled images. This is the [Cifar 10 Dataset](http://www.cs.toronto.edu/~kriz/cifar.html), a very famous dataset that contains images for 10 different categories:\n",
|
||||
"\n",
|
||||
"- airplane \t\t\t\t\t\t\t\t\t\t\n",
|
||||
"- automobile \t\t\t\t\t\t\t\t\t\t\n",
|
||||
"- bird \t\t\t\t\t\t\t\t\t\t\n",
|
||||
"- cat \t\t\t\t\t\t\t\t\t\t\n",
|
||||
"- deer \t\t\t\t\t\t\t\t\t\t\n",
|
||||
"- dog \t\t\t\t\t\t\t\t\t\t\n",
|
||||
"- frog \t\t\t\t\t\t\t\t\t\t\n",
|
||||
"- horse \t\t\t\t\t\t\t\t\t\t\n",
|
||||
"- ship \t\t\t\t\t\t\t\t\t\t\n",
|
||||
"- truck\n",
|
||||
"\n",
|
||||
"In this exercise we will reach the limit of what you can achieve on your laptop and get ready for the next session on cloud GPUs.\n",
|
||||
"\n",
|
||||
"Here's what you have to do:\n",
|
||||
"- load the cifar10 dataset using `keras.datasets.cifar10.load_data()`\n",
|
||||
"- display a few images, see how hard/easy it is for you to recognize an object with such low resolution\n",
|
||||
"- check the shape of X_train, does it need reshape?\n",
|
||||
"- check the scale of X_train, does it need rescaling?\n",
|
||||
"- check the shape of y_train, does it need reshape?\n",
|
||||
"- build a model with the following architecture, and choose the parameters and activation functions for each of the layers:\n",
|
||||
" - conv2d\n",
|
||||
" - conv2d\n",
|
||||
" - maxpool\n",
|
||||
" - conv2d\n",
|
||||
" - conv2d\n",
|
||||
" - maxpool\n",
|
||||
" - flatten\n",
|
||||
" - dense\n",
|
||||
" - output\n",
|
||||
"- compile the model and check the number of parameters\n",
|
||||
"- attempt to train the model with the optimizer of your choice. How fast does training proceed?\n",
|
||||
"- If training is too slow (as expected) stop the execution and move to the next session!"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from tensorflow.keras.datasets import cifar10"
|
||||
]
|
||||
},
|
||||
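  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "*A minimal sketch under stated assumptions: the filter counts, activations and optimizer below are one reasonable choice, not the required answer, and the (commented-out) training call is only there to let you gauge the speed on your laptop.*"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "from tensorflow.keras.datasets import cifar10\n",
    "from tensorflow.keras.models import Sequential\n",
    "from tensorflow.keras.layers import Conv2D, MaxPool2D, Flatten, Dense\n",
    "from tensorflow.keras.utils import to_categorical\n",
    "import tensorflow.keras.backend as K\n",
    "\n",
    "(X_train, y_train), (X_test, y_test) = cifar10.load_data()\n",
    "\n",
    "# images are already (32, 32, 3), so no reshape is needed,\n",
    "# but the pixel values need rescaling to [0, 1]\n",
    "X_train = X_train.astype('float32') / 255.0\n",
    "X_test = X_test.astype('float32') / 255.0\n",
    "\n",
    "# labels come as an (N, 1) column of integers -> one-hot encode them\n",
    "y_train_cat = to_categorical(y_train, 10)\n",
    "y_test_cat = to_categorical(y_test, 10)\n",
    "\n",
    "K.clear_session()\n",
    "\n",
    "model = Sequential()\n",
    "model.add(Conv2D(32, (3, 3), activation='relu', input_shape=(32, 32, 3)))\n",
    "model.add(Conv2D(32, (3, 3), activation='relu'))\n",
    "model.add(MaxPool2D(pool_size=(2, 2)))\n",
    "model.add(Conv2D(64, (3, 3), activation='relu'))\n",
    "model.add(Conv2D(64, (3, 3), activation='relu'))\n",
    "model.add(MaxPool2D(pool_size=(2, 2)))\n",
    "model.add(Flatten())\n",
    "model.add(Dense(512, activation='relu'))\n",
    "model.add(Dense(10, activation='softmax'))\n",
    "\n",
    "model.compile(loss='categorical_crossentropy',\n",
    "              optimizer='rmsprop',\n",
    "              metrics=['accuracy'])\n",
    "model.summary()\n",
    "\n",
    "# uncomment to see how fast (or slow) an epoch is on your machine:\n",
    "# model.fit(X_train, y_train_cat, batch_size=32, epochs=1, validation_split=0.1)"
   ]
  },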
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.7.10"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
||||
565
course/8 Recurrent Neural Networks.ipynb
Normal file
565
course/8 Recurrent Neural Networks.ipynb
Normal file
@@ -0,0 +1,565 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# Recurrent Neural Networks"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import pandas as pd\n",
|
||||
"import numpy as np\n",
|
||||
"%matplotlib inline\n",
|
||||
"import matplotlib.pyplot as plt"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Time series forecasting"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df = pd.read_csv('../data/cansim-0800020-eng-6674700030567901031.csv',\n",
|
||||
" skiprows=6, skipfooter=9,\n",
|
||||
" engine='python')\n",
|
||||
"df.head()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from pandas.tseries.offsets import MonthEnd"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df['Adjustments'] = pd.to_datetime(df['Adjustments']) + MonthEnd(1)\n",
|
||||
"df = df.set_index('Adjustments')\n",
|
||||
"df.head()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df.plot()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"split_date = pd.Timestamp('01-01-2011')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"train = df.loc[:split_date, ['Unadjusted']]\n",
|
||||
"test = df.loc[split_date:, ['Unadjusted']]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"ax = train.plot()\n",
|
||||
"test.plot(ax=ax)\n",
|
||||
"plt.legend(['train', 'test'])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from sklearn.preprocessing import MinMaxScaler\n",
|
||||
"\n",
|
||||
"sc = MinMaxScaler()\n",
|
||||
"\n",
|
||||
"train_sc = sc.fit_transform(train)\n",
|
||||
"test_sc = sc.transform(test)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"train_sc[:4]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X_train = train_sc[:-1]\n",
|
||||
"y_train = train_sc[1:]\n",
|
||||
"\n",
|
||||
"X_test = test_sc[:-1]\n",
|
||||
"y_test = test_sc[1:]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Fully connected predictor"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from tensorflow.keras.models import Sequential\n",
|
||||
"from tensorflow.keras.layers import Dense\n",
|
||||
"import tensorflow.keras.backend as K\n",
|
||||
"from tensorflow.keras.callbacks import EarlyStopping"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"K.clear_session()\n",
|
||||
"\n",
|
||||
"model = Sequential()\n",
|
||||
"model.add(Dense(12, input_dim=1, activation='relu'))\n",
|
||||
"model.add(Dense(1))\n",
|
||||
"model.compile(loss='mean_squared_error', optimizer='adam')\n",
|
||||
"model.summary()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"early_stop = EarlyStopping(monitor='loss', patience=1, verbose=1)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.fit(X_train, y_train, epochs=200,\n",
|
||||
" batch_size=2, verbose=1,\n",
|
||||
" callbacks=[early_stop])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"y_pred = model.predict(X_test)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"plt.plot(y_test)\n",
|
||||
"plt.plot(y_pred)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Recurrent predictor"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from tensorflow.keras.layers import LSTM"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X_train.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"#3D tensor with shape (batch_size, timesteps, input_dim)\n",
|
||||
"X_train[:, None].shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X_train_t = X_train[:, None]\n",
|
||||
"X_test_t = X_test[:, None]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"K.clear_session()\n",
|
||||
"model = Sequential()\n",
|
||||
"\n",
|
||||
"model.add(LSTM(6, input_shape=(1, 1)))\n",
|
||||
"\n",
|
||||
"model.add(Dense(1))\n",
|
||||
"\n",
|
||||
"model.compile(loss='mean_squared_error', optimizer='adam')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.fit(X_train_t, y_train,\n",
|
||||
" epochs=100, batch_size=1, verbose=1,\n",
|
||||
" callbacks=[early_stop])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"y_pred = model.predict(X_test_t)\n",
|
||||
"plt.plot(y_test)\n",
|
||||
"plt.plot(y_pred)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Windows"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"train_sc.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"train_sc_df = pd.DataFrame(train_sc, columns=['Scaled'], index=train.index)\n",
|
||||
"test_sc_df = pd.DataFrame(test_sc, columns=['Scaled'], index=test.index)\n",
|
||||
"train_sc_df.head()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"for s in range(1, 13):\n",
|
||||
" train_sc_df['shift_{}'.format(s)] = train_sc_df['Scaled'].shift(s)\n",
|
||||
" test_sc_df['shift_{}'.format(s)] = test_sc_df['Scaled'].shift(s)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"train_sc_df.head(13)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X_train = train_sc_df.dropna().drop('Scaled', axis=1)\n",
|
||||
"y_train = train_sc_df.dropna()[['Scaled']]\n",
|
||||
"\n",
|
||||
"X_test = test_sc_df.dropna().drop('Scaled', axis=1)\n",
|
||||
"y_test = test_sc_df.dropna()[['Scaled']]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X_train.head()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X_train.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X_train = X_train.values\n",
|
||||
"X_test= X_test.values\n",
|
||||
"\n",
|
||||
"y_train = y_train.values\n",
|
||||
"y_test = y_test.values"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Fully Connected on Windows"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"K.clear_session()\n",
|
||||
"\n",
|
||||
"model = Sequential()\n",
|
||||
"model.add(Dense(12, input_dim=12, activation='relu'))\n",
|
||||
"model.add(Dense(1))\n",
|
||||
"model.compile(loss='mean_squared_error', optimizer='adam')\n",
|
||||
"model.summary()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.fit(X_train, y_train, epochs=200,\n",
|
||||
" batch_size=1, verbose=1, callbacks=[early_stop])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"y_pred = model.predict(X_test)\n",
|
||||
"plt.plot(y_test)\n",
|
||||
"plt.plot(y_pred)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### LSTM on Windows"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X_train_t = X_train.reshape(X_train.shape[0], 1, 12)\n",
|
||||
"X_test_t = X_test.reshape(X_test.shape[0], 1, 12)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X_train_t.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"K.clear_session()\n",
|
||||
"model = Sequential()\n",
|
||||
"\n",
|
||||
"model.add(LSTM(6, input_shape=(1, 12)))\n",
|
||||
"\n",
|
||||
"model.add(Dense(1))\n",
|
||||
"\n",
|
||||
"model.compile(loss='mean_squared_error', optimizer='adam')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.summary()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.fit(X_train_t, y_train, epochs=100,\n",
|
||||
" batch_size=1, verbose=1, callbacks=[early_stop])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"y_pred = model.predict(X_test_t)\n",
|
||||
"plt.plot(y_test)\n",
|
||||
"plt.plot(y_pred)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Exercise 1\n",
|
||||
"\n",
|
||||
"In the model above we reshaped the input shape to: `(num_samples, 1, 12)`, i.e. we treated a window of 12 months as a vector of 12 coordinates that we simultaneously passed to all the LSTM nodes. An alternative way to look at the problem is to reshape the input to `(num_samples, 12, 1)`. This means we consider each input window as a sequence of 12 values that we will pass in sequence to the LSTM. In principle this looks like a more accurate description of our situation. But does it yield better predictions? Let's check it.\n",
|
||||
"\n",
|
||||
"- Reshape `X_train` and `X_test` so that they represent a set of univariate sequences\n",
|
||||
"- retrain the same LSTM(6) model, you'll have to adapt the `input_shape`\n",
|
||||
"- check the performance of this new model, is it better at predicting the test data?"
|
||||
]
|
||||
},
|
||||
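  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "*A minimal sketch, assuming the windowed `X_train`/`X_test` arrays of shape `(num_samples, 12)` and the `early_stop` callback defined above are still in scope; the `_seq` variable names are just illustrative.*"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# each window becomes a sequence of 12 timesteps with 1 feature per step\n",
    "X_train_seq = X_train.reshape(X_train.shape[0], 12, 1)\n",
    "X_test_seq = X_test.reshape(X_test.shape[0], 12, 1)\n",
    "\n",
    "K.clear_session()\n",
    "model = Sequential()\n",
    "model.add(LSTM(6, input_shape=(12, 1)))\n",
    "model.add(Dense(1))\n",
    "model.compile(loss='mean_squared_error', optimizer='adam')\n",
    "\n",
    "model.fit(X_train_seq, y_train, epochs=100,\n",
    "          batch_size=1, verbose=1, callbacks=[early_stop])\n",
    "\n",
    "y_pred = model.predict(X_test_seq)\n",
    "plt.plot(y_test)\n",
    "plt.plot(y_pred)"
   ]
  },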
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {
|
||||
"collapsed": true
|
||||
},
|
||||
"source": [
|
||||
"## Exercise 2\n",
|
||||
"\n",
|
||||
"RNN models can be applied to images too. In general we can apply them to any data where there's a connnection between nearby units. Let's see how we can easily build a model that works with images.\n",
|
||||
"\n",
|
||||
"- Load the MNIST data, by now you should be able to do it blindfolded :)\n",
|
||||
"- reshape it so that an image looks like a long sequence of pixels\n",
|
||||
"- create a recurrent model and train it on the training data\n",
|
||||
"- how does it perform compared to a fully connected? How does it compare to Convolutional Neural Networks?\n",
|
||||
"\n",
|
||||
"(feel free to run this exercise on a cloud GPU if it's too slow on your laptop)"
|
||||
]
|
||||
},
|
||||
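  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "*A minimal sketch, not the only approach: as a shortcut it treats each image as a sequence of 28 rows of 28 pixels rather than the full 784-step pixel sequence described above, which would train far more slowly on a laptop. The layer sizes and the single epoch are assumptions made to keep the run short.*"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "from tensorflow.keras.datasets import mnist\n",
    "from tensorflow.keras.utils import to_categorical\n",
    "\n",
    "(X_train, y_train), (X_test, y_test) = mnist.load_data()\n",
    "\n",
    "# rescale pixels; each (28, 28) image is read as 28 timesteps of 28 features\n",
    "X_train = X_train.astype('float32') / 255.0\n",
    "X_test = X_test.astype('float32') / 255.0\n",
    "\n",
    "y_train_cat = to_categorical(y_train, 10)\n",
    "y_test_cat = to_categorical(y_test, 10)\n",
    "\n",
    "K.clear_session()\n",
    "model = Sequential()\n",
    "model.add(LSTM(64, input_shape=(28, 28)))\n",
    "model.add(Dense(10, activation='softmax'))\n",
    "model.compile(loss='categorical_crossentropy',\n",
    "              optimizer='adam',\n",
    "              metrics=['accuracy'])\n",
    "\n",
    "model.fit(X_train, y_train_cat, epochs=1, batch_size=128,\n",
    "          validation_split=0.1, verbose=1)\n",
    "model.evaluate(X_test, y_test_cat)"
   ]
  },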
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.7.10"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
||||
872
course/9 Improving performance.ipynb
Normal file
872
course/9 Improving performance.ipynb
Normal file
@@ -0,0 +1,872 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# 9 Improving performance"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import pandas as pd\n",
|
||||
"import numpy as np\n",
|
||||
"%matplotlib inline\n",
|
||||
"import matplotlib.pyplot as plt"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Learning curves"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from sklearn.datasets import load_digits"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"digits = load_digits()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X, y = digits.data, digits.target"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X[0]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"for i in range(8):\n",
|
||||
" plt.subplot(1,8,i+1)\n",
|
||||
" plt.imshow(X.reshape(-1, 8, 8)[i], cmap='gray')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from tensorflow.keras.models import Sequential\n",
|
||||
"from tensorflow.keras.layers import Dense\n",
|
||||
"from tensorflow.keras.utils import to_categorical\n",
|
||||
"import tensorflow.keras.backend as K\n",
|
||||
"from tensorflow.keras.callbacks import EarlyStopping"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"K.clear_session()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model = Sequential()\n",
|
||||
"model.add(Dense(16, input_shape=(64,), activation='relu'))\n",
|
||||
"model.add(Dense(10, activation='softmax'))\n",
|
||||
"model.compile('adam', 'categorical_crossentropy', metrics=['accuracy'])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# store the initial random weights\n",
|
||||
"initial_weights = model.get_weights()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"y_cat = to_categorical(y, 10)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from sklearn.model_selection import train_test_split"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X_train, X_test, y_train, y_test = train_test_split(X, y_cat,\n",
|
||||
" test_size=0.3)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X_train.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"train_sizes = (len(X_train) * np.linspace(0.1, 0.999, 4)).astype(int)\n",
|
||||
"train_sizes"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"train_scores = []\n",
|
||||
"test_scores = []\n",
|
||||
"\n",
|
||||
"for train_size in train_sizes:\n",
|
||||
" X_train_frac, _, y_train_frac, _ = \\\n",
|
||||
" train_test_split(X_train, y_train, train_size=train_size)\n",
|
||||
" \n",
|
||||
" # at each iteration reset the weights of the model\n",
|
||||
" # to the initial random weights\n",
|
||||
" model.set_weights(initial_weights)\n",
|
||||
" \n",
|
||||
" h = model.fit(X_train_frac, y_train_frac,\n",
|
||||
" verbose=0,\n",
|
||||
" epochs=300,\n",
|
||||
" callbacks=[EarlyStopping(monitor='loss', patience=1)])\n",
|
||||
"\n",
|
||||
" r = model.evaluate(X_train_frac, y_train_frac, verbose=0)\n",
|
||||
" train_scores.append(r[-1])\n",
|
||||
" \n",
|
||||
" e = model.evaluate(X_test, y_test, verbose=0)\n",
|
||||
" test_scores.append(e[-1])\n",
|
||||
" \n",
|
||||
" print(\"Done size: \", train_size)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"plt.plot(train_sizes, train_scores, 'o-', label=\"Training score\")\n",
|
||||
"plt.plot(train_sizes, test_scores, 'o-', label=\"Test score\")\n",
|
||||
"plt.legend(loc=\"best\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Batch Normalization"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from tensorflow.keras.layers import BatchNormalization"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"def repeated_training(X_train,\n",
|
||||
" y_train,\n",
|
||||
" X_test,\n",
|
||||
" y_test,\n",
|
||||
" units=512,\n",
|
||||
" activation='sigmoid',\n",
|
||||
" optimizer='sgd',\n",
|
||||
" do_bn=False,\n",
|
||||
" epochs=10,\n",
|
||||
" repeats=3):\n",
|
||||
" histories = []\n",
|
||||
" \n",
|
||||
" for repeat in range(repeats):\n",
|
||||
" K.clear_session()\n",
|
||||
"\n",
|
||||
" model = Sequential()\n",
|
||||
" \n",
|
||||
" # first fully connected layer\n",
|
||||
" model.add(Dense(units,\n",
|
||||
" input_shape=X_train.shape[1:],\n",
|
||||
" kernel_initializer='normal',\n",
|
||||
" activation=activation))\n",
|
||||
" if do_bn:\n",
|
||||
" model.add(BatchNormalization())\n",
|
||||
"\n",
|
||||
" # second fully connected layer\n",
|
||||
" model.add(Dense(units,\n",
|
||||
" kernel_initializer='normal',\n",
|
||||
" activation=activation))\n",
|
||||
" if do_bn:\n",
|
||||
" model.add(BatchNormalization())\n",
|
||||
"\n",
|
||||
" # third fully connected layer\n",
|
||||
" model.add(Dense(units,\n",
|
||||
" kernel_initializer='normal',\n",
|
||||
" activation=activation))\n",
|
||||
" if do_bn:\n",
|
||||
" model.add(BatchNormalization())\n",
|
||||
"\n",
|
||||
" # output layer\n",
|
||||
" model.add(Dense(10, activation='softmax'))\n",
|
||||
" \n",
|
||||
" model.compile(optimizer,\n",
|
||||
" 'categorical_crossentropy',\n",
|
||||
" metrics=['accuracy'])\n",
|
||||
"\n",
|
||||
" h = model.fit(X_train, y_train,\n",
|
||||
" validation_data=(X_test, y_test),\n",
|
||||
" epochs=epochs,\n",
|
||||
" verbose=0)\n",
|
||||
" histories.append([h.history['accuracy'], h.history['val_accuracy']])\n",
|
||||
" print(repeat, end=' ')\n",
|
||||
"\n",
|
||||
" histories = np.array(histories)\n",
|
||||
" \n",
|
||||
" # calculate mean and standard deviation across repeats:\n",
|
||||
" mean_acc = histories.mean(axis=0)\n",
|
||||
" std_acc = histories.std(axis=0)\n",
|
||||
" print()\n",
|
||||
" \n",
|
||||
" return mean_acc[0], std_acc[0], mean_acc[1], std_acc[1]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"mean_acc, std_acc, mean_acc_val, std_acc_val = \\\n",
|
||||
" repeated_training(X_train, y_train, X_test, y_test, do_bn=False)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"mean_acc_bn, std_acc_bn, mean_acc_val_bn, std_acc_val_bn = \\\n",
|
||||
" repeated_training(X_train, y_train, X_test, y_test, do_bn=True)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"def plot_mean_std(m, s):\n",
|
||||
" plt.plot(m)\n",
|
||||
" plt.fill_between(range(len(m)), m-s, m+s, alpha=0.1)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"plot_mean_std(mean_acc, std_acc)\n",
|
||||
"plot_mean_std(mean_acc_val, std_acc_val)\n",
|
||||
"plot_mean_std(mean_acc_bn, std_acc_bn)\n",
|
||||
"plot_mean_std(mean_acc_val_bn, std_acc_val_bn)\n",
|
||||
"plt.ylim(0, 1.01)\n",
|
||||
"plt.title(\"Batch Normalization Accuracy\")\n",
|
||||
"plt.xlabel('Epochs')\n",
|
||||
"plt.ylabel('Accuracy')\n",
|
||||
"plt.legend(['Train', 'Test', 'Train with Batch Normalization', 'Test with Batch Normalization'], loc='best');\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Weight Regularization & Dropout"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from tensorflow.keras.layers import Dropout"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model = Sequential()\n",
|
||||
"model.add(Dropout(0.2, input_shape=X_train.shape[1:]))\n",
|
||||
"# first fully connected layer\n",
|
||||
"model.add(Dense(512, kernel_initializer='normal',\n",
|
||||
" kernel_regularizer='l2', activation='sigmoid'))\n",
|
||||
"model.add(Dropout(0.4))\n",
|
||||
"model.add(Dense(10, activation='softmax'))\n",
|
||||
"\n",
|
||||
"model.compile('sgd',\n",
|
||||
" 'categorical_crossentropy',\n",
|
||||
" metrics=['accuracy'])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Data augmentation"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from tensorflow.keras.preprocessing.image import ImageDataGenerator\n",
|
||||
"\n",
|
||||
"generator = ImageDataGenerator(rescale = 1./255,\n",
|
||||
" width_shift_range=0.1,\n",
|
||||
" height_shift_range=0.1,\n",
|
||||
" rotation_range = 20,\n",
|
||||
" shear_range = 0.3,\n",
|
||||
" zoom_range = 0.3,\n",
|
||||
" horizontal_flip = True)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"train = generator.flow_from_directory('../data/generator',\n",
|
||||
" target_size = (128, 128),\n",
|
||||
" batch_size = 32,\n",
|
||||
" class_mode = 'binary')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"plt.figure(figsize=(12, 12))\n",
|
||||
"for i in range(16):\n",
|
||||
" img, label = train.next()\n",
|
||||
" plt.subplot(4, 4, i+1)\n",
|
||||
" plt.imshow(img[0])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Embeddings"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from tensorflow.keras.layers import Embedding"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model = Sequential()\n",
|
||||
"model.add(Embedding(input_dim=100, output_dim=2))\n",
|
||||
"model.compile(loss='binary_crossentropy',\n",
|
||||
" optimizer='adam',\n",
|
||||
" metrics=['accuracy'])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.summary()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"emb = model.predict(np.array([[81, 1, 96, 79],\n",
|
||||
" [17, 47, 69, 50],\n",
|
||||
" [49, 3, 12, 88]]))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"emb.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"emb"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Sentiment prediction on movie Reviews"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from tensorflow.keras.datasets import imdb"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"(X_train, y_train), (X_test, y_test) = imdb.load_data('/tmp/imdb.npz',\n",
|
||||
" num_words=None,\n",
|
||||
" skip_top=0,\n",
|
||||
" maxlen=None,\n",
|
||||
" start_char=1,\n",
|
||||
" oov_char=2,\n",
|
||||
" index_from=3)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X_train.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X_train[1]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"idx = imdb.get_word_index()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"max(idx.values())"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"idx"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"rev_idx = {v+3:k for k,v in idx.items()}"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"rev_idx"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"rev_idx[0] = 'padding_char'\n",
|
||||
"rev_idx[1] = 'start_char'\n",
|
||||
"rev_idx[2] = 'oov_char'\n",
|
||||
"rev_idx[3] = 'unk_char'"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"rev_idx[3]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"y_train[0]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"example_review = ' '.join([rev_idx[word] for word in X_train[0]])\n",
|
||||
"example_review"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"len(X_train[0])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"len(X_train[1])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"len(X_train[2])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"len(X_train[3])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from tensorflow.keras.preprocessing.sequence import pad_sequences\n",
|
||||
"from tensorflow.keras.layers import LSTM"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"maxlen = 100\n",
|
||||
"\n",
|
||||
"X_train_pad = pad_sequences(X_train, maxlen=maxlen)\n",
|
||||
"X_test_pad = pad_sequences(X_test, maxlen=maxlen)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X_train_pad.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X_train_pad[0]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X_train[0]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"max_features = max([max(x) for x in X_train_pad] + \n",
|
||||
" [max(x) for x in X_test_pad]) + 1\n",
|
||||
"max_features"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"y_train"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model = Sequential()\n",
|
||||
"model.add(Embedding(max_features, 128))\n",
|
||||
"model.add(LSTM(64, dropout=0.2, recurrent_dropout=0.2))\n",
|
||||
"model.add(Dense(1, activation='sigmoid'))\n",
|
||||
"\n",
|
||||
"model.compile(loss='binary_crossentropy',\n",
|
||||
" optimizer='adam',\n",
|
||||
" metrics=['accuracy'])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.fit(X_train_pad, y_train,\n",
|
||||
" batch_size=32,\n",
|
||||
" epochs=2,\n",
|
||||
" validation_split=0.3)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"score, acc = model.evaluate(X_test_pad, y_test)\n",
|
||||
"print('Test score:', score)\n",
|
||||
"print('Test accuracy:', acc)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Exercise 1\n",
|
||||
"\n",
|
||||
"- Reload the IMDB data keeping only the first 20000 most common words\n",
|
||||
"- pad the reviews to a shorter length (eg. 70 or 80), this time make sure you keep the first part of the review if it's longer than the maximum length\n",
|
||||
"- re run the model (remember to set max_features correctly)\n",
|
||||
"- does it train faster this time?\n",
|
||||
"- do you get a better performance?"
|
||||
]
|
||||
},
|
||||
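  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "*A minimal sketch, assuming the imports used above (`imdb`, `pad_sequences`, `Sequential`, `Embedding`, `LSTM`, `Dense`) are still in scope; `maxlen=80` is an arbitrary choice within the suggested range.*"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "max_features = 20000\n",
    "\n",
    "(X_train, y_train), (X_test, y_test) = imdb.load_data('/tmp/imdb.npz',\n",
    "                                                       num_words=max_features)\n",
    "\n",
    "maxlen = 80\n",
    "\n",
    "# truncating='post' drops the *end* of reviews longer than maxlen,\n",
    "# so the first part of each review is the part that is kept\n",
    "X_train_pad = pad_sequences(X_train, maxlen=maxlen, truncating='post')\n",
    "X_test_pad = pad_sequences(X_test, maxlen=maxlen, truncating='post')\n",
    "\n",
    "model = Sequential()\n",
    "model.add(Embedding(max_features, 128))\n",
    "model.add(LSTM(64, dropout=0.2, recurrent_dropout=0.2))\n",
    "model.add(Dense(1, activation='sigmoid'))\n",
    "\n",
    "model.compile(loss='binary_crossentropy',\n",
    "              optimizer='adam',\n",
    "              metrics=['accuracy'])\n",
    "\n",
    "model.fit(X_train_pad, y_train, batch_size=32,\n",
    "          epochs=2, validation_split=0.3)\n",
    "\n",
    "score, acc = model.evaluate(X_test_pad, y_test)\n",
    "print('Test score:', score)\n",
    "print('Test accuracy:', acc)"
   ]
  },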
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Exercise 2\n",
|
||||
"\n",
|
||||
"- Reload the digits data as above\n",
|
||||
"- define a function repeated_training_reg_dropout that adds regularization and dropout to a fully connected network\n",
|
||||
"- compare the performance with/witouth dropout and regularization like we did for batch normalization\n",
|
||||
"- do you get a better performance?"
|
||||
]
|
||||
},
|
||||
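  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "*A minimal sketch of one possible `repeated_training_reg_dropout`, mirroring the `repeated_training` function above; the `l2` regularizer and the 0.4 dropout rate are assumptions. It expects `X_train`, `y_train`, `X_test`, `y_test` to hold the digits splits recreated as in the learning-curves section.*"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "from tensorflow.keras.layers import Dropout\n",
    "\n",
    "def repeated_training_reg_dropout(X_train, y_train, X_test, y_test,\n",
    "                                  units=512, activation='sigmoid',\n",
    "                                  optimizer='sgd', do_reg_dropout=False,\n",
    "                                  epochs=10, repeats=3):\n",
    "    histories = []\n",
    "\n",
    "    for repeat in range(repeats):\n",
    "        K.clear_session()\n",
    "\n",
    "        model = Sequential()\n",
    "        # 'l2' weight regularization only when the flag is on\n",
    "        reg = 'l2' if do_reg_dropout else None\n",
    "\n",
    "        model.add(Dense(units, input_shape=X_train.shape[1:],\n",
    "                        kernel_initializer='normal',\n",
    "                        kernel_regularizer=reg, activation=activation))\n",
    "        if do_reg_dropout:\n",
    "            model.add(Dropout(0.4))\n",
    "\n",
    "        model.add(Dense(units, kernel_initializer='normal',\n",
    "                        kernel_regularizer=reg, activation=activation))\n",
    "        if do_reg_dropout:\n",
    "            model.add(Dropout(0.4))\n",
    "\n",
    "        model.add(Dense(10, activation='softmax'))\n",
    "        model.compile(optimizer, 'categorical_crossentropy', metrics=['accuracy'])\n",
    "\n",
    "        h = model.fit(X_train, y_train,\n",
    "                      validation_data=(X_test, y_test),\n",
    "                      epochs=epochs, verbose=0)\n",
    "        histories.append([h.history['accuracy'], h.history['val_accuracy']])\n",
    "        print(repeat, end=' ')\n",
    "\n",
    "    histories = np.array(histories)\n",
    "    mean_acc = histories.mean(axis=0)\n",
    "    std_acc = histories.std(axis=0)\n",
    "    print()\n",
    "    return mean_acc[0], std_acc[0], mean_acc[1], std_acc[1]\n",
    "\n",
    "# compare the two settings just like in the batch normalization experiment:\n",
    "# plain = repeated_training_reg_dropout(X_train, y_train, X_test, y_test, do_reg_dropout=False)\n",
    "# reg_do = repeated_training_reg_dropout(X_train, y_train, X_test, y_test, do_reg_dropout=True)"
   ]
  },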
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Exercise 3\n",
|
||||
"\n",
|
||||
"This is a very long and complex exercise, that should give you an idea of a real world scenario. Feel free to look at the solution if you feel lost. Also, feel free to run this on Floyd with a GPU, in which case you don't need to download the data.\n",
|
||||
"\n",
|
||||
"If you are running this locally, download and unpack the male/female pictures from [here](https://www.dropbox.com/s/nov493om2jmh2gp/male_female.tgz?dl=0). These images and labels were obtained from [Crowdflower](https://www.crowdflower.com/data-for-everyone/).\n",
|
||||
"\n",
|
||||
"Your goal is to build an image classifier that will recognize the gender of a person from pictures.\n",
|
||||
"\n",
|
||||
"- Have a look at the directory structure and inspect a couple of pictures\n",
|
||||
"- Design a model that will take a color image of size 64x64 as input and return a binary output (female=0/male=1)\n",
|
||||
"- Feel free to introduce any regularization technique in your model (Dropout, Batch Normalization, Weight Regularization)\n",
|
||||
"- Compile your model with an optimizer of your choice\n",
|
||||
"- Using `ImageDataGenerator`, define a train generator that will augment your images with some geometric transformations. Feel free to choose the parameters that make sense to you.\n",
|
||||
"- Define also a test generator, whose only purpose is to rescale the pixels by 1./255\n",
|
||||
"- use the function `flow_from_directory` to generate batches from the train and test folders. Make sure you set the `target_size` to 64x64.\n",
|
||||
"- Use the `model.fit_generator` function to fit the model on the batches generated from the ImageDataGenerator. Since you are streaming and augmenting the data in real time you will have to decide how many batches make an epoch and how many epochs you want to run\n",
|
||||
"- Train your model (you should get to at least 85% accuracy)\n",
|
||||
"- Once you are satisfied with your training, check a few of the misclassified pictures. Are those sensible errors?"
|
||||
]
|
||||
},
|
||||
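  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "*A minimal sketch under stated assumptions: the unpacked pictures are expected in `../data/male_female/train` and `../data/male_female/test` (the real folder names depend on how you unpack the archive), and the architecture, augmentation parameters, `steps_per_epoch` and `epochs` below are illustrative choices, not the only valid ones.*"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "from tensorflow.keras.models import Sequential\n",
    "from tensorflow.keras.layers import Conv2D, MaxPool2D, Flatten, Dense, Dropout\n",
    "from tensorflow.keras.preprocessing.image import ImageDataGenerator\n",
    "import tensorflow.keras.backend as K\n",
    "\n",
    "K.clear_session()\n",
    "\n",
    "# small convnet for 64x64 color images with a single sigmoid output\n",
    "model = Sequential()\n",
    "model.add(Conv2D(32, (3, 3), activation='relu', input_shape=(64, 64, 3)))\n",
    "model.add(MaxPool2D(pool_size=(2, 2)))\n",
    "model.add(Conv2D(64, (3, 3), activation='relu'))\n",
    "model.add(MaxPool2D(pool_size=(2, 2)))\n",
    "model.add(Flatten())\n",
    "model.add(Dense(128, activation='relu'))\n",
    "model.add(Dropout(0.5))\n",
    "model.add(Dense(1, activation='sigmoid'))\n",
    "\n",
    "model.compile(loss='binary_crossentropy',\n",
    "              optimizer='adam',\n",
    "              metrics=['accuracy'])\n",
    "\n",
    "# train generator: rescaling plus a few geometric augmentations\n",
    "train_gen = ImageDataGenerator(rescale=1./255,\n",
    "                               width_shift_range=0.1,\n",
    "                               height_shift_range=0.1,\n",
    "                               rotation_range=20,\n",
    "                               zoom_range=0.2,\n",
    "                               horizontal_flip=True)\n",
    "\n",
    "# test generator: only rescale the pixels\n",
    "test_gen = ImageDataGenerator(rescale=1./255)\n",
    "\n",
    "train_flow = train_gen.flow_from_directory('../data/male_female/train',\n",
    "                                           target_size=(64, 64),\n",
    "                                           batch_size=32,\n",
    "                                           class_mode='binary')\n",
    "\n",
    "test_flow = test_gen.flow_from_directory('../data/male_female/test',\n",
    "                                         target_size=(64, 64),\n",
    "                                         batch_size=32,\n",
    "                                         class_mode='binary')\n",
    "\n",
    "# decide how many augmented batches make an epoch and how many epochs to run\n",
    "model.fit_generator(train_flow,\n",
    "                    steps_per_epoch=100,\n",
    "                    epochs=10,\n",
    "                    validation_data=test_flow,\n",
    "                    validation_steps=50)"
   ]
  },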
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.7.10"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
||||
15000
data/HR_comma_sep.csv
Normal file
15000
data/HR_comma_sep.csv
Normal file
File diff suppressed because it is too large
Load Diff
1373
data/banknotes.csv
Normal file
1373
data/banknotes.csv
Normal file
File diff suppressed because it is too large
Load Diff
BIN
data/banknotes.png
Normal file
BIN
data/banknotes.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 563 KiB |
329
data/cansim-0800020-eng-6674700030567901031.csv
Normal file
329
data/cansim-0800020-eng-6674700030567901031.csv
Normal file
@@ -0,0 +1,329 @@
|
||||
"Table 080-0020 Retail trade, sales by the North American Industry Classification System (NAICS), monthly (dollars x 1,000)(2,3,4,5,6)"
|
||||
Survey or program details:
|
||||
Retail Trade Survey (Monthly) - 2406
|
||||
Monthly Retail Trade Survey (Department Store Organizations) - 2408
|
||||
Geography,Canada,Canada
|
||||
North American Industry Classification System (NAICS),Retail trade [44-45] ,Retail trade [44-45]
|
||||
Adjustments,Unadjusted,Seasonally adjusted
|
||||
Jan-1991,12588862,15026890
|
||||
Feb-1991,12154321,15304585
|
||||
Mar-1991,14337072,15413591
|
||||
Apr-1991,15108570,15293409
|
||||
May-1991,17225734,15676083
|
||||
Jun-1991,16342833,15507931
|
||||
Jul-1991,15996243,15556313
|
||||
Aug-1991,16064910,15430645
|
||||
Sep-1991,15015317,15427313
|
||||
Oct-1991,15606864,15410250
|
||||
Nov-1991,16237366,15662790
|
||||
Dec-1991,18381340,15349625
|
||||
Jan-1992,13084963,15477875
|
||||
Feb-1992,12773972,15513022
|
||||
Mar-1992,14198775,15527933
|
||||
Apr-1992,15558390,15708556
|
||||
May-1992,16776396,15642000
|
||||
Jun-1992,16716231,15823989
|
||||
Jul-1992,16637483,15869453
|
||||
Aug-1992,15842075,15844631
|
||||
Sep-1992,15812400,15983239
|
||||
Oct-1992,16562268,16125835
|
||||
Nov-1992,16015869,16049478
|
||||
Dec-1992,19682921,16095727
|
||||
Jan-1993,13672727,16408864
|
||||
Feb-1993,12900733,16239039
|
||||
Mar-1993,15211859,16314960
|
||||
Apr-1993,16642246,16577426
|
||||
May-1993,17442405,16472045
|
||||
Jun-1993,17444074,16351907
|
||||
Jul-1993,17610326,16712914
|
||||
Aug-1993,16645660,16703413
|
||||
Sep-1993,16790330,16755338
|
||||
Oct-1993,16921755,16819382
|
||||
Nov-1993,17124609,16958202
|
||||
Dec-1993,20928208,17021436
|
||||
Jan-1994,14005058,17076164
|
||||
Feb-1994,13799079,17393150
|
||||
Mar-1994,16865149,17890903
|
||||
Apr-1994,17494589,17507688
|
||||
May-1994,18739509,17775079
|
||||
Jun-1994,19323481,17882069
|
||||
Jul-1994,18297834,17785800
|
||||
Aug-1994,18101290,17881976
|
||||
Sep-1994,18161417,17952647
|
||||
Oct-1994,17998875,18193703
|
||||
Nov-1994,18516766,18264676
|
||||
Dec-1994,22688647,18387840
|
||||
Jan-1995,14927996,18337565
|
||||
Feb-1995,14520623,18259470
|
||||
Mar-1995,17457477,18225708
|
||||
Apr-1995,17774107,18217661
|
||||
May-1995,19740889,18333051
|
||||
Jun-1995,20319460,18503481
|
||||
Jul-1995,18747299,18407254
|
||||
Aug-1995,19280525,18720783
|
||||
Sep-1995,18860566,18628735
|
||||
Oct-1995,18177152,18412692
|
||||
Nov-1995,18962903,18506305
|
||||
Dec-1995,22308880,18525162
|
||||
Jan-1996,15379086,18531426
|
||||
Feb-1996,15521981,18657652
|
||||
Mar-1996,17613469,18774049
|
||||
Apr-1996,18421405,18739023
|
||||
May-1996,20624568,18758009
|
||||
Jun-1996,20099348,18977805
|
||||
Jul-1996,19423284,18914063
|
||||
Aug-1996,19889359,19071178
|
||||
Sep-1996,18589571,19019991
|
||||
Oct-1996,19686383,19488074
|
||||
Nov-1996,20293165,19820074
|
||||
Dec-1996,22897980,19688254
|
||||
Jan-1997,16882321,19857365
|
||||
Feb-1997,16033605,20141489
|
||||
Mar-1997,18225453,20056949
|
||||
Apr-1997,20432272,20215340
|
||||
May-1997,22594727,20386953
|
||||
Jun-1997,21577744,20466883
|
||||
Jul-1997,21570145,20681230
|
||||
Aug-1997,21065784,20605349
|
||||
Sep-1997,20532806,20646564
|
||||
Oct-1997,21491163,21037021
|
||||
Nov-1997,20904746,20973561
|
||||
Dec-1997,25507180,21749250
|
||||
Jan-1998,17736224,20776214
|
||||
Feb-1998,16797018,21153779
|
||||
Mar-1998,19408883,21041225
|
||||
Apr-1998,21501677,21504619
|
||||
May-1998,23312947,21504262
|
||||
Jun-1998,22654803,21247311
|
||||
Jul-1998,22594775,21385620
|
||||
Aug-1998,21512734,21335980
|
||||
Sep-1998,21645562,21660645
|
||||
Oct-1998,21994089,21565457
|
||||
Nov-1998,21461344,21714061
|
||||
Dec-1998,25874332,21605209
|
||||
Jan-1999,18438151,22074043
|
||||
Feb-1999,17658952,22286260
|
||||
Mar-1999,21082603,22402680
|
||||
Apr-1999,22587382,22389229
|
||||
May-1999,23892100,22300484
|
||||
Jun-1999,24036828,22450487
|
||||
Jul-1999,23994614,22614164
|
||||
Aug-1999,22926469,22806183
|
||||
Sep-1999,22984278,22817165
|
||||
Oct-1999,22813633,22967565
|
||||
Nov-1999,22972959,23036527
|
||||
Dec-1999,28143999,23387176
|
||||
Jan-2000,19324692,23434451
|
||||
Feb-2000,19140440,23378327
|
||||
Mar-2000,22918829,23813646
|
||||
Apr-2000,22914155,23537859
|
||||
May-2000,25659687,23644020
|
||||
Jun-2000,25945400,23841011
|
||||
Jul-2000,24821347,24204193
|
||||
Aug-2000,25102965,24266358
|
||||
Sep-2000,24710257,24495699
|
||||
Oct-2000,23687124,24330740
|
||||
Nov-2000,24556357,24373656
|
||||
Dec-2000,29057176,24518477
|
||||
Jan-2001,20607642,24640517
|
||||
Feb-2001,19444855,24477976
|
||||
Mar-2001,23652255,24583988
|
||||
Apr-2001,24370700,24944482
|
||||
May-2001,27585889,25143416
|
||||
Jun-2001,27243919,25190078
|
||||
Jul-2001,25507932,24813831
|
||||
Aug-2001,26322941,25017925
|
||||
Sep-2001,24263969,24703734
|
||||
Oct-2001,24917747,25149772
|
||||
Nov-2001,26048646,25657476
|
||||
Dec-2001,30481412,26124705
|
||||
Jan-2002,22361219,26496729
|
||||
Feb-2002,20787209,26105990
|
||||
Mar-2002,24642692,26072633
|
||||
Apr-2002,26405170,26657565
|
||||
May-2002,29087583,26165998
|
||||
Jun-2002,28363263,26768924
|
||||
Jul-2002,27912328,26620588
|
||||
Aug-2002,28202300,26767279
|
||||
Sep-2002,26054411,26620259
|
||||
Oct-2002,27131743,27061844
|
||||
Nov-2002,27276942,27015215
|
||||
Dec-2002,31300554,27172388
|
||||
Jan-2003,23301871,27137545
|
||||
Feb-2003,21980804,27622538
|
||||
Mar-2003,25468203,27275170
|
||||
Apr-2003,27059495,27176306
|
||||
May-2003,30417563,27484615
|
||||
Jun-2003,28912102,27569750
|
||||
Jul-2003,29492832,27707861
|
||||
Aug-2003,29102135,28020755
|
||||
Sep-2003,27467571,27841344
|
||||
Oct-2003,28223631,27825024
|
||||
Nov-2003,27391422,27777531
|
||||
Dec-2003,32325789,27704978
|
||||
Jan-2004,23778728,27935993
|
||||
Feb-2004,23008594,28719948
|
||||
Mar-2004,26967793,28689514
|
||||
Apr-2004,28592026,28254086
|
||||
May-2004,30479247,28554094
|
||||
Jun-2004,30711705,28550528
|
||||
Jul-2004,30898334,28616168
|
||||
Aug-2004,29535183,28836665
|
||||
Sep-2004,29245397,29243662
|
||||
Oct-2004,29445711,29561177
|
||||
Nov-2004,29232659,29901036
|
||||
Dec-2004,34561103,29593609
|
||||
Jan-2005,24498615,29888781
|
||||
Feb-2005,24028226,30460620
|
||||
Mar-2005,28600602,30006264
|
||||
Apr-2005,30600811,29940271
|
||||
May-2005,31948565,29935878
|
||||
Jun-2005,32967426,30590992
|
||||
Jul-2005,32620077,30800241
|
||||
Aug-2005,32025283,30647393
|
||||
Sep-2005,30914826,30600008
|
||||
Oct-2005,30241532,30887481
|
||||
Nov-2005,30828069,31012789
|
||||
Dec-2005,36726743,31230056
|
||||
Jan-2006,25993203,31747679
|
||||
Feb-2006,25128165,31744450
|
||||
Mar-2006,30760061,31871717
|
||||
Apr-2006,32106585,32354405
|
||||
May-2006,34894460,32048264
|
||||
Jun-2006,35049112,32242363
|
||||
Jul-2006,34341547,33040218
|
||||
Aug-2006,35045180,33007575
|
||||
Sep-2006,33056559,32492838
|
||||
Oct-2006,31830349,32595465
|
||||
Nov-2006,32663281,32814138
|
||||
Dec-2006,38605976,33515367
|
||||
Jan-2007,27777968,33221023
|
||||
Feb-2007,26548520,33466188
|
||||
Mar-2007,32818504,33910296
|
||||
Apr-2007,33621240,34385868
|
||||
May-2007,38434319,34789277
|
||||
Jun-2007,37555708,34436278
|
||||
Jul-2007,35635889,34430726
|
||||
Aug-2007,36978090,34725483
|
||||
Sep-2007,34057842,34341226
|
||||
Oct-2007,34070363,34305870
|
||||
Nov-2007,35091406,34958995
|
||||
Dec-2007,40006665,35625285
|
||||
Jan-2008,30525699,35933156
|
||||
Feb-2008,29418898,35526909
|
||||
Mar-2008,32925876,35530142
|
||||
Apr-2008,36272111,35914167
|
||||
May-2008,39778972,36006888
|
||||
Jun-2008,37842321,36400129
|
||||
Jul-2008,38632038,36403867
|
||||
Aug-2008,37775417,36137537
|
||||
Sep-2008,36138751,36390521
|
||||
Oct-2008,36158245,35747571
|
||||
Nov-2008,34230901,34661690
|
||||
Dec-2008,38256712,33303365
|
||||
Jan-2009,29192654,33747849
|
||||
Feb-2009,26804723,33869426
|
||||
Mar-2009,31356949,33894022
|
||||
Apr-2009,33942769,33930673
|
||||
May-2009,37316515,34345160
|
||||
Jun-2009,36865690,34735113
|
||||
Jul-2009,37191480,34755444
|
||||
Aug-2009,36049418,35039739
|
||||
Sep-2009,35537357,35239195
|
||||
Oct-2009,36133694,35330869
|
||||
Nov-2009,34354756,35250896
|
||||
Dec-2009,40969765,35577384
|
||||
Jan-2010,30668321,36080033
|
||||
Feb-2010,28632551,36051781
|
||||
Mar-2010,34967182,37001843
|
||||
Apr-2010,36469949,36148495
|
||||
May-2010,38424455,36041318
|
||||
Jun-2010,38973462,36350588
|
||||
Jul-2010,38932294,36295314
|
||||
Aug-2010,37395330,36515170
|
||||
Sep-2010,36923390,36632898
|
||||
Oct-2010,37014326,36879707
|
||||
Nov-2010,37408825,37568029
|
||||
Dec-2010,43147947,37392857
|
||||
Jan-2011,31191594,37392259
|
||||
Feb-2011,29797949,37437926
|
||||
Mar-2011,36099866,37617167
|
||||
Apr-2011,38035760,37755408
|
||||
May-2011,40046516,37723958
|
||||
Jun-2011,40839556,38228307
|
||||
Jul-2011,39832282,37925826
|
||||
Aug-2011,39541248,37976798
|
||||
Sep-2011,38877263,38181654
|
||||
Oct-2011,38203872,38623692
|
||||
Nov-2011,39174736,38779553
|
||||
Dec-2011,45089701,39087795
|
||||
Jan-2012,32361808,39102435
|
||||
Feb-2012,32087072,38968001
|
||||
Mar-2012,37933733,39201228
|
||||
Apr-2012,37775805,38920526
|
||||
May-2012,42584571,38841267
|
||||
Jun-2012,41789242,38773515
|
||||
Jul-2012,40130908,38854126
|
||||
Aug-2012,41321526,38854279
|
||||
Sep-2012,39069513,39058649
|
||||
Oct-2012,39487597,39277317
|
||||
Nov-2012,40095933,39224805
|
||||
Dec-2012,43489091,39050651
|
||||
Jan-2013,33574671,39523536
|
||||
Feb-2013,31636843,39710038
|
||||
Mar-2013,37561378,39811962
|
||||
Apr-2013,39401295,39655045
|
||||
May-2013,44577490,40295930
|
||||
Jun-2013,42169145,39992542
|
||||
Jul-2013,42417829,40388278
|
||||
Aug-2013,43237460,40660890
|
||||
Sep-2013,40170270,40631164
|
||||
Oct-2013,41560987,40813306
|
||||
Nov-2013,41893714,40798569
|
||||
Dec-2013,44796794,40716614
|
||||
Jan-2014,34980327,40976155
|
||||
Feb-2014,32905708,41256280
|
||||
Mar-2014,38460091,41242344
|
||||
Apr-2014,41809373,41852467
|
||||
May-2014,46379543,41906455
|
||||
Jun-2014,44178750,42457216
|
||||
Jul-2014,45285331,42562972
|
||||
Aug-2014,44359733,42456214
|
||||
Sep-2014,43017529,42685882
|
||||
Oct-2014,43775478,42690228
|
||||
Nov-2014,42968326,42603501
|
||||
Dec-2014,46887481,42317955
|
||||
Jan-2015,34820395,40971992
|
||||
Feb-2015,33174923,41801906
|
||||
Mar-2015,39444291,42420253
|
||||
Apr-2015,42297319,42331926
|
||||
May-2015,46670930,42721761
|
||||
Jun-2015,45584849,42989280
|
||||
Jul-2015,46295664,43154020
|
||||
Aug-2015,44793347,43309509
|
||||
Sep-2015,43999627,43303889
|
||||
Oct-2015,44507776,43378904
|
||||
Nov-2015,43696305,43921767
|
||||
Dec-2015,48097829,43078048
|
||||
Jan-2016,36415115,43977584
|
||||
Feb-2016,35649450,44205540
|
||||
Mar-2016,41403762,43839996
|
||||
Apr-2016,44881587,44181416
|
||||
May-2016,47337082,44176591
|
||||
Jun-2016,47399117,44162244
|
||||
Jul-2016,46321314,44110862
|
||||
Aug-2016,46201453,44216280
|
||||
Sep-2016,45528702,44534797
|
||||
Oct-2016,44770113,45061618
|
||||
Nov-2016,46285062,45141762
|
||||
Dec-2016,50016137,44943929
|
||||
Jan-2017,37628452,45952103
|
||||
Footnotes:
|
||||
2,The total for retail trade excludes North American Industry Classification System (NAICS) 454.
|
||||
3,"This CANSIM table replaces archived table 80-0014, 80-0015 and 80-0017."
|
||||
4,"Quality indicator: Code A=Excellent. Code B=Very good. Code C=Good. Code D=Acceptable. Code E=Poor, use with caution. Code F=Unreliable (data not published)."
|
||||
5,"Data for Northwest Territories includes Nunavut, from 1991-01 to 1998-12."
|
||||
6,"In April 2013, data from 2004 onwards will be based on the 2012 North American Industry Classification System (NAICS). Data prior to 2004 will continue to be based on the 2007 North American Industry Classification System (NAICS)."
|
||||
Source:
|
||||
"Statistics Canada. Table 080-0020 - Retail trade, sales by the North American Industry Classification System (NAICS), monthly (dollars)"
|
||||
"(accessed: April 19, 2017)"
|
||||
|
769
data/diabetes.csv
Normal file
@@ -0,0 +1,769 @@
|
||||
Pregnancies,Glucose,BloodPressure,SkinThickness,Insulin,BMI,DiabetesPedigreeFunction,Age,Outcome
|
||||
6,148,72,35,0,33.6,0.627,50,1
|
||||
1,85,66,29,0,26.6,0.351,31,0
|
||||
8,183,64,0,0,23.3,0.672,32,1
|
||||
1,89,66,23,94,28.1,0.167,21,0
|
||||
0,137,40,35,168,43.1,2.288,33,1
|
||||
5,116,74,0,0,25.6,0.201,30,0
|
||||
3,78,50,32,88,31,0.248,26,1
|
||||
10,115,0,0,0,35.3,0.134,29,0
|
||||
2,197,70,45,543,30.5,0.158,53,1
|
||||
8,125,96,0,0,0,0.232,54,1
|
||||
4,110,92,0,0,37.6,0.191,30,0
|
||||
10,168,74,0,0,38,0.537,34,1
|
||||
10,139,80,0,0,27.1,1.441,57,0
|
||||
1,189,60,23,846,30.1,0.398,59,1
|
||||
5,166,72,19,175,25.8,0.587,51,1
|
||||
7,100,0,0,0,30,0.484,32,1
|
||||
0,118,84,47,230,45.8,0.551,31,1
|
||||
7,107,74,0,0,29.6,0.254,31,1
|
||||
1,103,30,38,83,43.3,0.183,33,0
|
||||
1,115,70,30,96,34.6,0.529,32,1
|
||||
3,126,88,41,235,39.3,0.704,27,0
|
||||
8,99,84,0,0,35.4,0.388,50,0
|
||||
7,196,90,0,0,39.8,0.451,41,1
|
||||
9,119,80,35,0,29,0.263,29,1
|
||||
11,143,94,33,146,36.6,0.254,51,1
|
||||
10,125,70,26,115,31.1,0.205,41,1
|
||||
7,147,76,0,0,39.4,0.257,43,1
|
||||
1,97,66,15,140,23.2,0.487,22,0
|
||||
13,145,82,19,110,22.2,0.245,57,0
|
||||
5,117,92,0,0,34.1,0.337,38,0
|
||||
5,109,75,26,0,36,0.546,60,0
|
||||
3,158,76,36,245,31.6,0.851,28,1
|
||||
3,88,58,11,54,24.8,0.267,22,0
|
||||
6,92,92,0,0,19.9,0.188,28,0
|
||||
10,122,78,31,0,27.6,0.512,45,0
|
||||
4,103,60,33,192,24,0.966,33,0
|
||||
11,138,76,0,0,33.2,0.42,35,0
|
||||
9,102,76,37,0,32.9,0.665,46,1
|
||||
2,90,68,42,0,38.2,0.503,27,1
|
||||
4,111,72,47,207,37.1,1.39,56,1
|
||||
3,180,64,25,70,34,0.271,26,0
|
||||
7,133,84,0,0,40.2,0.696,37,0
|
||||
7,106,92,18,0,22.7,0.235,48,0
|
||||
9,171,110,24,240,45.4,0.721,54,1
|
||||
7,159,64,0,0,27.4,0.294,40,0
|
||||
0,180,66,39,0,42,1.893,25,1
|
||||
1,146,56,0,0,29.7,0.564,29,0
|
||||
2,71,70,27,0,28,0.586,22,0
|
||||
7,103,66,32,0,39.1,0.344,31,1
|
||||
7,105,0,0,0,0,0.305,24,0
|
||||
1,103,80,11,82,19.4,0.491,22,0
|
||||
1,101,50,15,36,24.2,0.526,26,0
|
||||
5,88,66,21,23,24.4,0.342,30,0
|
||||
8,176,90,34,300,33.7,0.467,58,1
|
||||
7,150,66,42,342,34.7,0.718,42,0
|
||||
1,73,50,10,0,23,0.248,21,0
|
||||
7,187,68,39,304,37.7,0.254,41,1
|
||||
0,100,88,60,110,46.8,0.962,31,0
|
||||
0,146,82,0,0,40.5,1.781,44,0
|
||||
0,105,64,41,142,41.5,0.173,22,0
|
||||
2,84,0,0,0,0,0.304,21,0
|
||||
8,133,72,0,0,32.9,0.27,39,1
|
||||
5,44,62,0,0,25,0.587,36,0
|
||||
2,141,58,34,128,25.4,0.699,24,0
|
||||
7,114,66,0,0,32.8,0.258,42,1
|
||||
5,99,74,27,0,29,0.203,32,0
|
||||
0,109,88,30,0,32.5,0.855,38,1
|
||||
2,109,92,0,0,42.7,0.845,54,0
|
||||
1,95,66,13,38,19.6,0.334,25,0
|
||||
4,146,85,27,100,28.9,0.189,27,0
|
||||
2,100,66,20,90,32.9,0.867,28,1
|
||||
5,139,64,35,140,28.6,0.411,26,0
|
||||
13,126,90,0,0,43.4,0.583,42,1
|
||||
4,129,86,20,270,35.1,0.231,23,0
|
||||
1,79,75,30,0,32,0.396,22,0
|
||||
1,0,48,20,0,24.7,0.14,22,0
|
||||
7,62,78,0,0,32.6,0.391,41,0
|
||||
5,95,72,33,0,37.7,0.37,27,0
|
||||
0,131,0,0,0,43.2,0.27,26,1
|
||||
2,112,66,22,0,25,0.307,24,0
|
||||
3,113,44,13,0,22.4,0.14,22,0
|
||||
2,74,0,0,0,0,0.102,22,0
|
||||
7,83,78,26,71,29.3,0.767,36,0
|
||||
0,101,65,28,0,24.6,0.237,22,0
|
||||
5,137,108,0,0,48.8,0.227,37,1
|
||||
2,110,74,29,125,32.4,0.698,27,0
|
||||
13,106,72,54,0,36.6,0.178,45,0
|
||||
2,100,68,25,71,38.5,0.324,26,0
|
||||
15,136,70,32,110,37.1,0.153,43,1
|
||||
1,107,68,19,0,26.5,0.165,24,0
|
||||
1,80,55,0,0,19.1,0.258,21,0
|
||||
4,123,80,15,176,32,0.443,34,0
|
||||
7,81,78,40,48,46.7,0.261,42,0
|
||||
4,134,72,0,0,23.8,0.277,60,1
|
||||
2,142,82,18,64,24.7,0.761,21,0
|
||||
6,144,72,27,228,33.9,0.255,40,0
|
||||
2,92,62,28,0,31.6,0.13,24,0
|
||||
1,71,48,18,76,20.4,0.323,22,0
|
||||
6,93,50,30,64,28.7,0.356,23,0
|
||||
1,122,90,51,220,49.7,0.325,31,1
|
||||
1,163,72,0,0,39,1.222,33,1
|
||||
1,151,60,0,0,26.1,0.179,22,0
|
||||
0,125,96,0,0,22.5,0.262,21,0
|
||||
1,81,72,18,40,26.6,0.283,24,0
|
||||
2,85,65,0,0,39.6,0.93,27,0
|
||||
1,126,56,29,152,28.7,0.801,21,0
|
||||
1,96,122,0,0,22.4,0.207,27,0
|
||||
4,144,58,28,140,29.5,0.287,37,0
|
||||
3,83,58,31,18,34.3,0.336,25,0
|
||||
0,95,85,25,36,37.4,0.247,24,1
|
||||
3,171,72,33,135,33.3,0.199,24,1
|
||||
8,155,62,26,495,34,0.543,46,1
|
||||
1,89,76,34,37,31.2,0.192,23,0
|
||||
4,76,62,0,0,34,0.391,25,0
|
||||
7,160,54,32,175,30.5,0.588,39,1
|
||||
4,146,92,0,0,31.2,0.539,61,1
|
||||
5,124,74,0,0,34,0.22,38,1
|
||||
5,78,48,0,0,33.7,0.654,25,0
|
||||
4,97,60,23,0,28.2,0.443,22,0
|
||||
4,99,76,15,51,23.2,0.223,21,0
|
||||
0,162,76,56,100,53.2,0.759,25,1
|
||||
6,111,64,39,0,34.2,0.26,24,0
|
||||
2,107,74,30,100,33.6,0.404,23,0
|
||||
5,132,80,0,0,26.8,0.186,69,0
|
||||
0,113,76,0,0,33.3,0.278,23,1
|
||||
1,88,30,42,99,55,0.496,26,1
|
||||
3,120,70,30,135,42.9,0.452,30,0
|
||||
1,118,58,36,94,33.3,0.261,23,0
|
||||
1,117,88,24,145,34.5,0.403,40,1
|
||||
0,105,84,0,0,27.9,0.741,62,1
|
||||
4,173,70,14,168,29.7,0.361,33,1
|
||||
9,122,56,0,0,33.3,1.114,33,1
|
||||
3,170,64,37,225,34.5,0.356,30,1
|
||||
8,84,74,31,0,38.3,0.457,39,0
|
||||
2,96,68,13,49,21.1,0.647,26,0
|
||||
2,125,60,20,140,33.8,0.088,31,0
|
||||
0,100,70,26,50,30.8,0.597,21,0
|
||||
0,93,60,25,92,28.7,0.532,22,0
|
||||
0,129,80,0,0,31.2,0.703,29,0
|
||||
5,105,72,29,325,36.9,0.159,28,0
|
||||
3,128,78,0,0,21.1,0.268,55,0
|
||||
5,106,82,30,0,39.5,0.286,38,0
|
||||
2,108,52,26,63,32.5,0.318,22,0
|
||||
10,108,66,0,0,32.4,0.272,42,1
|
||||
4,154,62,31,284,32.8,0.237,23,0
|
||||
0,102,75,23,0,0,0.572,21,0
|
||||
9,57,80,37,0,32.8,0.096,41,0
|
||||
2,106,64,35,119,30.5,1.4,34,0
|
||||
5,147,78,0,0,33.7,0.218,65,0
|
||||
2,90,70,17,0,27.3,0.085,22,0
|
||||
1,136,74,50,204,37.4,0.399,24,0
|
||||
4,114,65,0,0,21.9,0.432,37,0
|
||||
9,156,86,28,155,34.3,1.189,42,1
|
||||
1,153,82,42,485,40.6,0.687,23,0
|
||||
8,188,78,0,0,47.9,0.137,43,1
|
||||
7,152,88,44,0,50,0.337,36,1
|
||||
2,99,52,15,94,24.6,0.637,21,0
|
||||
1,109,56,21,135,25.2,0.833,23,0
|
||||
2,88,74,19,53,29,0.229,22,0
|
||||
17,163,72,41,114,40.9,0.817,47,1
|
||||
4,151,90,38,0,29.7,0.294,36,0
|
||||
7,102,74,40,105,37.2,0.204,45,0
|
||||
0,114,80,34,285,44.2,0.167,27,0
|
||||
2,100,64,23,0,29.7,0.368,21,0
|
||||
0,131,88,0,0,31.6,0.743,32,1
|
||||
6,104,74,18,156,29.9,0.722,41,1
|
||||
3,148,66,25,0,32.5,0.256,22,0
|
||||
4,120,68,0,0,29.6,0.709,34,0
|
||||
4,110,66,0,0,31.9,0.471,29,0
|
||||
3,111,90,12,78,28.4,0.495,29,0
|
||||
6,102,82,0,0,30.8,0.18,36,1
|
||||
6,134,70,23,130,35.4,0.542,29,1
|
||||
2,87,0,23,0,28.9,0.773,25,0
|
||||
1,79,60,42,48,43.5,0.678,23,0
|
||||
2,75,64,24,55,29.7,0.37,33,0
|
||||
8,179,72,42,130,32.7,0.719,36,1
|
||||
6,85,78,0,0,31.2,0.382,42,0
|
||||
0,129,110,46,130,67.1,0.319,26,1
|
||||
5,143,78,0,0,45,0.19,47,0
|
||||
5,130,82,0,0,39.1,0.956,37,1
|
||||
6,87,80,0,0,23.2,0.084,32,0
|
||||
0,119,64,18,92,34.9,0.725,23,0
|
||||
1,0,74,20,23,27.7,0.299,21,0
|
||||
5,73,60,0,0,26.8,0.268,27,0
|
||||
4,141,74,0,0,27.6,0.244,40,0
|
||||
7,194,68,28,0,35.9,0.745,41,1
|
||||
8,181,68,36,495,30.1,0.615,60,1
|
||||
1,128,98,41,58,32,1.321,33,1
|
||||
8,109,76,39,114,27.9,0.64,31,1
|
||||
5,139,80,35,160,31.6,0.361,25,1
|
||||
3,111,62,0,0,22.6,0.142,21,0
|
||||
9,123,70,44,94,33.1,0.374,40,0
|
||||
7,159,66,0,0,30.4,0.383,36,1
|
||||
11,135,0,0,0,52.3,0.578,40,1
|
||||
8,85,55,20,0,24.4,0.136,42,0
|
||||
5,158,84,41,210,39.4,0.395,29,1
|
||||
1,105,58,0,0,24.3,0.187,21,0
|
||||
3,107,62,13,48,22.9,0.678,23,1
|
||||
4,109,64,44,99,34.8,0.905,26,1
|
||||
4,148,60,27,318,30.9,0.15,29,1
|
||||
0,113,80,16,0,31,0.874,21,0
|
||||
1,138,82,0,0,40.1,0.236,28,0
|
||||
0,108,68,20,0,27.3,0.787,32,0
|
||||
2,99,70,16,44,20.4,0.235,27,0
|
||||
6,103,72,32,190,37.7,0.324,55,0
|
||||
5,111,72,28,0,23.9,0.407,27,0
|
||||
8,196,76,29,280,37.5,0.605,57,1
|
||||
5,162,104,0,0,37.7,0.151,52,1
|
||||
1,96,64,27,87,33.2,0.289,21,0
|
||||
7,184,84,33,0,35.5,0.355,41,1
|
||||
2,81,60,22,0,27.7,0.29,25,0
|
||||
0,147,85,54,0,42.8,0.375,24,0
|
||||
7,179,95,31,0,34.2,0.164,60,0
|
||||
0,140,65,26,130,42.6,0.431,24,1
|
||||
9,112,82,32,175,34.2,0.26,36,1
|
||||
12,151,70,40,271,41.8,0.742,38,1
|
||||
5,109,62,41,129,35.8,0.514,25,1
|
||||
6,125,68,30,120,30,0.464,32,0
|
||||
5,85,74,22,0,29,1.224,32,1
|
||||
5,112,66,0,0,37.8,0.261,41,1
|
||||
0,177,60,29,478,34.6,1.072,21,1
|
||||
2,158,90,0,0,31.6,0.805,66,1
|
||||
7,119,0,0,0,25.2,0.209,37,0
|
||||
7,142,60,33,190,28.8,0.687,61,0
|
||||
1,100,66,15,56,23.6,0.666,26,0
|
||||
1,87,78,27,32,34.6,0.101,22,0
|
||||
0,101,76,0,0,35.7,0.198,26,0
|
||||
3,162,52,38,0,37.2,0.652,24,1
|
||||
4,197,70,39,744,36.7,2.329,31,0
|
||||
0,117,80,31,53,45.2,0.089,24,0
|
||||
4,142,86,0,0,44,0.645,22,1
|
||||
6,134,80,37,370,46.2,0.238,46,1
|
||||
1,79,80,25,37,25.4,0.583,22,0
|
||||
4,122,68,0,0,35,0.394,29,0
|
||||
3,74,68,28,45,29.7,0.293,23,0
|
||||
4,171,72,0,0,43.6,0.479,26,1
|
||||
7,181,84,21,192,35.9,0.586,51,1
|
||||
0,179,90,27,0,44.1,0.686,23,1
|
||||
9,164,84,21,0,30.8,0.831,32,1
|
||||
0,104,76,0,0,18.4,0.582,27,0
|
||||
1,91,64,24,0,29.2,0.192,21,0
|
||||
4,91,70,32,88,33.1,0.446,22,0
|
||||
3,139,54,0,0,25.6,0.402,22,1
|
||||
6,119,50,22,176,27.1,1.318,33,1
|
||||
2,146,76,35,194,38.2,0.329,29,0
|
||||
9,184,85,15,0,30,1.213,49,1
|
||||
10,122,68,0,0,31.2,0.258,41,0
|
||||
0,165,90,33,680,52.3,0.427,23,0
|
||||
9,124,70,33,402,35.4,0.282,34,0
|
||||
1,111,86,19,0,30.1,0.143,23,0
|
||||
9,106,52,0,0,31.2,0.38,42,0
|
||||
2,129,84,0,0,28,0.284,27,0
|
||||
2,90,80,14,55,24.4,0.249,24,0
|
||||
0,86,68,32,0,35.8,0.238,25,0
|
||||
12,92,62,7,258,27.6,0.926,44,1
|
||||
1,113,64,35,0,33.6,0.543,21,1
|
||||
3,111,56,39,0,30.1,0.557,30,0
|
||||
2,114,68,22,0,28.7,0.092,25,0
|
||||
1,193,50,16,375,25.9,0.655,24,0
|
||||
11,155,76,28,150,33.3,1.353,51,1
|
||||
3,191,68,15,130,30.9,0.299,34,0
|
||||
3,141,0,0,0,30,0.761,27,1
|
||||
4,95,70,32,0,32.1,0.612,24,0
|
||||
3,142,80,15,0,32.4,0.2,63,0
|
||||
4,123,62,0,0,32,0.226,35,1
|
||||
5,96,74,18,67,33.6,0.997,43,0
|
||||
0,138,0,0,0,36.3,0.933,25,1
|
||||
2,128,64,42,0,40,1.101,24,0
|
||||
0,102,52,0,0,25.1,0.078,21,0
|
||||
2,146,0,0,0,27.5,0.24,28,1
|
||||
10,101,86,37,0,45.6,1.136,38,1
|
||||
2,108,62,32,56,25.2,0.128,21,0
|
||||
3,122,78,0,0,23,0.254,40,0
|
||||
1,71,78,50,45,33.2,0.422,21,0
|
||||
13,106,70,0,0,34.2,0.251,52,0
|
||||
2,100,70,52,57,40.5,0.677,25,0
|
||||
7,106,60,24,0,26.5,0.296,29,1
|
||||
0,104,64,23,116,27.8,0.454,23,0
|
||||
5,114,74,0,0,24.9,0.744,57,0
|
||||
2,108,62,10,278,25.3,0.881,22,0
|
||||
0,146,70,0,0,37.9,0.334,28,1
|
||||
10,129,76,28,122,35.9,0.28,39,0
|
||||
7,133,88,15,155,32.4,0.262,37,0
|
||||
7,161,86,0,0,30.4,0.165,47,1
|
||||
2,108,80,0,0,27,0.259,52,1
|
||||
7,136,74,26,135,26,0.647,51,0
|
||||
5,155,84,44,545,38.7,0.619,34,0
|
||||
1,119,86,39,220,45.6,0.808,29,1
|
||||
4,96,56,17,49,20.8,0.34,26,0
|
||||
5,108,72,43,75,36.1,0.263,33,0
|
||||
0,78,88,29,40,36.9,0.434,21,0
|
||||
0,107,62,30,74,36.6,0.757,25,1
|
||||
2,128,78,37,182,43.3,1.224,31,1
|
||||
1,128,48,45,194,40.5,0.613,24,1
|
||||
0,161,50,0,0,21.9,0.254,65,0
|
||||
6,151,62,31,120,35.5,0.692,28,0
|
||||
2,146,70,38,360,28,0.337,29,1
|
||||
0,126,84,29,215,30.7,0.52,24,0
|
||||
14,100,78,25,184,36.6,0.412,46,1
|
||||
8,112,72,0,0,23.6,0.84,58,0
|
||||
0,167,0,0,0,32.3,0.839,30,1
|
||||
2,144,58,33,135,31.6,0.422,25,1
|
||||
5,77,82,41,42,35.8,0.156,35,0
|
||||
5,115,98,0,0,52.9,0.209,28,1
|
||||
3,150,76,0,0,21,0.207,37,0
|
||||
2,120,76,37,105,39.7,0.215,29,0
|
||||
10,161,68,23,132,25.5,0.326,47,1
|
||||
0,137,68,14,148,24.8,0.143,21,0
|
||||
0,128,68,19,180,30.5,1.391,25,1
|
||||
2,124,68,28,205,32.9,0.875,30,1
|
||||
6,80,66,30,0,26.2,0.313,41,0
|
||||
0,106,70,37,148,39.4,0.605,22,0
|
||||
2,155,74,17,96,26.6,0.433,27,1
|
||||
3,113,50,10,85,29.5,0.626,25,0
|
||||
7,109,80,31,0,35.9,1.127,43,1
|
||||
2,112,68,22,94,34.1,0.315,26,0
|
||||
3,99,80,11,64,19.3,0.284,30,0
|
||||
3,182,74,0,0,30.5,0.345,29,1
|
||||
3,115,66,39,140,38.1,0.15,28,0
|
||||
6,194,78,0,0,23.5,0.129,59,1
|
||||
4,129,60,12,231,27.5,0.527,31,0
|
||||
3,112,74,30,0,31.6,0.197,25,1
|
||||
0,124,70,20,0,27.4,0.254,36,1
|
||||
13,152,90,33,29,26.8,0.731,43,1
|
||||
2,112,75,32,0,35.7,0.148,21,0
|
||||
1,157,72,21,168,25.6,0.123,24,0
|
||||
1,122,64,32,156,35.1,0.692,30,1
|
||||
10,179,70,0,0,35.1,0.2,37,0
|
||||
2,102,86,36,120,45.5,0.127,23,1
|
||||
6,105,70,32,68,30.8,0.122,37,0
|
||||
8,118,72,19,0,23.1,1.476,46,0
|
||||
2,87,58,16,52,32.7,0.166,25,0
|
||||
1,180,0,0,0,43.3,0.282,41,1
|
||||
12,106,80,0,0,23.6,0.137,44,0
|
||||
1,95,60,18,58,23.9,0.26,22,0
|
||||
0,165,76,43,255,47.9,0.259,26,0
|
||||
0,117,0,0,0,33.8,0.932,44,0
|
||||
5,115,76,0,0,31.2,0.343,44,1
|
||||
9,152,78,34,171,34.2,0.893,33,1
|
||||
7,178,84,0,0,39.9,0.331,41,1
|
||||
1,130,70,13,105,25.9,0.472,22,0
|
||||
1,95,74,21,73,25.9,0.673,36,0
|
||||
1,0,68,35,0,32,0.389,22,0
|
||||
5,122,86,0,0,34.7,0.29,33,0
|
||||
8,95,72,0,0,36.8,0.485,57,0
|
||||
8,126,88,36,108,38.5,0.349,49,0
|
||||
1,139,46,19,83,28.7,0.654,22,0
|
||||
3,116,0,0,0,23.5,0.187,23,0
|
||||
3,99,62,19,74,21.8,0.279,26,0
|
||||
5,0,80,32,0,41,0.346,37,1
|
||||
4,92,80,0,0,42.2,0.237,29,0
|
||||
4,137,84,0,0,31.2,0.252,30,0
|
||||
3,61,82,28,0,34.4,0.243,46,0
|
||||
1,90,62,12,43,27.2,0.58,24,0
|
||||
3,90,78,0,0,42.7,0.559,21,0
|
||||
9,165,88,0,0,30.4,0.302,49,1
|
||||
1,125,50,40,167,33.3,0.962,28,1
|
||||
13,129,0,30,0,39.9,0.569,44,1
|
||||
12,88,74,40,54,35.3,0.378,48,0
|
||||
1,196,76,36,249,36.5,0.875,29,1
|
||||
5,189,64,33,325,31.2,0.583,29,1
|
||||
5,158,70,0,0,29.8,0.207,63,0
|
||||
5,103,108,37,0,39.2,0.305,65,0
|
||||
4,146,78,0,0,38.5,0.52,67,1
|
||||
4,147,74,25,293,34.9,0.385,30,0
|
||||
5,99,54,28,83,34,0.499,30,0
|
||||
6,124,72,0,0,27.6,0.368,29,1
|
||||
0,101,64,17,0,21,0.252,21,0
|
||||
3,81,86,16,66,27.5,0.306,22,0
|
||||
1,133,102,28,140,32.8,0.234,45,1
|
||||
3,173,82,48,465,38.4,2.137,25,1
|
||||
0,118,64,23,89,0,1.731,21,0
|
||||
0,84,64,22,66,35.8,0.545,21,0
|
||||
2,105,58,40,94,34.9,0.225,25,0
|
||||
2,122,52,43,158,36.2,0.816,28,0
|
||||
12,140,82,43,325,39.2,0.528,58,1
|
||||
0,98,82,15,84,25.2,0.299,22,0
|
||||
1,87,60,37,75,37.2,0.509,22,0
|
||||
4,156,75,0,0,48.3,0.238,32,1
|
||||
0,93,100,39,72,43.4,1.021,35,0
|
||||
1,107,72,30,82,30.8,0.821,24,0
|
||||
0,105,68,22,0,20,0.236,22,0
|
||||
1,109,60,8,182,25.4,0.947,21,0
|
||||
1,90,62,18,59,25.1,1.268,25,0
|
||||
1,125,70,24,110,24.3,0.221,25,0
|
||||
1,119,54,13,50,22.3,0.205,24,0
|
||||
5,116,74,29,0,32.3,0.66,35,1
|
||||
8,105,100,36,0,43.3,0.239,45,1
|
||||
5,144,82,26,285,32,0.452,58,1
|
||||
3,100,68,23,81,31.6,0.949,28,0
|
||||
1,100,66,29,196,32,0.444,42,0
|
||||
5,166,76,0,0,45.7,0.34,27,1
|
||||
1,131,64,14,415,23.7,0.389,21,0
|
||||
4,116,72,12,87,22.1,0.463,37,0
|
||||
4,158,78,0,0,32.9,0.803,31,1
|
||||
2,127,58,24,275,27.7,1.6,25,0
|
||||
3,96,56,34,115,24.7,0.944,39,0
|
||||
0,131,66,40,0,34.3,0.196,22,1
|
||||
3,82,70,0,0,21.1,0.389,25,0
|
||||
3,193,70,31,0,34.9,0.241,25,1
|
||||
4,95,64,0,0,32,0.161,31,1
|
||||
6,137,61,0,0,24.2,0.151,55,0
|
||||
5,136,84,41,88,35,0.286,35,1
|
||||
9,72,78,25,0,31.6,0.28,38,0
|
||||
5,168,64,0,0,32.9,0.135,41,1
|
||||
2,123,48,32,165,42.1,0.52,26,0
|
||||
4,115,72,0,0,28.9,0.376,46,1
|
||||
0,101,62,0,0,21.9,0.336,25,0
|
||||
8,197,74,0,0,25.9,1.191,39,1
|
||||
1,172,68,49,579,42.4,0.702,28,1
|
||||
6,102,90,39,0,35.7,0.674,28,0
|
||||
1,112,72,30,176,34.4,0.528,25,0
|
||||
1,143,84,23,310,42.4,1.076,22,0
|
||||
1,143,74,22,61,26.2,0.256,21,0
|
||||
0,138,60,35,167,34.6,0.534,21,1
|
||||
3,173,84,33,474,35.7,0.258,22,1
|
||||
1,97,68,21,0,27.2,1.095,22,0
|
||||
4,144,82,32,0,38.5,0.554,37,1
|
||||
1,83,68,0,0,18.2,0.624,27,0
|
||||
3,129,64,29,115,26.4,0.219,28,1
|
||||
1,119,88,41,170,45.3,0.507,26,0
|
||||
2,94,68,18,76,26,0.561,21,0
|
||||
0,102,64,46,78,40.6,0.496,21,0
|
||||
2,115,64,22,0,30.8,0.421,21,0
|
||||
8,151,78,32,210,42.9,0.516,36,1
|
||||
4,184,78,39,277,37,0.264,31,1
|
||||
0,94,0,0,0,0,0.256,25,0
|
||||
1,181,64,30,180,34.1,0.328,38,1
|
||||
0,135,94,46,145,40.6,0.284,26,0
|
||||
1,95,82,25,180,35,0.233,43,1
|
||||
2,99,0,0,0,22.2,0.108,23,0
|
||||
3,89,74,16,85,30.4,0.551,38,0
|
||||
1,80,74,11,60,30,0.527,22,0
|
||||
2,139,75,0,0,25.6,0.167,29,0
|
||||
1,90,68,8,0,24.5,1.138,36,0
|
||||
0,141,0,0,0,42.4,0.205,29,1
|
||||
12,140,85,33,0,37.4,0.244,41,0
|
||||
5,147,75,0,0,29.9,0.434,28,0
|
||||
1,97,70,15,0,18.2,0.147,21,0
|
||||
6,107,88,0,0,36.8,0.727,31,0
|
||||
0,189,104,25,0,34.3,0.435,41,1
|
||||
2,83,66,23,50,32.2,0.497,22,0
|
||||
4,117,64,27,120,33.2,0.23,24,0
|
||||
8,108,70,0,0,30.5,0.955,33,1
|
||||
4,117,62,12,0,29.7,0.38,30,1
|
||||
0,180,78,63,14,59.4,2.42,25,1
|
||||
1,100,72,12,70,25.3,0.658,28,0
|
||||
0,95,80,45,92,36.5,0.33,26,0
|
||||
0,104,64,37,64,33.6,0.51,22,1
|
||||
0,120,74,18,63,30.5,0.285,26,0
|
||||
1,82,64,13,95,21.2,0.415,23,0
|
||||
2,134,70,0,0,28.9,0.542,23,1
|
||||
0,91,68,32,210,39.9,0.381,25,0
|
||||
2,119,0,0,0,19.6,0.832,72,0
|
||||
2,100,54,28,105,37.8,0.498,24,0
|
||||
14,175,62,30,0,33.6,0.212,38,1
|
||||
1,135,54,0,0,26.7,0.687,62,0
|
||||
5,86,68,28,71,30.2,0.364,24,0
|
||||
10,148,84,48,237,37.6,1.001,51,1
|
||||
9,134,74,33,60,25.9,0.46,81,0
|
||||
9,120,72,22,56,20.8,0.733,48,0
|
||||
1,71,62,0,0,21.8,0.416,26,0
|
||||
8,74,70,40,49,35.3,0.705,39,0
|
||||
5,88,78,30,0,27.6,0.258,37,0
|
||||
10,115,98,0,0,24,1.022,34,0
|
||||
0,124,56,13,105,21.8,0.452,21,0
|
||||
0,74,52,10,36,27.8,0.269,22,0
|
||||
0,97,64,36,100,36.8,0.6,25,0
|
||||
8,120,0,0,0,30,0.183,38,1
|
||||
6,154,78,41,140,46.1,0.571,27,0
|
||||
1,144,82,40,0,41.3,0.607,28,0
|
||||
0,137,70,38,0,33.2,0.17,22,0
|
||||
0,119,66,27,0,38.8,0.259,22,0
|
||||
7,136,90,0,0,29.9,0.21,50,0
|
||||
4,114,64,0,0,28.9,0.126,24,0
|
||||
0,137,84,27,0,27.3,0.231,59,0
|
||||
2,105,80,45,191,33.7,0.711,29,1
|
||||
7,114,76,17,110,23.8,0.466,31,0
|
||||
8,126,74,38,75,25.9,0.162,39,0
|
||||
4,132,86,31,0,28,0.419,63,0
|
||||
3,158,70,30,328,35.5,0.344,35,1
|
||||
0,123,88,37,0,35.2,0.197,29,0
|
||||
4,85,58,22,49,27.8,0.306,28,0
|
||||
0,84,82,31,125,38.2,0.233,23,0
|
||||
0,145,0,0,0,44.2,0.63,31,1
|
||||
0,135,68,42,250,42.3,0.365,24,1
|
||||
1,139,62,41,480,40.7,0.536,21,0
|
||||
0,173,78,32,265,46.5,1.159,58,0
|
||||
4,99,72,17,0,25.6,0.294,28,0
|
||||
8,194,80,0,0,26.1,0.551,67,0
|
||||
2,83,65,28,66,36.8,0.629,24,0
|
||||
2,89,90,30,0,33.5,0.292,42,0
|
||||
4,99,68,38,0,32.8,0.145,33,0
|
||||
4,125,70,18,122,28.9,1.144,45,1
|
||||
3,80,0,0,0,0,0.174,22,0
|
||||
6,166,74,0,0,26.6,0.304,66,0
|
||||
5,110,68,0,0,26,0.292,30,0
|
||||
2,81,72,15,76,30.1,0.547,25,0
|
||||
7,195,70,33,145,25.1,0.163,55,1
|
||||
6,154,74,32,193,29.3,0.839,39,0
|
||||
2,117,90,19,71,25.2,0.313,21,0
|
||||
3,84,72,32,0,37.2,0.267,28,0
|
||||
6,0,68,41,0,39,0.727,41,1
|
||||
7,94,64,25,79,33.3,0.738,41,0
|
||||
3,96,78,39,0,37.3,0.238,40,0
|
||||
10,75,82,0,0,33.3,0.263,38,0
|
||||
0,180,90,26,90,36.5,0.314,35,1
|
||||
1,130,60,23,170,28.6,0.692,21,0
|
||||
2,84,50,23,76,30.4,0.968,21,0
|
||||
8,120,78,0,0,25,0.409,64,0
|
||||
12,84,72,31,0,29.7,0.297,46,1
|
||||
0,139,62,17,210,22.1,0.207,21,0
|
||||
9,91,68,0,0,24.2,0.2,58,0
|
||||
2,91,62,0,0,27.3,0.525,22,0
|
||||
3,99,54,19,86,25.6,0.154,24,0
|
||||
3,163,70,18,105,31.6,0.268,28,1
|
||||
9,145,88,34,165,30.3,0.771,53,1
|
||||
7,125,86,0,0,37.6,0.304,51,0
|
||||
13,76,60,0,0,32.8,0.18,41,0
|
||||
6,129,90,7,326,19.6,0.582,60,0
|
||||
2,68,70,32,66,25,0.187,25,0
|
||||
3,124,80,33,130,33.2,0.305,26,0
|
||||
6,114,0,0,0,0,0.189,26,0
|
||||
9,130,70,0,0,34.2,0.652,45,1
|
||||
3,125,58,0,0,31.6,0.151,24,0
|
||||
3,87,60,18,0,21.8,0.444,21,0
|
||||
1,97,64,19,82,18.2,0.299,21,0
|
||||
3,116,74,15,105,26.3,0.107,24,0
|
||||
0,117,66,31,188,30.8,0.493,22,0
|
||||
0,111,65,0,0,24.6,0.66,31,0
|
||||
2,122,60,18,106,29.8,0.717,22,0
|
||||
0,107,76,0,0,45.3,0.686,24,0
|
||||
1,86,66,52,65,41.3,0.917,29,0
|
||||
6,91,0,0,0,29.8,0.501,31,0
|
||||
1,77,56,30,56,33.3,1.251,24,0
|
||||
4,132,0,0,0,32.9,0.302,23,1
|
||||
0,105,90,0,0,29.6,0.197,46,0
|
||||
0,57,60,0,0,21.7,0.735,67,0
|
||||
0,127,80,37,210,36.3,0.804,23,0
|
||||
3,129,92,49,155,36.4,0.968,32,1
|
||||
8,100,74,40,215,39.4,0.661,43,1
|
||||
3,128,72,25,190,32.4,0.549,27,1
|
||||
10,90,85,32,0,34.9,0.825,56,1
|
||||
4,84,90,23,56,39.5,0.159,25,0
|
||||
1,88,78,29,76,32,0.365,29,0
|
||||
8,186,90,35,225,34.5,0.423,37,1
|
||||
5,187,76,27,207,43.6,1.034,53,1
|
||||
4,131,68,21,166,33.1,0.16,28,0
|
||||
1,164,82,43,67,32.8,0.341,50,0
|
||||
4,189,110,31,0,28.5,0.68,37,0
|
||||
1,116,70,28,0,27.4,0.204,21,0
|
||||
3,84,68,30,106,31.9,0.591,25,0
|
||||
6,114,88,0,0,27.8,0.247,66,0
|
||||
1,88,62,24,44,29.9,0.422,23,0
|
||||
1,84,64,23,115,36.9,0.471,28,0
|
||||
7,124,70,33,215,25.5,0.161,37,0
|
||||
1,97,70,40,0,38.1,0.218,30,0
|
||||
8,110,76,0,0,27.8,0.237,58,0
|
||||
11,103,68,40,0,46.2,0.126,42,0
|
||||
11,85,74,0,0,30.1,0.3,35,0
|
||||
6,125,76,0,0,33.8,0.121,54,1
|
||||
0,198,66,32,274,41.3,0.502,28,1
|
||||
1,87,68,34,77,37.6,0.401,24,0
|
||||
6,99,60,19,54,26.9,0.497,32,0
|
||||
0,91,80,0,0,32.4,0.601,27,0
|
||||
2,95,54,14,88,26.1,0.748,22,0
|
||||
1,99,72,30,18,38.6,0.412,21,0
|
||||
6,92,62,32,126,32,0.085,46,0
|
||||
4,154,72,29,126,31.3,0.338,37,0
|
||||
0,121,66,30,165,34.3,0.203,33,1
|
||||
3,78,70,0,0,32.5,0.27,39,0
|
||||
2,130,96,0,0,22.6,0.268,21,0
|
||||
3,111,58,31,44,29.5,0.43,22,0
|
||||
2,98,60,17,120,34.7,0.198,22,0
|
||||
1,143,86,30,330,30.1,0.892,23,0
|
||||
1,119,44,47,63,35.5,0.28,25,0
|
||||
6,108,44,20,130,24,0.813,35,0
|
||||
2,118,80,0,0,42.9,0.693,21,1
|
||||
10,133,68,0,0,27,0.245,36,0
|
||||
2,197,70,99,0,34.7,0.575,62,1
|
||||
0,151,90,46,0,42.1,0.371,21,1
|
||||
6,109,60,27,0,25,0.206,27,0
|
||||
12,121,78,17,0,26.5,0.259,62,0
|
||||
8,100,76,0,0,38.7,0.19,42,0
|
||||
8,124,76,24,600,28.7,0.687,52,1
|
||||
1,93,56,11,0,22.5,0.417,22,0
|
||||
8,143,66,0,0,34.9,0.129,41,1
|
||||
6,103,66,0,0,24.3,0.249,29,0
|
||||
3,176,86,27,156,33.3,1.154,52,1
|
||||
0,73,0,0,0,21.1,0.342,25,0
|
||||
11,111,84,40,0,46.8,0.925,45,1
|
||||
2,112,78,50,140,39.4,0.175,24,0
|
||||
3,132,80,0,0,34.4,0.402,44,1
|
||||
2,82,52,22,115,28.5,1.699,25,0
|
||||
6,123,72,45,230,33.6,0.733,34,0
|
||||
0,188,82,14,185,32,0.682,22,1
|
||||
0,67,76,0,0,45.3,0.194,46,0
|
||||
1,89,24,19,25,27.8,0.559,21,0
|
||||
1,173,74,0,0,36.8,0.088,38,1
|
||||
1,109,38,18,120,23.1,0.407,26,0
|
||||
1,108,88,19,0,27.1,0.4,24,0
|
||||
6,96,0,0,0,23.7,0.19,28,0
|
||||
1,124,74,36,0,27.8,0.1,30,0
|
||||
7,150,78,29,126,35.2,0.692,54,1
|
||||
4,183,0,0,0,28.4,0.212,36,1
|
||||
1,124,60,32,0,35.8,0.514,21,0
|
||||
1,181,78,42,293,40,1.258,22,1
|
||||
1,92,62,25,41,19.5,0.482,25,0
|
||||
0,152,82,39,272,41.5,0.27,27,0
|
||||
1,111,62,13,182,24,0.138,23,0
|
||||
3,106,54,21,158,30.9,0.292,24,0
|
||||
3,174,58,22,194,32.9,0.593,36,1
|
||||
7,168,88,42,321,38.2,0.787,40,1
|
||||
6,105,80,28,0,32.5,0.878,26,0
|
||||
11,138,74,26,144,36.1,0.557,50,1
|
||||
3,106,72,0,0,25.8,0.207,27,0
|
||||
6,117,96,0,0,28.7,0.157,30,0
|
||||
2,68,62,13,15,20.1,0.257,23,0
|
||||
9,112,82,24,0,28.2,1.282,50,1
|
||||
0,119,0,0,0,32.4,0.141,24,1
|
||||
2,112,86,42,160,38.4,0.246,28,0
|
||||
2,92,76,20,0,24.2,1.698,28,0
|
||||
6,183,94,0,0,40.8,1.461,45,0
|
||||
0,94,70,27,115,43.5,0.347,21,0
|
||||
2,108,64,0,0,30.8,0.158,21,0
|
||||
4,90,88,47,54,37.7,0.362,29,0
|
||||
0,125,68,0,0,24.7,0.206,21,0
|
||||
0,132,78,0,0,32.4,0.393,21,0
|
||||
5,128,80,0,0,34.6,0.144,45,0
|
||||
4,94,65,22,0,24.7,0.148,21,0
|
||||
7,114,64,0,0,27.4,0.732,34,1
|
||||
0,102,78,40,90,34.5,0.238,24,0
|
||||
2,111,60,0,0,26.2,0.343,23,0
|
||||
1,128,82,17,183,27.5,0.115,22,0
|
||||
10,92,62,0,0,25.9,0.167,31,0
|
||||
13,104,72,0,0,31.2,0.465,38,1
|
||||
5,104,74,0,0,28.8,0.153,48,0
|
||||
2,94,76,18,66,31.6,0.649,23,0
|
||||
7,97,76,32,91,40.9,0.871,32,1
|
||||
1,100,74,12,46,19.5,0.149,28,0
|
||||
0,102,86,17,105,29.3,0.695,27,0
|
||||
4,128,70,0,0,34.3,0.303,24,0
|
||||
6,147,80,0,0,29.5,0.178,50,1
|
||||
4,90,0,0,0,28,0.61,31,0
|
||||
3,103,72,30,152,27.6,0.73,27,0
|
||||
2,157,74,35,440,39.4,0.134,30,0
|
||||
1,167,74,17,144,23.4,0.447,33,1
|
||||
0,179,50,36,159,37.8,0.455,22,1
|
||||
11,136,84,35,130,28.3,0.26,42,1
|
||||
0,107,60,25,0,26.4,0.133,23,0
|
||||
1,91,54,25,100,25.2,0.234,23,0
|
||||
1,117,60,23,106,33.8,0.466,27,0
|
||||
5,123,74,40,77,34.1,0.269,28,0
|
||||
2,120,54,0,0,26.8,0.455,27,0
|
||||
1,106,70,28,135,34.2,0.142,22,0
|
||||
2,155,52,27,540,38.7,0.24,25,1
|
||||
2,101,58,35,90,21.8,0.155,22,0
|
||||
1,120,80,48,200,38.9,1.162,41,0
|
||||
11,127,106,0,0,39,0.19,51,0
|
||||
3,80,82,31,70,34.2,1.292,27,1
|
||||
10,162,84,0,0,27.7,0.182,54,0
|
||||
1,199,76,43,0,42.9,1.394,22,1
|
||||
8,167,106,46,231,37.6,0.165,43,1
|
||||
9,145,80,46,130,37.9,0.637,40,1
|
||||
6,115,60,39,0,33.7,0.245,40,1
|
||||
1,112,80,45,132,34.8,0.217,24,0
|
||||
4,145,82,18,0,32.5,0.235,70,1
|
||||
10,111,70,27,0,27.5,0.141,40,1
|
||||
6,98,58,33,190,34,0.43,43,0
|
||||
9,154,78,30,100,30.9,0.164,45,0
|
||||
6,165,68,26,168,33.6,0.631,49,0
|
||||
1,99,58,10,0,25.4,0.551,21,0
|
||||
10,68,106,23,49,35.5,0.285,47,0
|
||||
3,123,100,35,240,57.3,0.88,22,0
|
||||
8,91,82,0,0,35.6,0.587,68,0
|
||||
6,195,70,0,0,30.9,0.328,31,1
|
||||
9,156,86,0,0,24.8,0.23,53,1
|
||||
0,93,60,0,0,35.3,0.263,25,0
|
||||
3,121,52,0,0,36,0.127,25,1
|
||||
2,101,58,17,265,24.2,0.614,23,0
|
||||
2,56,56,28,45,24.2,0.332,22,0
|
||||
0,162,76,36,0,49.6,0.364,26,1
|
||||
0,95,64,39,105,44.6,0.366,22,0
|
||||
4,125,80,0,0,32.3,0.536,27,1
|
||||
5,136,82,0,0,0,0.64,69,0
|
||||
2,129,74,26,205,33.2,0.591,25,0
|
||||
3,130,64,0,0,23.1,0.314,22,0
|
||||
1,107,50,19,0,28.3,0.181,29,0
|
||||
1,140,74,26,180,24.1,0.828,23,0
|
||||
1,144,82,46,180,46.1,0.335,46,1
|
||||
8,107,80,0,0,24.6,0.856,34,0
|
||||
13,158,114,0,0,42.3,0.257,44,1
|
||||
2,121,70,32,95,39.1,0.886,23,0
|
||||
7,129,68,49,125,38.5,0.439,43,1
|
||||
2,90,60,0,0,23.5,0.191,25,0
|
||||
7,142,90,24,480,30.4,0.128,43,1
|
||||
3,169,74,19,125,29.9,0.268,31,1
|
||||
0,99,0,0,0,25,0.253,22,0
|
||||
4,127,88,11,155,34.5,0.598,28,0
|
||||
4,118,70,0,0,44.5,0.904,26,0
|
||||
2,122,76,27,200,35.9,0.483,26,0
|
||||
6,125,78,31,0,27.6,0.565,49,1
|
||||
1,168,88,29,0,35,0.905,52,1
|
||||
2,129,0,0,0,38.5,0.304,41,0
|
||||
4,110,76,20,100,28.4,0.118,27,0
|
||||
6,80,80,36,0,39.8,0.177,28,0
|
||||
10,115,0,0,0,0,0.261,30,1
|
||||
2,127,46,21,335,34.4,0.176,22,0
|
||||
9,164,78,0,0,32.8,0.148,45,1
|
||||
2,93,64,32,160,38,0.674,23,1
|
||||
3,158,64,13,387,31.2,0.295,24,0
|
||||
5,126,78,27,22,29.6,0.439,40,0
|
||||
10,129,62,36,0,41.2,0.441,38,1
|
||||
0,134,58,20,291,26.4,0.352,21,0
|
||||
3,102,74,0,0,29.5,0.121,32,0
|
||||
7,187,50,33,392,33.9,0.826,34,1
|
||||
3,173,78,39,185,33.8,0.97,31,1
|
||||
10,94,72,18,0,23.1,0.595,56,0
|
||||
1,108,60,46,178,35.5,0.415,24,0
|
||||
5,97,76,27,0,35.6,0.378,52,1
|
||||
4,83,86,19,0,29.3,0.317,34,0
|
||||
1,114,66,36,200,38.1,0.289,21,0
|
||||
1,149,68,29,127,29.3,0.349,42,1
|
||||
5,117,86,30,105,39.1,0.251,42,0
|
||||
1,111,94,0,0,32.8,0.265,45,0
|
||||
4,112,78,40,0,39.4,0.236,38,0
|
||||
1,116,78,29,180,36.1,0.496,25,0
|
||||
0,141,84,26,0,32.4,0.433,22,0
|
||||
2,175,88,0,0,22.9,0.326,22,0
|
||||
2,92,52,0,0,30.1,0.141,22,0
|
||||
3,130,78,23,79,28.4,0.323,34,1
|
||||
8,120,86,0,0,28.4,0.259,22,1
|
||||
2,174,88,37,120,44.5,0.646,24,1
|
||||
2,106,56,27,165,29,0.426,22,0
|
||||
2,105,75,0,0,23.3,0.56,53,0
|
||||
4,95,60,32,0,35.4,0.284,28,0
|
||||
0,126,86,27,120,27.4,0.515,21,0
|
||||
8,65,72,23,0,32,0.6,42,0
|
||||
2,99,60,17,160,36.6,0.453,21,0
|
||||
1,102,74,0,0,39.5,0.293,42,1
|
||||
11,120,80,37,150,42.3,0.785,48,1
|
||||
3,102,44,20,94,30.8,0.4,26,0
|
||||
1,109,58,18,116,28.5,0.219,22,0
|
||||
9,140,94,0,0,32.7,0.734,45,1
|
||||
13,153,88,37,140,40.6,1.174,39,0
|
||||
12,100,84,33,105,30,0.488,46,0
|
||||
1,147,94,41,0,49.3,0.358,27,1
|
||||
1,81,74,41,57,46.3,1.096,32,0
|
||||
3,187,70,22,200,36.4,0.408,36,1
|
||||
6,162,62,0,0,24.3,0.178,50,1
|
||||
4,136,70,0,0,31.2,1.182,22,1
|
||||
1,121,78,39,74,39,0.261,28,0
|
||||
3,108,62,24,0,26,0.223,25,0
|
||||
0,181,88,44,510,43.3,0.222,26,1
|
||||
8,154,78,32,0,32.4,0.443,45,1
|
||||
1,128,88,39,110,36.5,1.057,37,1
|
||||
7,137,90,41,0,32,0.391,39,0
|
||||
0,123,72,0,0,36.3,0.258,52,1
|
||||
1,106,76,0,0,37.5,0.197,26,0
|
||||
6,190,92,0,0,35.5,0.278,66,1
|
||||
2,88,58,26,16,28.4,0.766,22,0
|
||||
9,170,74,31,0,44,0.403,43,1
|
||||
9,89,62,0,0,22.5,0.142,33,0
|
||||
10,101,76,48,180,32.9,0.171,63,0
|
||||
2,122,70,27,0,36.8,0.34,27,0
|
||||
5,121,72,23,112,26.2,0.245,30,0
|
||||
1,126,60,0,0,30.1,0.349,47,1
|
||||
1,93,70,31,0,30.4,0.315,23,0
|
||||
|
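The diabetes.csv table added above has eight numeric feature columns (Pregnancies through Age) and a binary Outcome label. As a minimal sketch only, assuming pandas from the project's environment is available, it could be loaded and split like this; note that several rows visibly use 0 as a placeholder in columns such as Glucose, BloodPressure and BMI:

    # Illustrative sketch, not part of the committed files.
    import pandas as pd

    df = pd.read_csv("data/diabetes.csv")

    # Zeros in these columns appear to stand in for missing measurements
    # (see e.g. the rows above with BloodPressure == 0 or BMI == 0).
    suspect = ["Glucose", "BloodPressure", "SkinThickness", "Insulin", "BMI"]
    print((df[suspect] == 0).sum())

    X = df.drop(columns="Outcome")   # 8 feature columns
    y = df["Outcome"]                # binary label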
BIN
data/generator/class 0/squirrel.jpeg
Normal file
Binary file not shown.
After Width: | Height: | Size: 1.5 MiB |
304
data/heart.csv
Normal file
@@ -0,0 +1,304 @@
|
||||
age,sex,cp,trtbps,chol,fbs,restecg,thalachh,exng,oldpeak,slp,caa,thall,output
|
||||
63,1,3,145,233,1,0,150,0,2.3,0,0,1,1
|
||||
37,1,2,130,250,0,1,187,0,3.5,0,0,2,1
|
||||
41,0,1,130,204,0,0,172,0,1.4,2,0,2,1
|
||||
56,1,1,120,236,0,1,178,0,0.8,2,0,2,1
|
||||
57,0,0,120,354,0,1,163,1,0.6,2,0,2,1
|
||||
57,1,0,140,192,0,1,148,0,0.4,1,0,1,1
|
||||
56,0,1,140,294,0,0,153,0,1.3,1,0,2,1
|
||||
44,1,1,120,263,0,1,173,0,0,2,0,3,1
|
||||
52,1,2,172,199,1,1,162,0,0.5,2,0,3,1
|
||||
57,1,2,150,168,0,1,174,0,1.6,2,0,2,1
|
||||
54,1,0,140,239,0,1,160,0,1.2,2,0,2,1
|
||||
48,0,2,130,275,0,1,139,0,0.2,2,0,2,1
|
||||
49,1,1,130,266,0,1,171,0,0.6,2,0,2,1
|
||||
64,1,3,110,211,0,0,144,1,1.8,1,0,2,1
|
||||
58,0,3,150,283,1,0,162,0,1,2,0,2,1
|
||||
50,0,2,120,219,0,1,158,0,1.6,1,0,2,1
|
||||
58,0,2,120,340,0,1,172,0,0,2,0,2,1
|
||||
66,0,3,150,226,0,1,114,0,2.6,0,0,2,1
|
||||
43,1,0,150,247,0,1,171,0,1.5,2,0,2,1
|
||||
69,0,3,140,239,0,1,151,0,1.8,2,2,2,1
|
||||
59,1,0,135,234,0,1,161,0,0.5,1,0,3,1
|
||||
44,1,2,130,233,0,1,179,1,0.4,2,0,2,1
|
||||
42,1,0,140,226,0,1,178,0,0,2,0,2,1
|
||||
61,1,2,150,243,1,1,137,1,1,1,0,2,1
|
||||
40,1,3,140,199,0,1,178,1,1.4,2,0,3,1
|
||||
71,0,1,160,302,0,1,162,0,0.4,2,2,2,1
|
||||
59,1,2,150,212,1,1,157,0,1.6,2,0,2,1
|
||||
51,1,2,110,175,0,1,123,0,0.6,2,0,2,1
|
||||
65,0,2,140,417,1,0,157,0,0.8,2,1,2,1
|
||||
53,1,2,130,197,1,0,152,0,1.2,0,0,2,1
|
||||
41,0,1,105,198,0,1,168,0,0,2,1,2,1
|
||||
65,1,0,120,177,0,1,140,0,0.4,2,0,3,1
|
||||
44,1,1,130,219,0,0,188,0,0,2,0,2,1
|
||||
54,1,2,125,273,0,0,152,0,0.5,0,1,2,1
|
||||
51,1,3,125,213,0,0,125,1,1.4,2,1,2,1
|
||||
46,0,2,142,177,0,0,160,1,1.4,0,0,2,1
|
||||
54,0,2,135,304,1,1,170,0,0,2,0,2,1
|
||||
54,1,2,150,232,0,0,165,0,1.6,2,0,3,1
|
||||
65,0,2,155,269,0,1,148,0,0.8,2,0,2,1
|
||||
65,0,2,160,360,0,0,151,0,0.8,2,0,2,1
|
||||
51,0,2,140,308,0,0,142,0,1.5,2,1,2,1
|
||||
48,1,1,130,245,0,0,180,0,0.2,1,0,2,1
|
||||
45,1,0,104,208,0,0,148,1,3,1,0,2,1
|
||||
53,0,0,130,264,0,0,143,0,0.4,1,0,2,1
|
||||
39,1,2,140,321,0,0,182,0,0,2,0,2,1
|
||||
52,1,1,120,325,0,1,172,0,0.2,2,0,2,1
|
||||
44,1,2,140,235,0,0,180,0,0,2,0,2,1
|
||||
47,1,2,138,257,0,0,156,0,0,2,0,2,1
|
||||
53,0,2,128,216,0,0,115,0,0,2,0,0,1
|
||||
53,0,0,138,234,0,0,160,0,0,2,0,2,1
|
||||
51,0,2,130,256,0,0,149,0,0.5,2,0,2,1
|
||||
66,1,0,120,302,0,0,151,0,0.4,1,0,2,1
|
||||
62,1,2,130,231,0,1,146,0,1.8,1,3,3,1
|
||||
44,0,2,108,141,0,1,175,0,0.6,1,0,2,1
|
||||
63,0,2,135,252,0,0,172,0,0,2,0,2,1
|
||||
52,1,1,134,201,0,1,158,0,0.8,2,1,2,1
|
||||
48,1,0,122,222,0,0,186,0,0,2,0,2,1
|
||||
45,1,0,115,260,0,0,185,0,0,2,0,2,1
|
||||
34,1,3,118,182,0,0,174,0,0,2,0,2,1
|
||||
57,0,0,128,303,0,0,159,0,0,2,1,2,1
|
||||
71,0,2,110,265,1,0,130,0,0,2,1,2,1
|
||||
54,1,1,108,309,0,1,156,0,0,2,0,3,1
|
||||
52,1,3,118,186,0,0,190,0,0,1,0,1,1
|
||||
41,1,1,135,203,0,1,132,0,0,1,0,1,1
|
||||
58,1,2,140,211,1,0,165,0,0,2,0,2,1
|
||||
35,0,0,138,183,0,1,182,0,1.4,2,0,2,1
|
||||
51,1,2,100,222,0,1,143,1,1.2,1,0,2,1
|
||||
45,0,1,130,234,0,0,175,0,0.6,1,0,2,1
|
||||
44,1,1,120,220,0,1,170,0,0,2,0,2,1
|
||||
62,0,0,124,209,0,1,163,0,0,2,0,2,1
|
||||
54,1,2,120,258,0,0,147,0,0.4,1,0,3,1
|
||||
51,1,2,94,227,0,1,154,1,0,2,1,3,1
|
||||
29,1,1,130,204,0,0,202,0,0,2,0,2,1
|
||||
51,1,0,140,261,0,0,186,1,0,2,0,2,1
|
||||
43,0,2,122,213,0,1,165,0,0.2,1,0,2,1
|
||||
55,0,1,135,250,0,0,161,0,1.4,1,0,2,1
|
||||
51,1,2,125,245,1,0,166,0,2.4,1,0,2,1
|
||||
59,1,1,140,221,0,1,164,1,0,2,0,2,1
|
||||
52,1,1,128,205,1,1,184,0,0,2,0,2,1
|
||||
58,1,2,105,240,0,0,154,1,0.6,1,0,3,1
|
||||
41,1,2,112,250,0,1,179,0,0,2,0,2,1
|
||||
45,1,1,128,308,0,0,170,0,0,2,0,2,1
|
||||
60,0,2,102,318,0,1,160,0,0,2,1,2,1
|
||||
52,1,3,152,298,1,1,178,0,1.2,1,0,3,1
|
||||
42,0,0,102,265,0,0,122,0,0.6,1,0,2,1
|
||||
67,0,2,115,564,0,0,160,0,1.6,1,0,3,1
|
||||
68,1,2,118,277,0,1,151,0,1,2,1,3,1
|
||||
46,1,1,101,197,1,1,156,0,0,2,0,3,1
|
||||
54,0,2,110,214,0,1,158,0,1.6,1,0,2,1
|
||||
58,0,0,100,248,0,0,122,0,1,1,0,2,1
|
||||
48,1,2,124,255,1,1,175,0,0,2,2,2,1
|
||||
57,1,0,132,207,0,1,168,1,0,2,0,3,1
|
||||
52,1,2,138,223,0,1,169,0,0,2,4,2,1
|
||||
54,0,1,132,288,1,0,159,1,0,2,1,2,1
|
||||
45,0,1,112,160,0,1,138,0,0,1,0,2,1
|
||||
53,1,0,142,226,0,0,111,1,0,2,0,3,1
|
||||
62,0,0,140,394,0,0,157,0,1.2,1,0,2,1
|
||||
52,1,0,108,233,1,1,147,0,0.1,2,3,3,1
|
||||
43,1,2,130,315,0,1,162,0,1.9,2,1,2,1
|
||||
53,1,2,130,246,1,0,173,0,0,2,3,2,1
|
||||
42,1,3,148,244,0,0,178,0,0.8,2,2,2,1
|
||||
59,1,3,178,270,0,0,145,0,4.2,0,0,3,1
|
||||
63,0,1,140,195,0,1,179,0,0,2,2,2,1
|
||||
42,1,2,120,240,1,1,194,0,0.8,0,0,3,1
|
||||
50,1,2,129,196,0,1,163,0,0,2,0,2,1
|
||||
68,0,2,120,211,0,0,115,0,1.5,1,0,2,1
|
||||
69,1,3,160,234,1,0,131,0,0.1,1,1,2,1
|
||||
45,0,0,138,236,0,0,152,1,0.2,1,0,2,1
|
||||
50,0,1,120,244,0,1,162,0,1.1,2,0,2,1
|
||||
50,0,0,110,254,0,0,159,0,0,2,0,2,1
|
||||
64,0,0,180,325,0,1,154,1,0,2,0,2,1
|
||||
57,1,2,150,126,1,1,173,0,0.2,2,1,3,1
|
||||
64,0,2,140,313,0,1,133,0,0.2,2,0,3,1
|
||||
43,1,0,110,211,0,1,161,0,0,2,0,3,1
|
||||
55,1,1,130,262,0,1,155,0,0,2,0,2,1
|
||||
37,0,2,120,215,0,1,170,0,0,2,0,2,1
|
||||
41,1,2,130,214,0,0,168,0,2,1,0,2,1
|
||||
56,1,3,120,193,0,0,162,0,1.9,1,0,3,1
|
||||
46,0,1,105,204,0,1,172,0,0,2,0,2,1
|
||||
46,0,0,138,243,0,0,152,1,0,1,0,2,1
|
||||
64,0,0,130,303,0,1,122,0,2,1,2,2,1
|
||||
59,1,0,138,271,0,0,182,0,0,2,0,2,1
|
||||
41,0,2,112,268,0,0,172,1,0,2,0,2,1
|
||||
54,0,2,108,267,0,0,167,0,0,2,0,2,1
|
||||
39,0,2,94,199,0,1,179,0,0,2,0,2,1
|
||||
34,0,1,118,210,0,1,192,0,0.7,2,0,2,1
|
||||
47,1,0,112,204,0,1,143,0,0.1,2,0,2,1
|
||||
67,0,2,152,277,0,1,172,0,0,2,1,2,1
|
||||
52,0,2,136,196,0,0,169,0,0.1,1,0,2,1
|
||||
74,0,1,120,269,0,0,121,1,0.2,2,1,2,1
|
||||
54,0,2,160,201,0,1,163,0,0,2,1,2,1
|
||||
49,0,1,134,271,0,1,162,0,0,1,0,2,1
|
||||
42,1,1,120,295,0,1,162,0,0,2,0,2,1
|
||||
41,1,1,110,235,0,1,153,0,0,2,0,2,1
|
||||
41,0,1,126,306,0,1,163,0,0,2,0,2,1
|
||||
49,0,0,130,269,0,1,163,0,0,2,0,2,1
|
||||
60,0,2,120,178,1,1,96,0,0,2,0,2,1
|
||||
62,1,1,128,208,1,0,140,0,0,2,0,2,1
|
||||
57,1,0,110,201,0,1,126,1,1.5,1,0,1,1
|
||||
64,1,0,128,263,0,1,105,1,0.2,1,1,3,1
|
||||
51,0,2,120,295,0,0,157,0,0.6,2,0,2,1
|
||||
43,1,0,115,303,0,1,181,0,1.2,1,0,2,1
|
||||
42,0,2,120,209,0,1,173,0,0,1,0,2,1
|
||||
67,0,0,106,223,0,1,142,0,0.3,2,2,2,1
|
||||
76,0,2,140,197,0,2,116,0,1.1,1,0,2,1
|
||||
70,1,1,156,245,0,0,143,0,0,2,0,2,1
|
||||
44,0,2,118,242,0,1,149,0,0.3,1,1,2,1
|
||||
60,0,3,150,240,0,1,171,0,0.9,2,0,2,1
|
||||
44,1,2,120,226,0,1,169,0,0,2,0,2,1
|
||||
42,1,2,130,180,0,1,150,0,0,2,0,2,1
|
||||
66,1,0,160,228,0,0,138,0,2.3,2,0,1,1
|
||||
71,0,0,112,149,0,1,125,0,1.6,1,0,2,1
|
||||
64,1,3,170,227,0,0,155,0,0.6,1,0,3,1
|
||||
66,0,2,146,278,0,0,152,0,0,1,1,2,1
|
||||
39,0,2,138,220,0,1,152,0,0,1,0,2,1
|
||||
58,0,0,130,197,0,1,131,0,0.6,1,0,2,1
|
||||
47,1,2,130,253,0,1,179,0,0,2,0,2,1
|
||||
35,1,1,122,192,0,1,174,0,0,2,0,2,1
|
||||
58,1,1,125,220,0,1,144,0,0.4,1,4,3,1
|
||||
56,1,1,130,221,0,0,163,0,0,2,0,3,1
|
||||
56,1,1,120,240,0,1,169,0,0,0,0,2,1
|
||||
55,0,1,132,342,0,1,166,0,1.2,2,0,2,1
|
||||
41,1,1,120,157,0,1,182,0,0,2,0,2,1
|
||||
38,1,2,138,175,0,1,173,0,0,2,4,2,1
|
||||
38,1,2,138,175,0,1,173,0,0,2,4,2,1
|
||||
67,1,0,160,286,0,0,108,1,1.5,1,3,2,0
|
||||
67,1,0,120,229,0,0,129,1,2.6,1,2,3,0
|
||||
62,0,0,140,268,0,0,160,0,3.6,0,2,2,0
|
||||
63,1,0,130,254,0,0,147,0,1.4,1,1,3,0
|
||||
53,1,0,140,203,1,0,155,1,3.1,0,0,3,0
|
||||
56,1,2,130,256,1,0,142,1,0.6,1,1,1,0
|
||||
48,1,1,110,229,0,1,168,0,1,0,0,3,0
|
||||
58,1,1,120,284,0,0,160,0,1.8,1,0,2,0
|
||||
58,1,2,132,224,0,0,173,0,3.2,2,2,3,0
|
||||
60,1,0,130,206,0,0,132,1,2.4,1,2,3,0
|
||||
40,1,0,110,167,0,0,114,1,2,1,0,3,0
|
||||
60,1,0,117,230,1,1,160,1,1.4,2,2,3,0
|
||||
64,1,2,140,335,0,1,158,0,0,2,0,2,0
|
||||
43,1,0,120,177,0,0,120,1,2.5,1,0,3,0
|
||||
57,1,0,150,276,0,0,112,1,0.6,1,1,1,0
|
||||
55,1,0,132,353,0,1,132,1,1.2,1,1,3,0
|
||||
65,0,0,150,225,0,0,114,0,1,1,3,3,0
|
||||
61,0,0,130,330,0,0,169,0,0,2,0,2,0
|
||||
58,1,2,112,230,0,0,165,0,2.5,1,1,3,0
|
||||
50,1,0,150,243,0,0,128,0,2.6,1,0,3,0
|
||||
44,1,0,112,290,0,0,153,0,0,2,1,2,0
|
||||
60,1,0,130,253,0,1,144,1,1.4,2,1,3,0
|
||||
54,1,0,124,266,0,0,109,1,2.2,1,1,3,0
|
||||
50,1,2,140,233,0,1,163,0,0.6,1,1,3,0
|
||||
41,1,0,110,172,0,0,158,0,0,2,0,3,0
|
||||
51,0,0,130,305,0,1,142,1,1.2,1,0,3,0
|
||||
58,1,0,128,216,0,0,131,1,2.2,1,3,3,0
|
||||
54,1,0,120,188,0,1,113,0,1.4,1,1,3,0
|
||||
60,1,0,145,282,0,0,142,1,2.8,1,2,3,0
|
||||
60,1,2,140,185,0,0,155,0,3,1,0,2,0
|
||||
59,1,0,170,326,0,0,140,1,3.4,0,0,3,0
|
||||
46,1,2,150,231,0,1,147,0,3.6,1,0,2,0
|
||||
67,1,0,125,254,1,1,163,0,0.2,1,2,3,0
|
||||
62,1,0,120,267,0,1,99,1,1.8,1,2,3,0
|
||||
65,1,0,110,248,0,0,158,0,0.6,2,2,1,0
|
||||
44,1,0,110,197,0,0,177,0,0,2,1,2,0
|
||||
60,1,0,125,258,0,0,141,1,2.8,1,1,3,0
|
||||
58,1,0,150,270,0,0,111,1,0.8,2,0,3,0
|
||||
68,1,2,180,274,1,0,150,1,1.6,1,0,3,0
|
||||
62,0,0,160,164,0,0,145,0,6.2,0,3,3,0
|
||||
52,1,0,128,255,0,1,161,1,0,2,1,3,0
|
||||
59,1,0,110,239,0,0,142,1,1.2,1,1,3,0
|
||||
60,0,0,150,258,0,0,157,0,2.6,1,2,3,0
|
||||
49,1,2,120,188,0,1,139,0,2,1,3,3,0
|
||||
59,1,0,140,177,0,1,162,1,0,2,1,3,0
|
||||
57,1,2,128,229,0,0,150,0,0.4,1,1,3,0
|
||||
61,1,0,120,260,0,1,140,1,3.6,1,1,3,0
|
||||
39,1,0,118,219,0,1,140,0,1.2,1,0,3,0
|
||||
61,0,0,145,307,0,0,146,1,1,1,0,3,0
|
||||
56,1,0,125,249,1,0,144,1,1.2,1,1,2,0
|
||||
43,0,0,132,341,1,0,136,1,3,1,0,3,0
|
||||
62,0,2,130,263,0,1,97,0,1.2,1,1,3,0
|
||||
63,1,0,130,330,1,0,132,1,1.8,2,3,3,0
|
||||
65,1,0,135,254,0,0,127,0,2.8,1,1,3,0
|
||||
48,1,0,130,256,1,0,150,1,0,2,2,3,0
|
||||
63,0,0,150,407,0,0,154,0,4,1,3,3,0
|
||||
55,1,0,140,217,0,1,111,1,5.6,0,0,3,0
|
||||
65,1,3,138,282,1,0,174,0,1.4,1,1,2,0
|
||||
56,0,0,200,288,1,0,133,1,4,0,2,3,0
|
||||
54,1,0,110,239,0,1,126,1,2.8,1,1,3,0
|
||||
70,1,0,145,174,0,1,125,1,2.6,0,0,3,0
|
||||
62,1,1,120,281,0,0,103,0,1.4,1,1,3,0
|
||||
35,1,0,120,198,0,1,130,1,1.6,1,0,3,0
|
||||
59,1,3,170,288,0,0,159,0,0.2,1,0,3,0
|
||||
64,1,2,125,309,0,1,131,1,1.8,1,0,3,0
|
||||
47,1,2,108,243,0,1,152,0,0,2,0,2,0
|
||||
57,1,0,165,289,1,0,124,0,1,1,3,3,0
|
||||
55,1,0,160,289,0,0,145,1,0.8,1,1,3,0
|
||||
64,1,0,120,246,0,0,96,1,2.2,0,1,2,0
|
||||
70,1,0,130,322,0,0,109,0,2.4,1,3,2,0
|
||||
51,1,0,140,299,0,1,173,1,1.6,2,0,3,0
|
||||
58,1,0,125,300,0,0,171,0,0,2,2,3,0
|
||||
60,1,0,140,293,0,0,170,0,1.2,1,2,3,0
|
||||
77,1,0,125,304,0,0,162,1,0,2,3,2,0
|
||||
35,1,0,126,282,0,0,156,1,0,2,0,3,0
|
||||
70,1,2,160,269,0,1,112,1,2.9,1,1,3,0
|
||||
59,0,0,174,249,0,1,143,1,0,1,0,2,0
|
||||
64,1,0,145,212,0,0,132,0,2,1,2,1,0
|
||||
57,1,0,152,274,0,1,88,1,1.2,1,1,3,0
|
||||
56,1,0,132,184,0,0,105,1,2.1,1,1,1,0
|
||||
48,1,0,124,274,0,0,166,0,0.5,1,0,3,0
|
||||
56,0,0,134,409,0,0,150,1,1.9,1,2,3,0
|
||||
66,1,1,160,246,0,1,120,1,0,1,3,1,0
|
||||
54,1,1,192,283,0,0,195,0,0,2,1,3,0
|
||||
69,1,2,140,254,0,0,146,0,2,1,3,3,0
|
||||
51,1,0,140,298,0,1,122,1,4.2,1,3,3,0
|
||||
43,1,0,132,247,1,0,143,1,0.1,1,4,3,0
|
||||
62,0,0,138,294,1,1,106,0,1.9,1,3,2,0
|
||||
67,1,0,100,299,0,0,125,1,0.9,1,2,2,0
|
||||
59,1,3,160,273,0,0,125,0,0,2,0,2,0
|
||||
45,1,0,142,309,0,0,147,1,0,1,3,3,0
|
||||
58,1,0,128,259,0,0,130,1,3,1,2,3,0
|
||||
50,1,0,144,200,0,0,126,1,0.9,1,0,3,0
|
||||
62,0,0,150,244,0,1,154,1,1.4,1,0,2,0
|
||||
38,1,3,120,231,0,1,182,1,3.8,1,0,3,0
|
||||
66,0,0,178,228,1,1,165,1,1,1,2,3,0
|
||||
52,1,0,112,230,0,1,160,0,0,2,1,2,0
|
||||
53,1,0,123,282,0,1,95,1,2,1,2,3,0
|
||||
63,0,0,108,269,0,1,169,1,1.8,1,2,2,0
|
||||
54,1,0,110,206,0,0,108,1,0,1,1,2,0
|
||||
66,1,0,112,212,0,0,132,1,0.1,2,1,2,0
|
||||
55,0,0,180,327,0,2,117,1,3.4,1,0,2,0
|
||||
49,1,2,118,149,0,0,126,0,0.8,2,3,2,0
|
||||
54,1,0,122,286,0,0,116,1,3.2,1,2,2,0
|
||||
56,1,0,130,283,1,0,103,1,1.6,0,0,3,0
|
||||
46,1,0,120,249,0,0,144,0,0.8,2,0,3,0
|
||||
61,1,3,134,234,0,1,145,0,2.6,1,2,2,0
|
||||
67,1,0,120,237,0,1,71,0,1,1,0,2,0
|
||||
58,1,0,100,234,0,1,156,0,0.1,2,1,3,0
|
||||
47,1,0,110,275,0,0,118,1,1,1,1,2,0
|
||||
52,1,0,125,212,0,1,168,0,1,2,2,3,0
|
||||
58,1,0,146,218,0,1,105,0,2,1,1,3,0
|
||||
57,1,1,124,261,0,1,141,0,0.3,2,0,3,0
|
||||
58,0,1,136,319,1,0,152,0,0,2,2,2,0
|
||||
61,1,0,138,166,0,0,125,1,3.6,1,1,2,0
|
||||
42,1,0,136,315,0,1,125,1,1.8,1,0,1,0
|
||||
52,1,0,128,204,1,1,156,1,1,1,0,0,0
|
||||
59,1,2,126,218,1,1,134,0,2.2,1,1,1,0
|
||||
40,1,0,152,223,0,1,181,0,0,2,0,3,0
|
||||
61,1,0,140,207,0,0,138,1,1.9,2,1,3,0
|
||||
46,1,0,140,311,0,1,120,1,1.8,1,2,3,0
|
||||
59,1,3,134,204,0,1,162,0,0.8,2,2,2,0
|
||||
57,1,1,154,232,0,0,164,0,0,2,1,2,0
|
||||
57,1,0,110,335,0,1,143,1,3,1,1,3,0
|
||||
55,0,0,128,205,0,2,130,1,2,1,1,3,0
|
||||
61,1,0,148,203,0,1,161,0,0,2,1,3,0
|
||||
58,1,0,114,318,0,2,140,0,4.4,0,3,1,0
|
||||
58,0,0,170,225,1,0,146,1,2.8,1,2,1,0
|
||||
67,1,2,152,212,0,0,150,0,0.8,1,0,3,0
|
||||
44,1,0,120,169,0,1,144,1,2.8,0,0,1,0
|
||||
63,1,0,140,187,0,0,144,1,4,2,2,3,0
|
||||
63,0,0,124,197,0,1,136,1,0,1,0,2,0
|
||||
59,1,0,164,176,1,0,90,0,1,1,2,1,0
|
||||
57,0,0,140,241,0,1,123,1,0.2,1,0,3,0
|
||||
45,1,3,110,264,0,1,132,0,1.2,1,0,3,0
|
||||
68,1,0,144,193,1,1,141,0,3.4,1,2,3,0
|
||||
57,1,0,130,131,0,1,115,1,1.2,1,1,3,0
|
||||
57,0,1,130,236,0,0,174,0,0,1,1,2,0
|
||||
|
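heart.csv above carries 13 clinical feature columns (age through thall) plus a binary output column. A minimal loading sketch, again assuming pandas and not part of the commit itself:

    # Illustrative sketch only.
    import pandas as pd

    heart = pd.read_csv("data/heart.csv")
    print(heart.shape)                      # expected (303, 14)
    print(heart["output"].value_counts())   # balance of the binary target
    X = heart.drop(columns="output")
    y = heart["output"]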
48
data/housing-data.csv
Normal file
@@ -0,0 +1,48 @@
|
||||
sqft,bdrms,age,price
|
||||
2104,3,70,399900
|
||||
1600,3,28,329900
|
||||
2400,3,44,369000
|
||||
1416,2,49,232000
|
||||
3000,4,75,539900
|
||||
1985,4,61,299900
|
||||
1534,3,12,314900
|
||||
1427,3,57,198999
|
||||
1380,3,14,212000
|
||||
1494,3,15,242500
|
||||
1940,4,7,239999
|
||||
2000,3,27,347000
|
||||
1890,3,45,329999
|
||||
4478,5,49,699900
|
||||
1268,3,58,259900
|
||||
2300,4,77,449900
|
||||
1320,2,62,299900
|
||||
1236,3,78,199900
|
||||
2609,4,5,499998
|
||||
3031,4,21,599000
|
||||
1767,3,44,252900
|
||||
1888,2,79,255000
|
||||
1604,3,13,242900
|
||||
1962,4,53,259900
|
||||
3890,3,36,573900
|
||||
1100,3,60,249900
|
||||
1458,3,29,464500
|
||||
2526,3,13,469000
|
||||
2200,3,28,475000
|
||||
2637,3,25,299900
|
||||
1839,2,40,349900
|
||||
1000,1,5,169900
|
||||
2040,4,75,314900
|
||||
3137,3,67,579900
|
||||
1811,4,24,285900
|
||||
1437,3,50,249900
|
||||
1239,3,22,229900
|
||||
2132,4,28,345000
|
||||
4215,4,66,549000
|
||||
2162,4,43,287000
|
||||
1664,2,40,368500
|
||||
2238,3,37,329900
|
||||
2567,4,57,314000
|
||||
1200,3,76,299000
|
||||
852,2,70,179900
|
||||
1852,4,64,299900
|
||||
1203,3,11,239500
|
||||
|
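housing-data.csv above is a small table of sqft, bdrms, age and price. As a hedged sketch (assuming pandas; the derived price_per_sqft column is purely illustrative and not in the file):

    # Illustrative sketch only.
    import pandas as pd

    housing = pd.read_csv("data/housing-data.csv")
    print(housing.describe())   # ranges of sqft, bdrms, age, price
    housing["price_per_sqft"] = housing["price"] / housing["sqft"]
    print(housing.sort_values("price_per_sqft").head())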
145
data/international-airline-passengers.csv
Normal file
@@ -0,0 +1,145 @@
|
||||
"Month","Thousand Passengers"
|
||||
"1949-01",112
|
||||
"1949-02",118
|
||||
"1949-03",132
|
||||
"1949-04",129
|
||||
"1949-05",121
|
||||
"1949-06",135
|
||||
"1949-07",148
|
||||
"1949-08",148
|
||||
"1949-09",136
|
||||
"1949-10",119
|
||||
"1949-11",104
|
||||
"1949-12",118
|
||||
"1950-01",115
|
||||
"1950-02",126
|
||||
"1950-03",141
|
||||
"1950-04",135
|
||||
"1950-05",125
|
||||
"1950-06",149
|
||||
"1950-07",170
|
||||
"1950-08",170
|
||||
"1950-09",158
|
||||
"1950-10",133
|
||||
"1950-11",114
|
||||
"1950-12",140
|
||||
"1951-01",145
|
||||
"1951-02",150
|
||||
"1951-03",178
|
||||
"1951-04",163
|
||||
"1951-05",172
|
||||
"1951-06",178
|
||||
"1951-07",199
|
||||
"1951-08",199
|
||||
"1951-09",184
|
||||
"1951-10",162
|
||||
"1951-11",146
|
||||
"1951-12",166
|
||||
"1952-01",171
|
||||
"1952-02",180
|
||||
"1952-03",193
|
||||
"1952-04",181
|
||||
"1952-05",183
|
||||
"1952-06",218
|
||||
"1952-07",230
|
||||
"1952-08",242
|
||||
"1952-09",209
|
||||
"1952-10",191
|
||||
"1952-11",172
|
||||
"1952-12",194
|
||||
"1953-01",196
|
||||
"1953-02",196
|
||||
"1953-03",236
|
||||
"1953-04",235
|
||||
"1953-05",229
|
||||
"1953-06",243
|
||||
"1953-07",264
|
||||
"1953-08",272
|
||||
"1953-09",237
|
||||
"1953-10",211
|
||||
"1953-11",180
|
||||
"1953-12",201
|
||||
"1954-01",204
|
||||
"1954-02",188
|
||||
"1954-03",235
|
||||
"1954-04",227
|
||||
"1954-05",234
|
||||
"1954-06",264
|
||||
"1954-07",302
|
||||
"1954-08",293
|
||||
"1954-09",259
|
||||
"1954-10",229
|
||||
"1954-11",203
|
||||
"1954-12",229
|
||||
"1955-01",242
|
||||
"1955-02",233
|
||||
"1955-03",267
|
||||
"1955-04",269
|
||||
"1955-05",270
|
||||
"1955-06",315
|
||||
"1955-07",364
|
||||
"1955-08",347
|
||||
"1955-09",312
|
||||
"1955-10",274
|
||||
"1955-11",237
|
||||
"1955-12",278
|
||||
"1956-01",284
|
||||
"1956-02",277
|
||||
"1956-03",317
|
||||
"1956-04",313
|
||||
"1956-05",318
|
||||
"1956-06",374
|
||||
"1956-07",413
|
||||
"1956-08",405
|
||||
"1956-09",355
|
||||
"1956-10",306
|
||||
"1956-11",271
|
||||
"1956-12",306
|
||||
"1957-01",315
|
||||
"1957-02",301
|
||||
"1957-03",356
|
||||
"1957-04",348
|
||||
"1957-05",355
|
||||
"1957-06",422
|
||||
"1957-07",465
|
||||
"1957-08",467
|
||||
"1957-09",404
|
||||
"1957-10",347
|
||||
"1957-11",305
|
||||
"1957-12",336
|
||||
"1958-01",340
|
||||
"1958-02",318
|
||||
"1958-03",362
|
||||
"1958-04",348
|
||||
"1958-05",363
|
||||
"1958-06",435
|
||||
"1958-07",491
|
||||
"1958-08",505
|
||||
"1958-09",404
|
||||
"1958-10",359
|
||||
"1958-11",310
|
||||
"1958-12",337
|
||||
"1959-01",360
|
||||
"1959-02",342
|
||||
"1959-03",406
|
||||
"1959-04",396
|
||||
"1959-05",420
|
||||
"1959-06",472
|
||||
"1959-07",548
|
||||
"1959-08",559
|
||||
"1959-09",463
|
||||
"1959-10",407
|
||||
"1959-11",362
|
||||
"1959-12",405
|
||||
"1960-01",417
|
||||
"1960-02",391
|
||||
"1960-03",419
|
||||
"1960-04",461
|
||||
"1960-05",472
|
||||
"1960-06",535
|
||||
"1960-07",622
|
||||
"1960-08",606
|
||||
"1960-09",508
|
||||
"1960-10",461
|
||||
"1960-11",390
|
||||
"1960-12",432
|
||||
|
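international-airline-passengers.csv above is a monthly time series ("Month", "Thousand Passengers", 1949-1960). A minimal sketch for reading it with a datetime index, assuming pandas (and matplotlib for the plot call):

    # Illustrative sketch only.
    import pandas as pd

    air = pd.read_csv("data/international-airline-passengers.csv",
                      parse_dates=["Month"], index_col="Month")
    print(air.head())
    air["Thousand Passengers"].plot()   # monthly totals, 1949-1960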
151
data/iris.csv
Normal file
@@ -0,0 +1,151 @@
|
||||
sepal_length,sepal_width,petal_length,petal_width,species
|
||||
5.1,3.5,1.4,0.2,setosa
|
||||
4.9,3.0,1.4,0.2,setosa
|
||||
4.7,3.2,1.3,0.2,setosa
|
||||
4.6,3.1,1.5,0.2,setosa
|
||||
5.0,3.6,1.4,0.2,setosa
|
||||
5.4,3.9,1.7,0.4,setosa
|
||||
4.6,3.4,1.4,0.3,setosa
|
||||
5.0,3.4,1.5,0.2,setosa
|
||||
4.4,2.9,1.4,0.2,setosa
|
||||
4.9,3.1,1.5,0.1,setosa
|
||||
5.4,3.7,1.5,0.2,setosa
|
||||
4.8,3.4,1.6,0.2,setosa
|
||||
4.8,3.0,1.4,0.1,setosa
|
||||
4.3,3.0,1.1,0.1,setosa
|
||||
5.8,4.0,1.2,0.2,setosa
|
||||
5.7,4.4,1.5,0.4,setosa
|
||||
5.4,3.9,1.3,0.4,setosa
|
||||
5.1,3.5,1.4,0.3,setosa
|
||||
5.7,3.8,1.7,0.3,setosa
|
||||
5.1,3.8,1.5,0.3,setosa
|
||||
5.4,3.4,1.7,0.2,setosa
|
||||
5.1,3.7,1.5,0.4,setosa
|
||||
4.6,3.6,1.0,0.2,setosa
|
||||
5.1,3.3,1.7,0.5,setosa
|
||||
4.8,3.4,1.9,0.2,setosa
|
||||
5.0,3.0,1.6,0.2,setosa
|
||||
5.0,3.4,1.6,0.4,setosa
|
||||
5.2,3.5,1.5,0.2,setosa
|
||||
5.2,3.4,1.4,0.2,setosa
|
||||
4.7,3.2,1.6,0.2,setosa
|
||||
4.8,3.1,1.6,0.2,setosa
|
||||
5.4,3.4,1.5,0.4,setosa
|
||||
5.2,4.1,1.5,0.1,setosa
|
||||
5.5,4.2,1.4,0.2,setosa
|
||||
4.9,3.1,1.5,0.2,setosa
|
||||
5.0,3.2,1.2,0.2,setosa
|
||||
5.5,3.5,1.3,0.2,setosa
|
||||
4.9,3.6,1.4,0.1,setosa
|
||||
4.4,3.0,1.3,0.2,setosa
|
||||
5.1,3.4,1.5,0.2,setosa
|
||||
5.0,3.5,1.3,0.3,setosa
|
||||
4.5,2.3,1.3,0.3,setosa
|
||||
4.4,3.2,1.3,0.2,setosa
|
||||
5.0,3.5,1.6,0.6,setosa
|
||||
5.1,3.8,1.9,0.4,setosa
|
||||
4.8,3.0,1.4,0.3,setosa
|
||||
5.1,3.8,1.6,0.2,setosa
|
||||
4.6,3.2,1.4,0.2,setosa
|
||||
5.3,3.7,1.5,0.2,setosa
|
||||
5.0,3.3,1.4,0.2,setosa
|
||||
7.0,3.2,4.7,1.4,versicolor
|
||||
6.4,3.2,4.5,1.5,versicolor
|
||||
6.9,3.1,4.9,1.5,versicolor
|
||||
5.5,2.3,4.0,1.3,versicolor
|
||||
6.5,2.8,4.6,1.5,versicolor
|
||||
5.7,2.8,4.5,1.3,versicolor
|
||||
6.3,3.3,4.7,1.6,versicolor
|
||||
4.9,2.4,3.3,1.0,versicolor
|
||||
6.6,2.9,4.6,1.3,versicolor
|
||||
5.2,2.7,3.9,1.4,versicolor
|
||||
5.0,2.0,3.5,1.0,versicolor
|
||||
5.9,3.0,4.2,1.5,versicolor
|
||||
6.0,2.2,4.0,1.0,versicolor
|
||||
6.1,2.9,4.7,1.4,versicolor
|
||||
5.6,2.9,3.6,1.3,versicolor
|
||||
6.7,3.1,4.4,1.4,versicolor
|
||||
5.6,3.0,4.5,1.5,versicolor
|
||||
5.8,2.7,4.1,1.0,versicolor
|
||||
6.2,2.2,4.5,1.5,versicolor
|
||||
5.6,2.5,3.9,1.1,versicolor
|
||||
5.9,3.2,4.8,1.8,versicolor
|
||||
6.1,2.8,4.0,1.3,versicolor
|
||||
6.3,2.5,4.9,1.5,versicolor
|
||||
6.1,2.8,4.7,1.2,versicolor
|
||||
6.4,2.9,4.3,1.3,versicolor
|
||||
6.6,3.0,4.4,1.4,versicolor
|
||||
6.8,2.8,4.8,1.4,versicolor
|
||||
6.7,3.0,5.0,1.7,versicolor
|
||||
6.0,2.9,4.5,1.5,versicolor
|
||||
5.7,2.6,3.5,1.0,versicolor
|
||||
5.5,2.4,3.8,1.1,versicolor
|
||||
5.5,2.4,3.7,1.0,versicolor
|
||||
5.8,2.7,3.9,1.2,versicolor
|
||||
6.0,2.7,5.1,1.6,versicolor
|
||||
5.4,3.0,4.5,1.5,versicolor
|
||||
6.0,3.4,4.5,1.6,versicolor
|
||||
6.7,3.1,4.7,1.5,versicolor
|
||||
6.3,2.3,4.4,1.3,versicolor
|
||||
5.6,3.0,4.1,1.3,versicolor
|
||||
5.5,2.5,4.0,1.3,versicolor
|
||||
5.5,2.6,4.4,1.2,versicolor
|
||||
6.1,3.0,4.6,1.4,versicolor
|
||||
5.8,2.6,4.0,1.2,versicolor
|
||||
5.0,2.3,3.3,1.0,versicolor
|
||||
5.6,2.7,4.2,1.3,versicolor
|
||||
5.7,3.0,4.2,1.2,versicolor
|
||||
5.7,2.9,4.2,1.3,versicolor
|
||||
6.2,2.9,4.3,1.3,versicolor
|
||||
5.1,2.5,3.0,1.1,versicolor
|
||||
5.7,2.8,4.1,1.3,versicolor
|
||||
6.3,3.3,6.0,2.5,virginica
|
||||
5.8,2.7,5.1,1.9,virginica
|
||||
7.1,3.0,5.9,2.1,virginica
|
||||
6.3,2.9,5.6,1.8,virginica
|
||||
6.5,3.0,5.8,2.2,virginica
|
||||
7.6,3.0,6.6,2.1,virginica
|
||||
4.9,2.5,4.5,1.7,virginica
|
||||
7.3,2.9,6.3,1.8,virginica
|
||||
6.7,2.5,5.8,1.8,virginica
|
||||
7.2,3.6,6.1,2.5,virginica
|
||||
6.5,3.2,5.1,2.0,virginica
|
||||
6.4,2.7,5.3,1.9,virginica
|
||||
6.8,3.0,5.5,2.1,virginica
|
||||
5.7,2.5,5.0,2.0,virginica
|
||||
5.8,2.8,5.1,2.4,virginica
|
||||
6.4,3.2,5.3,2.3,virginica
|
||||
6.5,3.0,5.5,1.8,virginica
|
||||
7.7,3.8,6.7,2.2,virginica
|
||||
7.7,2.6,6.9,2.3,virginica
|
||||
6.0,2.2,5.0,1.5,virginica
|
||||
6.9,3.2,5.7,2.3,virginica
|
||||
5.6,2.8,4.9,2.0,virginica
|
||||
7.7,2.8,6.7,2.0,virginica
|
||||
6.3,2.7,4.9,1.8,virginica
|
||||
6.7,3.3,5.7,2.1,virginica
|
||||
7.2,3.2,6.0,1.8,virginica
|
||||
6.2,2.8,4.8,1.8,virginica
|
||||
6.1,3.0,4.9,1.8,virginica
|
||||
6.4,2.8,5.6,2.1,virginica
|
||||
7.2,3.0,5.8,1.6,virginica
|
||||
7.4,2.8,6.1,1.9,virginica
|
||||
7.9,3.8,6.4,2.0,virginica
|
||||
6.4,2.8,5.6,2.2,virginica
|
||||
6.3,2.8,5.1,1.5,virginica
|
||||
6.1,2.6,5.6,1.4,virginica
|
||||
7.7,3.0,6.1,2.3,virginica
|
||||
6.3,3.4,5.6,2.4,virginica
|
||||
6.4,3.1,5.5,1.8,virginica
|
||||
6.0,3.0,4.8,1.8,virginica
|
||||
6.9,3.1,5.4,2.1,virginica
|
||||
6.7,3.1,5.6,2.4,virginica
|
||||
6.9,3.1,5.1,2.3,virginica
|
||||
5.8,2.7,5.1,1.9,virginica
|
||||
6.8,3.2,5.9,2.3,virginica
|
||||
6.7,3.3,5.7,2.5,virginica
|
||||
6.7,3.0,5.2,2.3,virginica
|
||||
6.3,2.5,5.0,1.9,virginica
|
||||
6.5,3.0,5.2,2.0,virginica
|
||||
6.2,3.4,5.4,2.3,virginica
|
||||
5.9,3.0,5.1,1.8,virginica
|
||||
|
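iris.csv above holds four measurement columns and a species label, 50 rows per species. A minimal loading sketch, assuming pandas:

    # Illustrative sketch only.
    import pandas as pd

    iris = pd.read_csv("data/iris.csv")
    print(iris["species"].value_counts())   # 50 rows per species
    print(iris.groupby("species").mean())   # per-species feature averages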
BIN
data/iss.jpg
Normal file
Binary file not shown.
After Width: | Height: | Size: 104 KiB |
BIN
data/sms.wav
Normal file
Binary file not shown.
301
data/test.csv
Normal file
@@ -0,0 +1,301 @@
|
||||
x,y
|
||||
77,79.77515201
|
||||
21,23.17727887
|
||||
22,25.60926156
|
||||
20,17.85738813
|
||||
36,41.84986439
|
||||
15,9.805234876
|
||||
62,58.87465933
|
||||
95,97.61793701
|
||||
20,18.39512747
|
||||
5,8.746747654
|
||||
4,2.811415826
|
||||
19,17.09537241
|
||||
96,95.14907176
|
||||
62,61.38800663
|
||||
36,40.24701716
|
||||
15,14.82248589
|
||||
65,66.95806869
|
||||
14,16.63507984
|
||||
87,90.65513736
|
||||
69,77.22982636
|
||||
89,92.11906278
|
||||
51,46.91387709
|
||||
89,89.82634442
|
||||
27,21.71380347
|
||||
97,97.41206981
|
||||
58,57.01631363
|
||||
79,78.31056542
|
||||
21,19.1315097
|
||||
93,93.03483388
|
||||
27,26.59112396
|
||||
99,97.55155344
|
||||
31,31.43524822
|
||||
33,35.12724777
|
||||
80,78.61042432
|
||||
28,33.07112825
|
||||
47,51.69967172
|
||||
53,53.62235225
|
||||
69,69.46306072
|
||||
28,27.42497237
|
||||
33,36.34644189
|
||||
91,95.06140858
|
||||
71,68.16724757
|
||||
50,50.96155532
|
||||
76,78.04237454
|
||||
4,5.607664865
|
||||
37,36.11334779
|
||||
70,67.2352155
|
||||
68,65.01324035
|
||||
40,38.14753871
|
||||
35,34.31141446
|
||||
94,95.28503937
|
||||
88,87.84749912
|
||||
52,54.08170635
|
||||
31,31.93063515
|
||||
59,59.61247085
|
||||
0,-1.040114209
|
||||
39,47.49374765
|
||||
64,62.60089773
|
||||
69,70.9146434
|
||||
57,56.14834113
|
||||
13,14.05572877
|
||||
72,68.11367147
|
||||
76,75.59701346
|
||||
61,59.225745
|
||||
82,85.45504157
|
||||
18,17.76197116
|
||||
41,38.68888682
|
||||
50,50.96343637
|
||||
55,51.83503872
|
||||
13,17.0761107
|
||||
46,46.56141773
|
||||
13,10.34754461
|
||||
79,77.91032969
|
||||
53,50.17008622
|
||||
15,13.25690647
|
||||
28,31.32274932
|
||||
81,73.9308764
|
||||
69,74.45114379
|
||||
52,52.01932286
|
||||
84,83.68820499
|
||||
68,70.3698748
|
||||
27,23.44479161
|
||||
56,49.83051801
|
||||
48,49.88226593
|
||||
40,41.04525583
|
||||
39,33.37834391
|
||||
82,81.29750133
|
||||
100,105.5918375
|
||||
59,56.82457013
|
||||
43,48.67252645
|
||||
67,67.02150613
|
||||
38,38.43076389
|
||||
63,58.61466887
|
||||
91,89.12377509
|
||||
60,60.9105427
|
||||
14,13.83959878
|
||||
21,16.89085185
|
||||
87,84.06676818
|
||||
73,70.34969772
|
||||
32,33.38474138
|
||||
2,-1.63296825
|
||||
82,88.54475895
|
||||
19,17.44047622
|
||||
74,75.69298554
|
||||
42,41.97607107
|
||||
12,12.59244741
|
||||
1,0.275307261
|
||||
90,98.13258005
|
||||
89,87.45721555
|
||||
0,-2.344738542
|
||||
41,39.3294153
|
||||
16,16.68715211
|
||||
94,96.58888601
|
||||
97,97.70342201
|
||||
66,67.01715955
|
||||
24,25.63476257
|
||||
17,13.41310757
|
||||
90,95.15647284
|
||||
13,9.744164258
|
||||
0,-3.467883789
|
||||
64,62.82816355
|
||||
96,97.27405461
|
||||
98,95.58017185
|
||||
12,7.468501839
|
||||
41,45.44599591
|
||||
47,46.69013968
|
||||
78,74.4993599
|
||||
20,21.63500655
|
||||
89,91.59548851
|
||||
29,26.49487961
|
||||
64,67.38654703
|
||||
75,74.25362837
|
||||
12,12.07991648
|
||||
25,21.32273728
|
||||
28,29.31770045
|
||||
30,26.48713683
|
||||
65,68.94699774
|
||||
59,59.10598995
|
||||
64,64.37521087
|
||||
53,60.20758349
|
||||
71,70.34329706
|
||||
97,97.1082562
|
||||
73,75.7584178
|
||||
9,10.80462727
|
||||
12,12.11219941
|
||||
63,63.28312382
|
||||
99,98.03017721
|
||||
60,63.19354354
|
||||
35,34.8534823
|
||||
2,-2.819913974
|
||||
60,59.8313966
|
||||
32,29.38505024
|
||||
94,97.00148372
|
||||
84,85.18657275
|
||||
63,61.74063192
|
||||
22,18.84798163
|
||||
81,78.79008525
|
||||
93,95.12400481
|
||||
33,30.48881287
|
||||
7,10.41468095
|
||||
42,38.98317436
|
||||
46,46.11021062
|
||||
54,52.45103628
|
||||
16,21.16523945
|
||||
49,52.28620611
|
||||
43,44.18863945
|
||||
95,97.13832018
|
||||
66,67.22008001
|
||||
21,18.98322306
|
||||
35,24.3884599
|
||||
80,79.44769523
|
||||
37,40.03504862
|
||||
54,53.32005764
|
||||
56,54.55446979
|
||||
1,-2.761182595
|
||||
32,37.80182795
|
||||
58,57.48741435
|
||||
32,36.06292994
|
||||
46,49.83538167
|
||||
72,74.68953276
|
||||
17,14.86159401
|
||||
97,101.0697879
|
||||
93,99.43577876
|
||||
91,91.69240746
|
||||
37,34.12473248
|
||||
4,6.079390073
|
||||
54,59.07247174
|
||||
51,56.43046022
|
||||
27,30.49412933
|
||||
46,48.35172635
|
||||
92,89.73153611
|
||||
73,72.86282528
|
||||
77,80.97144285
|
||||
91,91.36566374
|
||||
61,60.07137496
|
||||
99,99.87382707
|
||||
4,8.655714172
|
||||
72,69.39858505
|
||||
19,19.38780134
|
||||
57,53.11628433
|
||||
78,78.39683006
|
||||
26,25.75612514
|
||||
74,75.07484683
|
||||
90,92.88772282
|
||||
66,69.45498498
|
||||
13,13.12109842
|
||||
40,48.09843134
|
||||
77,79.3142548
|
||||
67,68.48820749
|
||||
75,73.2300846
|
||||
23,24.68362712
|
||||
45,41.90368917
|
||||
59,62.22635684
|
||||
44,45.96396877
|
||||
23,23.52647153
|
||||
55,51.80035866
|
||||
55,51.10774273
|
||||
95,95.79747345
|
||||
12,9.241138977
|
||||
4,7.646529763
|
||||
7,9.281699753
|
||||
100,103.5266162
|
||||
48,47.41006725
|
||||
42,42.03835773
|
||||
96,96.11982476
|
||||
39,38.05766408
|
||||
100,105.4503788
|
||||
87,88.80306911
|
||||
14,15.49301141
|
||||
14,12.42624606
|
||||
37,40.00709598
|
||||
5,5.634030902
|
||||
88,87.36938931
|
||||
91,89.73951993
|
||||
65,66.61499643
|
||||
74,72.9138853
|
||||
56,57.19103506
|
||||
16,11.21710477
|
||||
5,0.676076749
|
||||
28,28.15668543
|
||||
92,95.3958003
|
||||
46,52.05490703
|
||||
54,59.70864577
|
||||
39,36.79224762
|
||||
44,37.08457698
|
||||
31,24.18437976
|
||||
68,67.28725332
|
||||
86,82.870594
|
||||
90,89.899991
|
||||
38,36.94173178
|
||||
21,19.87562242
|
||||
95,90.71481654
|
||||
56,61.09367762
|
||||
60,60.11134958
|
||||
65,64.83296316
|
||||
78,81.40381769
|
||||
89,92.40217686
|
||||
6,2.576625376
|
||||
67,63.80768172
|
||||
36,38.67780759
|
||||
16,16.82839701
|
||||
100,99.78687252
|
||||
45,44.68913433
|
||||
73,71.00377824
|
||||
57,51.57326718
|
||||
20,19.87846479
|
||||
76,79.50341495
|
||||
34,34.58876491
|
||||
55,55.7383467
|
||||
72,68.19721905
|
||||
55,55.81628509
|
||||
8,9.391416798
|
||||
56,56.01448111
|
||||
72,77.9969477
|
||||
58,55.37049953
|
||||
6,11.89457829
|
||||
96,94.79081712
|
||||
23,25.69041546
|
||||
58,53.52042319
|
||||
23,18.31396758
|
||||
19,21.42637785
|
||||
25,30.41303282
|
||||
64,67.68142149
|
||||
21,17.0854783
|
||||
59,60.91792707
|
||||
19,14.99514319
|
||||
16,16.74923937
|
||||
42,41.46923883
|
||||
43,42.84526108
|
||||
61,59.12912974
|
||||
92,91.30863673
|
||||
11,8.673336357
|
||||
41,39.31485292
|
||||
1,5.313686205
|
||||
8,5.405220518
|
||||
71,68.5458879
|
||||
46,47.33487629
|
||||
55,54.09063686
|
||||
62,63.29717058
|
||||
47,52.45946688
892
data/titanic-train.csv
Normal file
@@ -0,0 +1,892 @@
PassengerId,Survived,Pclass,Name,Sex,Age,SibSp,Parch,Ticket,Fare,Cabin,Embarked
|
||||
1,0,3,"Braund, Mr. Owen Harris",male,22,1,0,A/5 21171,7.25,,S
|
||||
2,1,1,"Cumings, Mrs. John Bradley (Florence Briggs Thayer)",female,38,1,0,PC 17599,71.2833,C85,C
|
||||
3,1,3,"Heikkinen, Miss. Laina",female,26,0,0,STON/O2. 3101282,7.925,,S
|
||||
4,1,1,"Futrelle, Mrs. Jacques Heath (Lily May Peel)",female,35,1,0,113803,53.1,C123,S
|
||||
5,0,3,"Allen, Mr. William Henry",male,35,0,0,373450,8.05,,S
|
||||
6,0,3,"Moran, Mr. James",male,,0,0,330877,8.4583,,Q
|
||||
7,0,1,"McCarthy, Mr. Timothy J",male,54,0,0,17463,51.8625,E46,S
|
||||
8,0,3,"Palsson, Master. Gosta Leonard",male,2,3,1,349909,21.075,,S
|
||||
9,1,3,"Johnson, Mrs. Oscar W (Elisabeth Vilhelmina Berg)",female,27,0,2,347742,11.1333,,S
|
||||
10,1,2,"Nasser, Mrs. Nicholas (Adele Achem)",female,14,1,0,237736,30.0708,,C
|
||||
11,1,3,"Sandstrom, Miss. Marguerite Rut",female,4,1,1,PP 9549,16.7,G6,S
|
||||
12,1,1,"Bonnell, Miss. Elizabeth",female,58,0,0,113783,26.55,C103,S
|
||||
13,0,3,"Saundercock, Mr. William Henry",male,20,0,0,A/5. 2151,8.05,,S
|
||||
14,0,3,"Andersson, Mr. Anders Johan",male,39,1,5,347082,31.275,,S
|
||||
15,0,3,"Vestrom, Miss. Hulda Amanda Adolfina",female,14,0,0,350406,7.8542,,S
|
||||
16,1,2,"Hewlett, Mrs. (Mary D Kingcome) ",female,55,0,0,248706,16,,S
|
||||
17,0,3,"Rice, Master. Eugene",male,2,4,1,382652,29.125,,Q
|
||||
18,1,2,"Williams, Mr. Charles Eugene",male,,0,0,244373,13,,S
|
||||
19,0,3,"Vander Planke, Mrs. Julius (Emelia Maria Vandemoortele)",female,31,1,0,345763,18,,S
|
||||
20,1,3,"Masselmani, Mrs. Fatima",female,,0,0,2649,7.225,,C
|
||||
21,0,2,"Fynney, Mr. Joseph J",male,35,0,0,239865,26,,S
|
||||
22,1,2,"Beesley, Mr. Lawrence",male,34,0,0,248698,13,D56,S
|
||||
23,1,3,"McGowan, Miss. Anna ""Annie""",female,15,0,0,330923,8.0292,,Q
|
||||
24,1,1,"Sloper, Mr. William Thompson",male,28,0,0,113788,35.5,A6,S
|
||||
25,0,3,"Palsson, Miss. Torborg Danira",female,8,3,1,349909,21.075,,S
|
||||
26,1,3,"Asplund, Mrs. Carl Oscar (Selma Augusta Emilia Johansson)",female,38,1,5,347077,31.3875,,S
|
||||
27,0,3,"Emir, Mr. Farred Chehab",male,,0,0,2631,7.225,,C
|
||||
28,0,1,"Fortune, Mr. Charles Alexander",male,19,3,2,19950,263,C23 C25 C27,S
|
||||
29,1,3,"O'Dwyer, Miss. Ellen ""Nellie""",female,,0,0,330959,7.8792,,Q
|
||||
30,0,3,"Todoroff, Mr. Lalio",male,,0,0,349216,7.8958,,S
|
||||
31,0,1,"Uruchurtu, Don. Manuel E",male,40,0,0,PC 17601,27.7208,,C
|
||||
32,1,1,"Spencer, Mrs. William Augustus (Marie Eugenie)",female,,1,0,PC 17569,146.5208,B78,C
|
||||
33,1,3,"Glynn, Miss. Mary Agatha",female,,0,0,335677,7.75,,Q
|
||||
34,0,2,"Wheadon, Mr. Edward H",male,66,0,0,C.A. 24579,10.5,,S
|
||||
35,0,1,"Meyer, Mr. Edgar Joseph",male,28,1,0,PC 17604,82.1708,,C
|
||||
36,0,1,"Holverson, Mr. Alexander Oskar",male,42,1,0,113789,52,,S
|
||||
37,1,3,"Mamee, Mr. Hanna",male,,0,0,2677,7.2292,,C
|
||||
38,0,3,"Cann, Mr. Ernest Charles",male,21,0,0,A./5. 2152,8.05,,S
|
||||
39,0,3,"Vander Planke, Miss. Augusta Maria",female,18,2,0,345764,18,,S
|
||||
40,1,3,"Nicola-Yarred, Miss. Jamila",female,14,1,0,2651,11.2417,,C
|
||||
41,0,3,"Ahlin, Mrs. Johan (Johanna Persdotter Larsson)",female,40,1,0,7546,9.475,,S
|
||||
42,0,2,"Turpin, Mrs. William John Robert (Dorothy Ann Wonnacott)",female,27,1,0,11668,21,,S
|
||||
43,0,3,"Kraeff, Mr. Theodor",male,,0,0,349253,7.8958,,C
|
||||
44,1,2,"Laroche, Miss. Simonne Marie Anne Andree",female,3,1,2,SC/Paris 2123,41.5792,,C
|
||||
45,1,3,"Devaney, Miss. Margaret Delia",female,19,0,0,330958,7.8792,,Q
|
||||
46,0,3,"Rogers, Mr. William John",male,,0,0,S.C./A.4. 23567,8.05,,S
|
||||
47,0,3,"Lennon, Mr. Denis",male,,1,0,370371,15.5,,Q
|
||||
48,1,3,"O'Driscoll, Miss. Bridget",female,,0,0,14311,7.75,,Q
|
||||
49,0,3,"Samaan, Mr. Youssef",male,,2,0,2662,21.6792,,C
|
||||
50,0,3,"Arnold-Franchi, Mrs. Josef (Josefine Franchi)",female,18,1,0,349237,17.8,,S
|
||||
51,0,3,"Panula, Master. Juha Niilo",male,7,4,1,3101295,39.6875,,S
|
||||
52,0,3,"Nosworthy, Mr. Richard Cater",male,21,0,0,A/4. 39886,7.8,,S
|
||||
53,1,1,"Harper, Mrs. Henry Sleeper (Myna Haxtun)",female,49,1,0,PC 17572,76.7292,D33,C
|
||||
54,1,2,"Faunthorpe, Mrs. Lizzie (Elizabeth Anne Wilkinson)",female,29,1,0,2926,26,,S
|
||||
55,0,1,"Ostby, Mr. Engelhart Cornelius",male,65,0,1,113509,61.9792,B30,C
|
||||
56,1,1,"Woolner, Mr. Hugh",male,,0,0,19947,35.5,C52,S
|
||||
57,1,2,"Rugg, Miss. Emily",female,21,0,0,C.A. 31026,10.5,,S
|
||||
58,0,3,"Novel, Mr. Mansouer",male,28.5,0,0,2697,7.2292,,C
|
||||
59,1,2,"West, Miss. Constance Mirium",female,5,1,2,C.A. 34651,27.75,,S
|
||||
60,0,3,"Goodwin, Master. William Frederick",male,11,5,2,CA 2144,46.9,,S
|
||||
61,0,3,"Sirayanian, Mr. Orsen",male,22,0,0,2669,7.2292,,C
|
||||
62,1,1,"Icard, Miss. Amelie",female,38,0,0,113572,80,B28,
|
||||
63,0,1,"Harris, Mr. Henry Birkhardt",male,45,1,0,36973,83.475,C83,S
|
||||
64,0,3,"Skoog, Master. Harald",male,4,3,2,347088,27.9,,S
|
||||
65,0,1,"Stewart, Mr. Albert A",male,,0,0,PC 17605,27.7208,,C
|
||||
66,1,3,"Moubarek, Master. Gerios",male,,1,1,2661,15.2458,,C
|
||||
67,1,2,"Nye, Mrs. (Elizabeth Ramell)",female,29,0,0,C.A. 29395,10.5,F33,S
|
||||
68,0,3,"Crease, Mr. Ernest James",male,19,0,0,S.P. 3464,8.1583,,S
|
||||
69,1,3,"Andersson, Miss. Erna Alexandra",female,17,4,2,3101281,7.925,,S
|
||||
70,0,3,"Kink, Mr. Vincenz",male,26,2,0,315151,8.6625,,S
|
||||
71,0,2,"Jenkin, Mr. Stephen Curnow",male,32,0,0,C.A. 33111,10.5,,S
|
||||
72,0,3,"Goodwin, Miss. Lillian Amy",female,16,5,2,CA 2144,46.9,,S
|
||||
73,0,2,"Hood, Mr. Ambrose Jr",male,21,0,0,S.O.C. 14879,73.5,,S
|
||||
74,0,3,"Chronopoulos, Mr. Apostolos",male,26,1,0,2680,14.4542,,C
|
||||
75,1,3,"Bing, Mr. Lee",male,32,0,0,1601,56.4958,,S
|
||||
76,0,3,"Moen, Mr. Sigurd Hansen",male,25,0,0,348123,7.65,F G73,S
|
||||
77,0,3,"Staneff, Mr. Ivan",male,,0,0,349208,7.8958,,S
|
||||
78,0,3,"Moutal, Mr. Rahamin Haim",male,,0,0,374746,8.05,,S
|
||||
79,1,2,"Caldwell, Master. Alden Gates",male,0.83,0,2,248738,29,,S
|
||||
80,1,3,"Dowdell, Miss. Elizabeth",female,30,0,0,364516,12.475,,S
|
||||
81,0,3,"Waelens, Mr. Achille",male,22,0,0,345767,9,,S
|
||||
82,1,3,"Sheerlinck, Mr. Jan Baptist",male,29,0,0,345779,9.5,,S
|
||||
83,1,3,"McDermott, Miss. Brigdet Delia",female,,0,0,330932,7.7875,,Q
|
||||
84,0,1,"Carrau, Mr. Francisco M",male,28,0,0,113059,47.1,,S
|
||||
85,1,2,"Ilett, Miss. Bertha",female,17,0,0,SO/C 14885,10.5,,S
|
||||
86,1,3,"Backstrom, Mrs. Karl Alfred (Maria Mathilda Gustafsson)",female,33,3,0,3101278,15.85,,S
|
||||
87,0,3,"Ford, Mr. William Neal",male,16,1,3,W./C. 6608,34.375,,S
|
||||
88,0,3,"Slocovski, Mr. Selman Francis",male,,0,0,SOTON/OQ 392086,8.05,,S
|
||||
89,1,1,"Fortune, Miss. Mabel Helen",female,23,3,2,19950,263,C23 C25 C27,S
|
||||
90,0,3,"Celotti, Mr. Francesco",male,24,0,0,343275,8.05,,S
|
||||
91,0,3,"Christmann, Mr. Emil",male,29,0,0,343276,8.05,,S
|
||||
92,0,3,"Andreasson, Mr. Paul Edvin",male,20,0,0,347466,7.8542,,S
|
||||
93,0,1,"Chaffee, Mr. Herbert Fuller",male,46,1,0,W.E.P. 5734,61.175,E31,S
|
||||
94,0,3,"Dean, Mr. Bertram Frank",male,26,1,2,C.A. 2315,20.575,,S
|
||||
95,0,3,"Coxon, Mr. Daniel",male,59,0,0,364500,7.25,,S
|
||||
96,0,3,"Shorney, Mr. Charles Joseph",male,,0,0,374910,8.05,,S
|
||||
97,0,1,"Goldschmidt, Mr. George B",male,71,0,0,PC 17754,34.6542,A5,C
|
||||
98,1,1,"Greenfield, Mr. William Bertram",male,23,0,1,PC 17759,63.3583,D10 D12,C
|
||||
99,1,2,"Doling, Mrs. John T (Ada Julia Bone)",female,34,0,1,231919,23,,S
|
||||
100,0,2,"Kantor, Mr. Sinai",male,34,1,0,244367,26,,S
|
||||
101,0,3,"Petranec, Miss. Matilda",female,28,0,0,349245,7.8958,,S
|
||||
102,0,3,"Petroff, Mr. Pastcho (""Pentcho"")",male,,0,0,349215,7.8958,,S
|
||||
103,0,1,"White, Mr. Richard Frasar",male,21,0,1,35281,77.2875,D26,S
|
||||
104,0,3,"Johansson, Mr. Gustaf Joel",male,33,0,0,7540,8.6542,,S
|
||||
105,0,3,"Gustafsson, Mr. Anders Vilhelm",male,37,2,0,3101276,7.925,,S
|
||||
106,0,3,"Mionoff, Mr. Stoytcho",male,28,0,0,349207,7.8958,,S
|
||||
107,1,3,"Salkjelsvik, Miss. Anna Kristine",female,21,0,0,343120,7.65,,S
|
||||
108,1,3,"Moss, Mr. Albert Johan",male,,0,0,312991,7.775,,S
|
||||
109,0,3,"Rekic, Mr. Tido",male,38,0,0,349249,7.8958,,S
|
||||
110,1,3,"Moran, Miss. Bertha",female,,1,0,371110,24.15,,Q
|
||||
111,0,1,"Porter, Mr. Walter Chamberlain",male,47,0,0,110465,52,C110,S
|
||||
112,0,3,"Zabour, Miss. Hileni",female,14.5,1,0,2665,14.4542,,C
|
||||
113,0,3,"Barton, Mr. David John",male,22,0,0,324669,8.05,,S
|
||||
114,0,3,"Jussila, Miss. Katriina",female,20,1,0,4136,9.825,,S
|
||||
115,0,3,"Attalah, Miss. Malake",female,17,0,0,2627,14.4583,,C
|
||||
116,0,3,"Pekoniemi, Mr. Edvard",male,21,0,0,STON/O 2. 3101294,7.925,,S
|
||||
117,0,3,"Connors, Mr. Patrick",male,70.5,0,0,370369,7.75,,Q
|
||||
118,0,2,"Turpin, Mr. William John Robert",male,29,1,0,11668,21,,S
|
||||
119,0,1,"Baxter, Mr. Quigg Edmond",male,24,0,1,PC 17558,247.5208,B58 B60,C
|
||||
120,0,3,"Andersson, Miss. Ellis Anna Maria",female,2,4,2,347082,31.275,,S
|
||||
121,0,2,"Hickman, Mr. Stanley George",male,21,2,0,S.O.C. 14879,73.5,,S
|
||||
122,0,3,"Moore, Mr. Leonard Charles",male,,0,0,A4. 54510,8.05,,S
|
||||
123,0,2,"Nasser, Mr. Nicholas",male,32.5,1,0,237736,30.0708,,C
|
||||
124,1,2,"Webber, Miss. Susan",female,32.5,0,0,27267,13,E101,S
|
||||
125,0,1,"White, Mr. Percival Wayland",male,54,0,1,35281,77.2875,D26,S
|
||||
126,1,3,"Nicola-Yarred, Master. Elias",male,12,1,0,2651,11.2417,,C
|
||||
127,0,3,"McMahon, Mr. Martin",male,,0,0,370372,7.75,,Q
|
||||
128,1,3,"Madsen, Mr. Fridtjof Arne",male,24,0,0,C 17369,7.1417,,S
|
||||
129,1,3,"Peter, Miss. Anna",female,,1,1,2668,22.3583,F E69,C
|
||||
130,0,3,"Ekstrom, Mr. Johan",male,45,0,0,347061,6.975,,S
|
||||
131,0,3,"Drazenoic, Mr. Jozef",male,33,0,0,349241,7.8958,,C
|
||||
132,0,3,"Coelho, Mr. Domingos Fernandeo",male,20,0,0,SOTON/O.Q. 3101307,7.05,,S
|
||||
133,0,3,"Robins, Mrs. Alexander A (Grace Charity Laury)",female,47,1,0,A/5. 3337,14.5,,S
|
||||
134,1,2,"Weisz, Mrs. Leopold (Mathilde Francoise Pede)",female,29,1,0,228414,26,,S
|
||||
135,0,2,"Sobey, Mr. Samuel James Hayden",male,25,0,0,C.A. 29178,13,,S
|
||||
136,0,2,"Richard, Mr. Emile",male,23,0,0,SC/PARIS 2133,15.0458,,C
|
||||
137,1,1,"Newsom, Miss. Helen Monypeny",female,19,0,2,11752,26.2833,D47,S
|
||||
138,0,1,"Futrelle, Mr. Jacques Heath",male,37,1,0,113803,53.1,C123,S
|
||||
139,0,3,"Osen, Mr. Olaf Elon",male,16,0,0,7534,9.2167,,S
|
||||
140,0,1,"Giglio, Mr. Victor",male,24,0,0,PC 17593,79.2,B86,C
|
||||
141,0,3,"Boulos, Mrs. Joseph (Sultana)",female,,0,2,2678,15.2458,,C
|
||||
142,1,3,"Nysten, Miss. Anna Sofia",female,22,0,0,347081,7.75,,S
|
||||
143,1,3,"Hakkarainen, Mrs. Pekka Pietari (Elin Matilda Dolck)",female,24,1,0,STON/O2. 3101279,15.85,,S
|
||||
144,0,3,"Burke, Mr. Jeremiah",male,19,0,0,365222,6.75,,Q
|
||||
145,0,2,"Andrew, Mr. Edgardo Samuel",male,18,0,0,231945,11.5,,S
|
||||
146,0,2,"Nicholls, Mr. Joseph Charles",male,19,1,1,C.A. 33112,36.75,,S
|
||||
147,1,3,"Andersson, Mr. August Edvard (""Wennerstrom"")",male,27,0,0,350043,7.7958,,S
|
||||
148,0,3,"Ford, Miss. Robina Maggie ""Ruby""",female,9,2,2,W./C. 6608,34.375,,S
|
||||
149,0,2,"Navratil, Mr. Michel (""Louis M Hoffman"")",male,36.5,0,2,230080,26,F2,S
|
||||
150,0,2,"Byles, Rev. Thomas Roussel Davids",male,42,0,0,244310,13,,S
|
||||
151,0,2,"Bateman, Rev. Robert James",male,51,0,0,S.O.P. 1166,12.525,,S
|
||||
152,1,1,"Pears, Mrs. Thomas (Edith Wearne)",female,22,1,0,113776,66.6,C2,S
|
||||
153,0,3,"Meo, Mr. Alfonzo",male,55.5,0,0,A.5. 11206,8.05,,S
|
||||
154,0,3,"van Billiard, Mr. Austin Blyler",male,40.5,0,2,A/5. 851,14.5,,S
|
||||
155,0,3,"Olsen, Mr. Ole Martin",male,,0,0,Fa 265302,7.3125,,S
|
||||
156,0,1,"Williams, Mr. Charles Duane",male,51,0,1,PC 17597,61.3792,,C
|
||||
157,1,3,"Gilnagh, Miss. Katherine ""Katie""",female,16,0,0,35851,7.7333,,Q
|
||||
158,0,3,"Corn, Mr. Harry",male,30,0,0,SOTON/OQ 392090,8.05,,S
|
||||
159,0,3,"Smiljanic, Mr. Mile",male,,0,0,315037,8.6625,,S
|
||||
160,0,3,"Sage, Master. Thomas Henry",male,,8,2,CA. 2343,69.55,,S
|
||||
161,0,3,"Cribb, Mr. John Hatfield",male,44,0,1,371362,16.1,,S
|
||||
162,1,2,"Watt, Mrs. James (Elizabeth ""Bessie"" Inglis Milne)",female,40,0,0,C.A. 33595,15.75,,S
|
||||
163,0,3,"Bengtsson, Mr. John Viktor",male,26,0,0,347068,7.775,,S
|
||||
164,0,3,"Calic, Mr. Jovo",male,17,0,0,315093,8.6625,,S
|
||||
165,0,3,"Panula, Master. Eino Viljami",male,1,4,1,3101295,39.6875,,S
|
||||
166,1,3,"Goldsmith, Master. Frank John William ""Frankie""",male,9,0,2,363291,20.525,,S
|
||||
167,1,1,"Chibnall, Mrs. (Edith Martha Bowerman)",female,,0,1,113505,55,E33,S
|
||||
168,0,3,"Skoog, Mrs. William (Anna Bernhardina Karlsson)",female,45,1,4,347088,27.9,,S
|
||||
169,0,1,"Baumann, Mr. John D",male,,0,0,PC 17318,25.925,,S
|
||||
170,0,3,"Ling, Mr. Lee",male,28,0,0,1601,56.4958,,S
|
||||
171,0,1,"Van der hoef, Mr. Wyckoff",male,61,0,0,111240,33.5,B19,S
|
||||
172,0,3,"Rice, Master. Arthur",male,4,4,1,382652,29.125,,Q
|
||||
173,1,3,"Johnson, Miss. Eleanor Ileen",female,1,1,1,347742,11.1333,,S
|
||||
174,0,3,"Sivola, Mr. Antti Wilhelm",male,21,0,0,STON/O 2. 3101280,7.925,,S
|
||||
175,0,1,"Smith, Mr. James Clinch",male,56,0,0,17764,30.6958,A7,C
|
||||
176,0,3,"Klasen, Mr. Klas Albin",male,18,1,1,350404,7.8542,,S
|
||||
177,0,3,"Lefebre, Master. Henry Forbes",male,,3,1,4133,25.4667,,S
|
||||
178,0,1,"Isham, Miss. Ann Elizabeth",female,50,0,0,PC 17595,28.7125,C49,C
|
||||
179,0,2,"Hale, Mr. Reginald",male,30,0,0,250653,13,,S
|
||||
180,0,3,"Leonard, Mr. Lionel",male,36,0,0,LINE,0,,S
|
||||
181,0,3,"Sage, Miss. Constance Gladys",female,,8,2,CA. 2343,69.55,,S
|
||||
182,0,2,"Pernot, Mr. Rene",male,,0,0,SC/PARIS 2131,15.05,,C
|
||||
183,0,3,"Asplund, Master. Clarence Gustaf Hugo",male,9,4,2,347077,31.3875,,S
|
||||
184,1,2,"Becker, Master. Richard F",male,1,2,1,230136,39,F4,S
|
||||
185,1,3,"Kink-Heilmann, Miss. Luise Gretchen",female,4,0,2,315153,22.025,,S
|
||||
186,0,1,"Rood, Mr. Hugh Roscoe",male,,0,0,113767,50,A32,S
|
||||
187,1,3,"O'Brien, Mrs. Thomas (Johanna ""Hannah"" Godfrey)",female,,1,0,370365,15.5,,Q
|
||||
188,1,1,"Romaine, Mr. Charles Hallace (""Mr C Rolmane"")",male,45,0,0,111428,26.55,,S
|
||||
189,0,3,"Bourke, Mr. John",male,40,1,1,364849,15.5,,Q
|
||||
190,0,3,"Turcin, Mr. Stjepan",male,36,0,0,349247,7.8958,,S
|
||||
191,1,2,"Pinsky, Mrs. (Rosa)",female,32,0,0,234604,13,,S
|
||||
192,0,2,"Carbines, Mr. William",male,19,0,0,28424,13,,S
|
||||
193,1,3,"Andersen-Jensen, Miss. Carla Christine Nielsine",female,19,1,0,350046,7.8542,,S
|
||||
194,1,2,"Navratil, Master. Michel M",male,3,1,1,230080,26,F2,S
|
||||
195,1,1,"Brown, Mrs. James Joseph (Margaret Tobin)",female,44,0,0,PC 17610,27.7208,B4,C
|
||||
196,1,1,"Lurette, Miss. Elise",female,58,0,0,PC 17569,146.5208,B80,C
|
||||
197,0,3,"Mernagh, Mr. Robert",male,,0,0,368703,7.75,,Q
|
||||
198,0,3,"Olsen, Mr. Karl Siegwart Andreas",male,42,0,1,4579,8.4042,,S
|
||||
199,1,3,"Madigan, Miss. Margaret ""Maggie""",female,,0,0,370370,7.75,,Q
|
||||
200,0,2,"Yrois, Miss. Henriette (""Mrs Harbeck"")",female,24,0,0,248747,13,,S
|
||||
201,0,3,"Vande Walle, Mr. Nestor Cyriel",male,28,0,0,345770,9.5,,S
|
||||
202,0,3,"Sage, Mr. Frederick",male,,8,2,CA. 2343,69.55,,S
|
||||
203,0,3,"Johanson, Mr. Jakob Alfred",male,34,0,0,3101264,6.4958,,S
|
||||
204,0,3,"Youseff, Mr. Gerious",male,45.5,0,0,2628,7.225,,C
|
||||
205,1,3,"Cohen, Mr. Gurshon ""Gus""",male,18,0,0,A/5 3540,8.05,,S
|
||||
206,0,3,"Strom, Miss. Telma Matilda",female,2,0,1,347054,10.4625,G6,S
|
||||
207,0,3,"Backstrom, Mr. Karl Alfred",male,32,1,0,3101278,15.85,,S
|
||||
208,1,3,"Albimona, Mr. Nassef Cassem",male,26,0,0,2699,18.7875,,C
|
||||
209,1,3,"Carr, Miss. Helen ""Ellen""",female,16,0,0,367231,7.75,,Q
|
||||
210,1,1,"Blank, Mr. Henry",male,40,0,0,112277,31,A31,C
|
||||
211,0,3,"Ali, Mr. Ahmed",male,24,0,0,SOTON/O.Q. 3101311,7.05,,S
|
||||
212,1,2,"Cameron, Miss. Clear Annie",female,35,0,0,F.C.C. 13528,21,,S
|
||||
213,0,3,"Perkin, Mr. John Henry",male,22,0,0,A/5 21174,7.25,,S
|
||||
214,0,2,"Givard, Mr. Hans Kristensen",male,30,0,0,250646,13,,S
|
||||
215,0,3,"Kiernan, Mr. Philip",male,,1,0,367229,7.75,,Q
|
||||
216,1,1,"Newell, Miss. Madeleine",female,31,1,0,35273,113.275,D36,C
|
||||
217,1,3,"Honkanen, Miss. Eliina",female,27,0,0,STON/O2. 3101283,7.925,,S
|
||||
218,0,2,"Jacobsohn, Mr. Sidney Samuel",male,42,1,0,243847,27,,S
|
||||
219,1,1,"Bazzani, Miss. Albina",female,32,0,0,11813,76.2917,D15,C
|
||||
220,0,2,"Harris, Mr. Walter",male,30,0,0,W/C 14208,10.5,,S
|
||||
221,1,3,"Sunderland, Mr. Victor Francis",male,16,0,0,SOTON/OQ 392089,8.05,,S
|
||||
222,0,2,"Bracken, Mr. James H",male,27,0,0,220367,13,,S
|
||||
223,0,3,"Green, Mr. George Henry",male,51,0,0,21440,8.05,,S
|
||||
224,0,3,"Nenkoff, Mr. Christo",male,,0,0,349234,7.8958,,S
|
||||
225,1,1,"Hoyt, Mr. Frederick Maxfield",male,38,1,0,19943,90,C93,S
|
||||
226,0,3,"Berglund, Mr. Karl Ivar Sven",male,22,0,0,PP 4348,9.35,,S
|
||||
227,1,2,"Mellors, Mr. William John",male,19,0,0,SW/PP 751,10.5,,S
|
||||
228,0,3,"Lovell, Mr. John Hall (""Henry"")",male,20.5,0,0,A/5 21173,7.25,,S
|
||||
229,0,2,"Fahlstrom, Mr. Arne Jonas",male,18,0,0,236171,13,,S
|
||||
230,0,3,"Lefebre, Miss. Mathilde",female,,3,1,4133,25.4667,,S
|
||||
231,1,1,"Harris, Mrs. Henry Birkhardt (Irene Wallach)",female,35,1,0,36973,83.475,C83,S
|
||||
232,0,3,"Larsson, Mr. Bengt Edvin",male,29,0,0,347067,7.775,,S
|
||||
233,0,2,"Sjostedt, Mr. Ernst Adolf",male,59,0,0,237442,13.5,,S
|
||||
234,1,3,"Asplund, Miss. Lillian Gertrud",female,5,4,2,347077,31.3875,,S
|
||||
235,0,2,"Leyson, Mr. Robert William Norman",male,24,0,0,C.A. 29566,10.5,,S
|
||||
236,0,3,"Harknett, Miss. Alice Phoebe",female,,0,0,W./C. 6609,7.55,,S
|
||||
237,0,2,"Hold, Mr. Stephen",male,44,1,0,26707,26,,S
|
||||
238,1,2,"Collyer, Miss. Marjorie ""Lottie""",female,8,0,2,C.A. 31921,26.25,,S
|
||||
239,0,2,"Pengelly, Mr. Frederick William",male,19,0,0,28665,10.5,,S
|
||||
240,0,2,"Hunt, Mr. George Henry",male,33,0,0,SCO/W 1585,12.275,,S
|
||||
241,0,3,"Zabour, Miss. Thamine",female,,1,0,2665,14.4542,,C
|
||||
242,1,3,"Murphy, Miss. Katherine ""Kate""",female,,1,0,367230,15.5,,Q
|
||||
243,0,2,"Coleridge, Mr. Reginald Charles",male,29,0,0,W./C. 14263,10.5,,S
|
||||
244,0,3,"Maenpaa, Mr. Matti Alexanteri",male,22,0,0,STON/O 2. 3101275,7.125,,S
|
||||
245,0,3,"Attalah, Mr. Sleiman",male,30,0,0,2694,7.225,,C
|
||||
246,0,1,"Minahan, Dr. William Edward",male,44,2,0,19928,90,C78,Q
|
||||
247,0,3,"Lindahl, Miss. Agda Thorilda Viktoria",female,25,0,0,347071,7.775,,S
|
||||
248,1,2,"Hamalainen, Mrs. William (Anna)",female,24,0,2,250649,14.5,,S
|
||||
249,1,1,"Beckwith, Mr. Richard Leonard",male,37,1,1,11751,52.5542,D35,S
|
||||
250,0,2,"Carter, Rev. Ernest Courtenay",male,54,1,0,244252,26,,S
|
||||
251,0,3,"Reed, Mr. James George",male,,0,0,362316,7.25,,S
|
||||
252,0,3,"Strom, Mrs. Wilhelm (Elna Matilda Persson)",female,29,1,1,347054,10.4625,G6,S
|
||||
253,0,1,"Stead, Mr. William Thomas",male,62,0,0,113514,26.55,C87,S
|
||||
254,0,3,"Lobb, Mr. William Arthur",male,30,1,0,A/5. 3336,16.1,,S
|
||||
255,0,3,"Rosblom, Mrs. Viktor (Helena Wilhelmina)",female,41,0,2,370129,20.2125,,S
|
||||
256,1,3,"Touma, Mrs. Darwis (Hanne Youssef Razi)",female,29,0,2,2650,15.2458,,C
|
||||
257,1,1,"Thorne, Mrs. Gertrude Maybelle",female,,0,0,PC 17585,79.2,,C
|
||||
258,1,1,"Cherry, Miss. Gladys",female,30,0,0,110152,86.5,B77,S
|
||||
259,1,1,"Ward, Miss. Anna",female,35,0,0,PC 17755,512.3292,,C
|
||||
260,1,2,"Parrish, Mrs. (Lutie Davis)",female,50,0,1,230433,26,,S
|
||||
261,0,3,"Smith, Mr. Thomas",male,,0,0,384461,7.75,,Q
|
||||
262,1,3,"Asplund, Master. Edvin Rojj Felix",male,3,4,2,347077,31.3875,,S
|
||||
263,0,1,"Taussig, Mr. Emil",male,52,1,1,110413,79.65,E67,S
|
||||
264,0,1,"Harrison, Mr. William",male,40,0,0,112059,0,B94,S
|
||||
265,0,3,"Henry, Miss. Delia",female,,0,0,382649,7.75,,Q
|
||||
266,0,2,"Reeves, Mr. David",male,36,0,0,C.A. 17248,10.5,,S
|
||||
267,0,3,"Panula, Mr. Ernesti Arvid",male,16,4,1,3101295,39.6875,,S
|
||||
268,1,3,"Persson, Mr. Ernst Ulrik",male,25,1,0,347083,7.775,,S
|
||||
269,1,1,"Graham, Mrs. William Thompson (Edith Junkins)",female,58,0,1,PC 17582,153.4625,C125,S
|
||||
270,1,1,"Bissette, Miss. Amelia",female,35,0,0,PC 17760,135.6333,C99,S
|
||||
271,0,1,"Cairns, Mr. Alexander",male,,0,0,113798,31,,S
|
||||
272,1,3,"Tornquist, Mr. William Henry",male,25,0,0,LINE,0,,S
|
||||
273,1,2,"Mellinger, Mrs. (Elizabeth Anne Maidment)",female,41,0,1,250644,19.5,,S
|
||||
274,0,1,"Natsch, Mr. Charles H",male,37,0,1,PC 17596,29.7,C118,C
|
||||
275,1,3,"Healy, Miss. Hanora ""Nora""",female,,0,0,370375,7.75,,Q
|
||||
276,1,1,"Andrews, Miss. Kornelia Theodosia",female,63,1,0,13502,77.9583,D7,S
|
||||
277,0,3,"Lindblom, Miss. Augusta Charlotta",female,45,0,0,347073,7.75,,S
|
||||
278,0,2,"Parkes, Mr. Francis ""Frank""",male,,0,0,239853,0,,S
|
||||
279,0,3,"Rice, Master. Eric",male,7,4,1,382652,29.125,,Q
|
||||
280,1,3,"Abbott, Mrs. Stanton (Rosa Hunt)",female,35,1,1,C.A. 2673,20.25,,S
|
||||
281,0,3,"Duane, Mr. Frank",male,65,0,0,336439,7.75,,Q
|
||||
282,0,3,"Olsson, Mr. Nils Johan Goransson",male,28,0,0,347464,7.8542,,S
|
||||
283,0,3,"de Pelsmaeker, Mr. Alfons",male,16,0,0,345778,9.5,,S
|
||||
284,1,3,"Dorking, Mr. Edward Arthur",male,19,0,0,A/5. 10482,8.05,,S
|
||||
285,0,1,"Smith, Mr. Richard William",male,,0,0,113056,26,A19,S
|
||||
286,0,3,"Stankovic, Mr. Ivan",male,33,0,0,349239,8.6625,,C
|
||||
287,1,3,"de Mulder, Mr. Theodore",male,30,0,0,345774,9.5,,S
|
||||
288,0,3,"Naidenoff, Mr. Penko",male,22,0,0,349206,7.8958,,S
|
||||
289,1,2,"Hosono, Mr. Masabumi",male,42,0,0,237798,13,,S
|
||||
290,1,3,"Connolly, Miss. Kate",female,22,0,0,370373,7.75,,Q
|
||||
291,1,1,"Barber, Miss. Ellen ""Nellie""",female,26,0,0,19877,78.85,,S
|
||||
292,1,1,"Bishop, Mrs. Dickinson H (Helen Walton)",female,19,1,0,11967,91.0792,B49,C
|
||||
293,0,2,"Levy, Mr. Rene Jacques",male,36,0,0,SC/Paris 2163,12.875,D,C
|
||||
294,0,3,"Haas, Miss. Aloisia",female,24,0,0,349236,8.85,,S
|
||||
295,0,3,"Mineff, Mr. Ivan",male,24,0,0,349233,7.8958,,S
|
||||
296,0,1,"Lewy, Mr. Ervin G",male,,0,0,PC 17612,27.7208,,C
|
||||
297,0,3,"Hanna, Mr. Mansour",male,23.5,0,0,2693,7.2292,,C
|
||||
298,0,1,"Allison, Miss. Helen Loraine",female,2,1,2,113781,151.55,C22 C26,S
|
||||
299,1,1,"Saalfeld, Mr. Adolphe",male,,0,0,19988,30.5,C106,S
|
||||
300,1,1,"Baxter, Mrs. James (Helene DeLaudeniere Chaput)",female,50,0,1,PC 17558,247.5208,B58 B60,C
|
||||
301,1,3,"Kelly, Miss. Anna Katherine ""Annie Kate""",female,,0,0,9234,7.75,,Q
|
||||
302,1,3,"McCoy, Mr. Bernard",male,,2,0,367226,23.25,,Q
|
||||
303,0,3,"Johnson, Mr. William Cahoone Jr",male,19,0,0,LINE,0,,S
|
||||
304,1,2,"Keane, Miss. Nora A",female,,0,0,226593,12.35,E101,Q
|
||||
305,0,3,"Williams, Mr. Howard Hugh ""Harry""",male,,0,0,A/5 2466,8.05,,S
|
||||
306,1,1,"Allison, Master. Hudson Trevor",male,0.92,1,2,113781,151.55,C22 C26,S
|
||||
307,1,1,"Fleming, Miss. Margaret",female,,0,0,17421,110.8833,,C
|
||||
308,1,1,"Penasco y Castellana, Mrs. Victor de Satode (Maria Josefa Perez de Soto y Vallejo)",female,17,1,0,PC 17758,108.9,C65,C
|
||||
309,0,2,"Abelson, Mr. Samuel",male,30,1,0,P/PP 3381,24,,C
|
||||
310,1,1,"Francatelli, Miss. Laura Mabel",female,30,0,0,PC 17485,56.9292,E36,C
|
||||
311,1,1,"Hays, Miss. Margaret Bechstein",female,24,0,0,11767,83.1583,C54,C
|
||||
312,1,1,"Ryerson, Miss. Emily Borie",female,18,2,2,PC 17608,262.375,B57 B59 B63 B66,C
|
||||
313,0,2,"Lahtinen, Mrs. William (Anna Sylfven)",female,26,1,1,250651,26,,S
|
||||
314,0,3,"Hendekovic, Mr. Ignjac",male,28,0,0,349243,7.8958,,S
|
||||
315,0,2,"Hart, Mr. Benjamin",male,43,1,1,F.C.C. 13529,26.25,,S
|
||||
316,1,3,"Nilsson, Miss. Helmina Josefina",female,26,0,0,347470,7.8542,,S
|
||||
317,1,2,"Kantor, Mrs. Sinai (Miriam Sternin)",female,24,1,0,244367,26,,S
|
||||
318,0,2,"Moraweck, Dr. Ernest",male,54,0,0,29011,14,,S
|
||||
319,1,1,"Wick, Miss. Mary Natalie",female,31,0,2,36928,164.8667,C7,S
|
||||
320,1,1,"Spedden, Mrs. Frederic Oakley (Margaretta Corning Stone)",female,40,1,1,16966,134.5,E34,C
|
||||
321,0,3,"Dennis, Mr. Samuel",male,22,0,0,A/5 21172,7.25,,S
|
||||
322,0,3,"Danoff, Mr. Yoto",male,27,0,0,349219,7.8958,,S
|
||||
323,1,2,"Slayter, Miss. Hilda Mary",female,30,0,0,234818,12.35,,Q
|
||||
324,1,2,"Caldwell, Mrs. Albert Francis (Sylvia Mae Harbaugh)",female,22,1,1,248738,29,,S
|
||||
325,0,3,"Sage, Mr. George John Jr",male,,8,2,CA. 2343,69.55,,S
|
||||
326,1,1,"Young, Miss. Marie Grice",female,36,0,0,PC 17760,135.6333,C32,C
|
||||
327,0,3,"Nysveen, Mr. Johan Hansen",male,61,0,0,345364,6.2375,,S
|
||||
328,1,2,"Ball, Mrs. (Ada E Hall)",female,36,0,0,28551,13,D,S
|
||||
329,1,3,"Goldsmith, Mrs. Frank John (Emily Alice Brown)",female,31,1,1,363291,20.525,,S
|
||||
330,1,1,"Hippach, Miss. Jean Gertrude",female,16,0,1,111361,57.9792,B18,C
|
||||
331,1,3,"McCoy, Miss. Agnes",female,,2,0,367226,23.25,,Q
|
||||
332,0,1,"Partner, Mr. Austen",male,45.5,0,0,113043,28.5,C124,S
|
||||
333,0,1,"Graham, Mr. George Edward",male,38,0,1,PC 17582,153.4625,C91,S
|
||||
334,0,3,"Vander Planke, Mr. Leo Edmondus",male,16,2,0,345764,18,,S
|
||||
335,1,1,"Frauenthal, Mrs. Henry William (Clara Heinsheimer)",female,,1,0,PC 17611,133.65,,S
|
||||
336,0,3,"Denkoff, Mr. Mitto",male,,0,0,349225,7.8958,,S
|
||||
337,0,1,"Pears, Mr. Thomas Clinton",male,29,1,0,113776,66.6,C2,S
|
||||
338,1,1,"Burns, Miss. Elizabeth Margaret",female,41,0,0,16966,134.5,E40,C
|
||||
339,1,3,"Dahl, Mr. Karl Edwart",male,45,0,0,7598,8.05,,S
|
||||
340,0,1,"Blackwell, Mr. Stephen Weart",male,45,0,0,113784,35.5,T,S
|
||||
341,1,2,"Navratil, Master. Edmond Roger",male,2,1,1,230080,26,F2,S
|
||||
342,1,1,"Fortune, Miss. Alice Elizabeth",female,24,3,2,19950,263,C23 C25 C27,S
|
||||
343,0,2,"Collander, Mr. Erik Gustaf",male,28,0,0,248740,13,,S
|
||||
344,0,2,"Sedgwick, Mr. Charles Frederick Waddington",male,25,0,0,244361,13,,S
|
||||
345,0,2,"Fox, Mr. Stanley Hubert",male,36,0,0,229236,13,,S
|
||||
346,1,2,"Brown, Miss. Amelia ""Mildred""",female,24,0,0,248733,13,F33,S
|
||||
347,1,2,"Smith, Miss. Marion Elsie",female,40,0,0,31418,13,,S
|
||||
348,1,3,"Davison, Mrs. Thomas Henry (Mary E Finck)",female,,1,0,386525,16.1,,S
|
||||
349,1,3,"Coutts, Master. William Loch ""William""",male,3,1,1,C.A. 37671,15.9,,S
|
||||
350,0,3,"Dimic, Mr. Jovan",male,42,0,0,315088,8.6625,,S
|
||||
351,0,3,"Odahl, Mr. Nils Martin",male,23,0,0,7267,9.225,,S
|
||||
352,0,1,"Williams-Lambert, Mr. Fletcher Fellows",male,,0,0,113510,35,C128,S
|
||||
353,0,3,"Elias, Mr. Tannous",male,15,1,1,2695,7.2292,,C
|
||||
354,0,3,"Arnold-Franchi, Mr. Josef",male,25,1,0,349237,17.8,,S
|
||||
355,0,3,"Yousif, Mr. Wazli",male,,0,0,2647,7.225,,C
|
||||
356,0,3,"Vanden Steen, Mr. Leo Peter",male,28,0,0,345783,9.5,,S
|
||||
357,1,1,"Bowerman, Miss. Elsie Edith",female,22,0,1,113505,55,E33,S
|
||||
358,0,2,"Funk, Miss. Annie Clemmer",female,38,0,0,237671,13,,S
|
||||
359,1,3,"McGovern, Miss. Mary",female,,0,0,330931,7.8792,,Q
|
||||
360,1,3,"Mockler, Miss. Helen Mary ""Ellie""",female,,0,0,330980,7.8792,,Q
|
||||
361,0,3,"Skoog, Mr. Wilhelm",male,40,1,4,347088,27.9,,S
|
||||
362,0,2,"del Carlo, Mr. Sebastiano",male,29,1,0,SC/PARIS 2167,27.7208,,C
|
||||
363,0,3,"Barbara, Mrs. (Catherine David)",female,45,0,1,2691,14.4542,,C
|
||||
364,0,3,"Asim, Mr. Adola",male,35,0,0,SOTON/O.Q. 3101310,7.05,,S
|
||||
365,0,3,"O'Brien, Mr. Thomas",male,,1,0,370365,15.5,,Q
|
||||
366,0,3,"Adahl, Mr. Mauritz Nils Martin",male,30,0,0,C 7076,7.25,,S
|
||||
367,1,1,"Warren, Mrs. Frank Manley (Anna Sophia Atkinson)",female,60,1,0,110813,75.25,D37,C
|
||||
368,1,3,"Moussa, Mrs. (Mantoura Boulos)",female,,0,0,2626,7.2292,,C
|
||||
369,1,3,"Jermyn, Miss. Annie",female,,0,0,14313,7.75,,Q
|
||||
370,1,1,"Aubart, Mme. Leontine Pauline",female,24,0,0,PC 17477,69.3,B35,C
|
||||
371,1,1,"Harder, Mr. George Achilles",male,25,1,0,11765,55.4417,E50,C
|
||||
372,0,3,"Wiklund, Mr. Jakob Alfred",male,18,1,0,3101267,6.4958,,S
|
||||
373,0,3,"Beavan, Mr. William Thomas",male,19,0,0,323951,8.05,,S
|
||||
374,0,1,"Ringhini, Mr. Sante",male,22,0,0,PC 17760,135.6333,,C
|
||||
375,0,3,"Palsson, Miss. Stina Viola",female,3,3,1,349909,21.075,,S
|
||||
376,1,1,"Meyer, Mrs. Edgar Joseph (Leila Saks)",female,,1,0,PC 17604,82.1708,,C
|
||||
377,1,3,"Landergren, Miss. Aurora Adelia",female,22,0,0,C 7077,7.25,,S
|
||||
378,0,1,"Widener, Mr. Harry Elkins",male,27,0,2,113503,211.5,C82,C
|
||||
379,0,3,"Betros, Mr. Tannous",male,20,0,0,2648,4.0125,,C
|
||||
380,0,3,"Gustafsson, Mr. Karl Gideon",male,19,0,0,347069,7.775,,S
|
||||
381,1,1,"Bidois, Miss. Rosalie",female,42,0,0,PC 17757,227.525,,C
|
||||
382,1,3,"Nakid, Miss. Maria (""Mary"")",female,1,0,2,2653,15.7417,,C
|
||||
383,0,3,"Tikkanen, Mr. Juho",male,32,0,0,STON/O 2. 3101293,7.925,,S
|
||||
384,1,1,"Holverson, Mrs. Alexander Oskar (Mary Aline Towner)",female,35,1,0,113789,52,,S
|
||||
385,0,3,"Plotcharsky, Mr. Vasil",male,,0,0,349227,7.8958,,S
|
||||
386,0,2,"Davies, Mr. Charles Henry",male,18,0,0,S.O.C. 14879,73.5,,S
|
||||
387,0,3,"Goodwin, Master. Sidney Leonard",male,1,5,2,CA 2144,46.9,,S
|
||||
388,1,2,"Buss, Miss. Kate",female,36,0,0,27849,13,,S
|
||||
389,0,3,"Sadlier, Mr. Matthew",male,,0,0,367655,7.7292,,Q
|
||||
390,1,2,"Lehmann, Miss. Bertha",female,17,0,0,SC 1748,12,,C
|
||||
391,1,1,"Carter, Mr. William Ernest",male,36,1,2,113760,120,B96 B98,S
|
||||
392,1,3,"Jansson, Mr. Carl Olof",male,21,0,0,350034,7.7958,,S
|
||||
393,0,3,"Gustafsson, Mr. Johan Birger",male,28,2,0,3101277,7.925,,S
|
||||
394,1,1,"Newell, Miss. Marjorie",female,23,1,0,35273,113.275,D36,C
|
||||
395,1,3,"Sandstrom, Mrs. Hjalmar (Agnes Charlotta Bengtsson)",female,24,0,2,PP 9549,16.7,G6,S
|
||||
396,0,3,"Johansson, Mr. Erik",male,22,0,0,350052,7.7958,,S
|
||||
397,0,3,"Olsson, Miss. Elina",female,31,0,0,350407,7.8542,,S
|
||||
398,0,2,"McKane, Mr. Peter David",male,46,0,0,28403,26,,S
|
||||
399,0,2,"Pain, Dr. Alfred",male,23,0,0,244278,10.5,,S
|
||||
400,1,2,"Trout, Mrs. William H (Jessie L)",female,28,0,0,240929,12.65,,S
|
||||
401,1,3,"Niskanen, Mr. Juha",male,39,0,0,STON/O 2. 3101289,7.925,,S
|
||||
402,0,3,"Adams, Mr. John",male,26,0,0,341826,8.05,,S
|
||||
403,0,3,"Jussila, Miss. Mari Aina",female,21,1,0,4137,9.825,,S
|
||||
404,0,3,"Hakkarainen, Mr. Pekka Pietari",male,28,1,0,STON/O2. 3101279,15.85,,S
|
||||
405,0,3,"Oreskovic, Miss. Marija",female,20,0,0,315096,8.6625,,S
|
||||
406,0,2,"Gale, Mr. Shadrach",male,34,1,0,28664,21,,S
|
||||
407,0,3,"Widegren, Mr. Carl/Charles Peter",male,51,0,0,347064,7.75,,S
|
||||
408,1,2,"Richards, Master. William Rowe",male,3,1,1,29106,18.75,,S
|
||||
409,0,3,"Birkeland, Mr. Hans Martin Monsen",male,21,0,0,312992,7.775,,S
|
||||
410,0,3,"Lefebre, Miss. Ida",female,,3,1,4133,25.4667,,S
|
||||
411,0,3,"Sdycoff, Mr. Todor",male,,0,0,349222,7.8958,,S
|
||||
412,0,3,"Hart, Mr. Henry",male,,0,0,394140,6.8583,,Q
|
||||
413,1,1,"Minahan, Miss. Daisy E",female,33,1,0,19928,90,C78,Q
|
||||
414,0,2,"Cunningham, Mr. Alfred Fleming",male,,0,0,239853,0,,S
|
||||
415,1,3,"Sundman, Mr. Johan Julian",male,44,0,0,STON/O 2. 3101269,7.925,,S
|
||||
416,0,3,"Meek, Mrs. Thomas (Annie Louise Rowley)",female,,0,0,343095,8.05,,S
|
||||
417,1,2,"Drew, Mrs. James Vivian (Lulu Thorne Christian)",female,34,1,1,28220,32.5,,S
|
||||
418,1,2,"Silven, Miss. Lyyli Karoliina",female,18,0,2,250652,13,,S
|
||||
419,0,2,"Matthews, Mr. William John",male,30,0,0,28228,13,,S
|
||||
420,0,3,"Van Impe, Miss. Catharina",female,10,0,2,345773,24.15,,S
|
||||
421,0,3,"Gheorgheff, Mr. Stanio",male,,0,0,349254,7.8958,,C
|
||||
422,0,3,"Charters, Mr. David",male,21,0,0,A/5. 13032,7.7333,,Q
|
||||
423,0,3,"Zimmerman, Mr. Leo",male,29,0,0,315082,7.875,,S
|
||||
424,0,3,"Danbom, Mrs. Ernst Gilbert (Anna Sigrid Maria Brogren)",female,28,1,1,347080,14.4,,S
|
||||
425,0,3,"Rosblom, Mr. Viktor Richard",male,18,1,1,370129,20.2125,,S
|
||||
426,0,3,"Wiseman, Mr. Phillippe",male,,0,0,A/4. 34244,7.25,,S
|
||||
427,1,2,"Clarke, Mrs. Charles V (Ada Maria Winfield)",female,28,1,0,2003,26,,S
|
||||
428,1,2,"Phillips, Miss. Kate Florence (""Mrs Kate Louise Phillips Marshall"")",female,19,0,0,250655,26,,S
|
||||
429,0,3,"Flynn, Mr. James",male,,0,0,364851,7.75,,Q
|
||||
430,1,3,"Pickard, Mr. Berk (Berk Trembisky)",male,32,0,0,SOTON/O.Q. 392078,8.05,E10,S
|
||||
431,1,1,"Bjornstrom-Steffansson, Mr. Mauritz Hakan",male,28,0,0,110564,26.55,C52,S
|
||||
432,1,3,"Thorneycroft, Mrs. Percival (Florence Kate White)",female,,1,0,376564,16.1,,S
|
||||
433,1,2,"Louch, Mrs. Charles Alexander (Alice Adelaide Slow)",female,42,1,0,SC/AH 3085,26,,S
|
||||
434,0,3,"Kallio, Mr. Nikolai Erland",male,17,0,0,STON/O 2. 3101274,7.125,,S
|
||||
435,0,1,"Silvey, Mr. William Baird",male,50,1,0,13507,55.9,E44,S
|
||||
436,1,1,"Carter, Miss. Lucile Polk",female,14,1,2,113760,120,B96 B98,S
|
||||
437,0,3,"Ford, Miss. Doolina Margaret ""Daisy""",female,21,2,2,W./C. 6608,34.375,,S
|
||||
438,1,2,"Richards, Mrs. Sidney (Emily Hocking)",female,24,2,3,29106,18.75,,S
|
||||
439,0,1,"Fortune, Mr. Mark",male,64,1,4,19950,263,C23 C25 C27,S
|
||||
440,0,2,"Kvillner, Mr. Johan Henrik Johannesson",male,31,0,0,C.A. 18723,10.5,,S
|
||||
441,1,2,"Hart, Mrs. Benjamin (Esther Ada Bloomfield)",female,45,1,1,F.C.C. 13529,26.25,,S
|
||||
442,0,3,"Hampe, Mr. Leon",male,20,0,0,345769,9.5,,S
|
||||
443,0,3,"Petterson, Mr. Johan Emil",male,25,1,0,347076,7.775,,S
|
||||
444,1,2,"Reynaldo, Ms. Encarnacion",female,28,0,0,230434,13,,S
|
||||
445,1,3,"Johannesen-Bratthammer, Mr. Bernt",male,,0,0,65306,8.1125,,S
|
||||
446,1,1,"Dodge, Master. Washington",male,4,0,2,33638,81.8583,A34,S
|
||||
447,1,2,"Mellinger, Miss. Madeleine Violet",female,13,0,1,250644,19.5,,S
|
||||
448,1,1,"Seward, Mr. Frederic Kimber",male,34,0,0,113794,26.55,,S
|
||||
449,1,3,"Baclini, Miss. Marie Catherine",female,5,2,1,2666,19.2583,,C
|
||||
450,1,1,"Peuchen, Major. Arthur Godfrey",male,52,0,0,113786,30.5,C104,S
|
||||
451,0,2,"West, Mr. Edwy Arthur",male,36,1,2,C.A. 34651,27.75,,S
|
||||
452,0,3,"Hagland, Mr. Ingvald Olai Olsen",male,,1,0,65303,19.9667,,S
|
||||
453,0,1,"Foreman, Mr. Benjamin Laventall",male,30,0,0,113051,27.75,C111,C
|
||||
454,1,1,"Goldenberg, Mr. Samuel L",male,49,1,0,17453,89.1042,C92,C
|
||||
455,0,3,"Peduzzi, Mr. Joseph",male,,0,0,A/5 2817,8.05,,S
|
||||
456,1,3,"Jalsevac, Mr. Ivan",male,29,0,0,349240,7.8958,,C
|
||||
457,0,1,"Millet, Mr. Francis Davis",male,65,0,0,13509,26.55,E38,S
|
||||
458,1,1,"Kenyon, Mrs. Frederick R (Marion)",female,,1,0,17464,51.8625,D21,S
|
||||
459,1,2,"Toomey, Miss. Ellen",female,50,0,0,F.C.C. 13531,10.5,,S
|
||||
460,0,3,"O'Connor, Mr. Maurice",male,,0,0,371060,7.75,,Q
|
||||
461,1,1,"Anderson, Mr. Harry",male,48,0,0,19952,26.55,E12,S
|
||||
462,0,3,"Morley, Mr. William",male,34,0,0,364506,8.05,,S
|
||||
463,0,1,"Gee, Mr. Arthur H",male,47,0,0,111320,38.5,E63,S
|
||||
464,0,2,"Milling, Mr. Jacob Christian",male,48,0,0,234360,13,,S
|
||||
465,0,3,"Maisner, Mr. Simon",male,,0,0,A/S 2816,8.05,,S
|
||||
466,0,3,"Goncalves, Mr. Manuel Estanslas",male,38,0,0,SOTON/O.Q. 3101306,7.05,,S
|
||||
467,0,2,"Campbell, Mr. William",male,,0,0,239853,0,,S
|
||||
468,0,1,"Smart, Mr. John Montgomery",male,56,0,0,113792,26.55,,S
|
||||
469,0,3,"Scanlan, Mr. James",male,,0,0,36209,7.725,,Q
|
||||
470,1,3,"Baclini, Miss. Helene Barbara",female,0.75,2,1,2666,19.2583,,C
|
||||
471,0,3,"Keefe, Mr. Arthur",male,,0,0,323592,7.25,,S
|
||||
472,0,3,"Cacic, Mr. Luka",male,38,0,0,315089,8.6625,,S
|
||||
473,1,2,"West, Mrs. Edwy Arthur (Ada Mary Worth)",female,33,1,2,C.A. 34651,27.75,,S
|
||||
474,1,2,"Jerwan, Mrs. Amin S (Marie Marthe Thuillard)",female,23,0,0,SC/AH Basle 541,13.7917,D,C
|
||||
475,0,3,"Strandberg, Miss. Ida Sofia",female,22,0,0,7553,9.8375,,S
|
||||
476,0,1,"Clifford, Mr. George Quincy",male,,0,0,110465,52,A14,S
|
||||
477,0,2,"Renouf, Mr. Peter Henry",male,34,1,0,31027,21,,S
|
||||
478,0,3,"Braund, Mr. Lewis Richard",male,29,1,0,3460,7.0458,,S
|
||||
479,0,3,"Karlsson, Mr. Nils August",male,22,0,0,350060,7.5208,,S
|
||||
480,1,3,"Hirvonen, Miss. Hildur E",female,2,0,1,3101298,12.2875,,S
|
||||
481,0,3,"Goodwin, Master. Harold Victor",male,9,5,2,CA 2144,46.9,,S
|
||||
482,0,2,"Frost, Mr. Anthony Wood ""Archie""",male,,0,0,239854,0,,S
|
||||
483,0,3,"Rouse, Mr. Richard Henry",male,50,0,0,A/5 3594,8.05,,S
|
||||
484,1,3,"Turkula, Mrs. (Hedwig)",female,63,0,0,4134,9.5875,,S
|
||||
485,1,1,"Bishop, Mr. Dickinson H",male,25,1,0,11967,91.0792,B49,C
|
||||
486,0,3,"Lefebre, Miss. Jeannie",female,,3,1,4133,25.4667,,S
|
||||
487,1,1,"Hoyt, Mrs. Frederick Maxfield (Jane Anne Forby)",female,35,1,0,19943,90,C93,S
|
||||
488,0,1,"Kent, Mr. Edward Austin",male,58,0,0,11771,29.7,B37,C
|
||||
489,0,3,"Somerton, Mr. Francis William",male,30,0,0,A.5. 18509,8.05,,S
|
||||
490,1,3,"Coutts, Master. Eden Leslie ""Neville""",male,9,1,1,C.A. 37671,15.9,,S
|
||||
491,0,3,"Hagland, Mr. Konrad Mathias Reiersen",male,,1,0,65304,19.9667,,S
|
||||
492,0,3,"Windelov, Mr. Einar",male,21,0,0,SOTON/OQ 3101317,7.25,,S
|
||||
493,0,1,"Molson, Mr. Harry Markland",male,55,0,0,113787,30.5,C30,S
|
||||
494,0,1,"Artagaveytia, Mr. Ramon",male,71,0,0,PC 17609,49.5042,,C
|
||||
495,0,3,"Stanley, Mr. Edward Roland",male,21,0,0,A/4 45380,8.05,,S
|
||||
496,0,3,"Yousseff, Mr. Gerious",male,,0,0,2627,14.4583,,C
|
||||
497,1,1,"Eustis, Miss. Elizabeth Mussey",female,54,1,0,36947,78.2667,D20,C
|
||||
498,0,3,"Shellard, Mr. Frederick William",male,,0,0,C.A. 6212,15.1,,S
|
||||
499,0,1,"Allison, Mrs. Hudson J C (Bessie Waldo Daniels)",female,25,1,2,113781,151.55,C22 C26,S
|
||||
500,0,3,"Svensson, Mr. Olof",male,24,0,0,350035,7.7958,,S
|
||||
501,0,3,"Calic, Mr. Petar",male,17,0,0,315086,8.6625,,S
|
||||
502,0,3,"Canavan, Miss. Mary",female,21,0,0,364846,7.75,,Q
|
||||
503,0,3,"O'Sullivan, Miss. Bridget Mary",female,,0,0,330909,7.6292,,Q
|
||||
504,0,3,"Laitinen, Miss. Kristina Sofia",female,37,0,0,4135,9.5875,,S
|
||||
505,1,1,"Maioni, Miss. Roberta",female,16,0,0,110152,86.5,B79,S
|
||||
506,0,1,"Penasco y Castellana, Mr. Victor de Satode",male,18,1,0,PC 17758,108.9,C65,C
|
||||
507,1,2,"Quick, Mrs. Frederick Charles (Jane Richards)",female,33,0,2,26360,26,,S
|
||||
508,1,1,"Bradley, Mr. George (""George Arthur Brayton"")",male,,0,0,111427,26.55,,S
|
||||
509,0,3,"Olsen, Mr. Henry Margido",male,28,0,0,C 4001,22.525,,S
|
||||
510,1,3,"Lang, Mr. Fang",male,26,0,0,1601,56.4958,,S
|
||||
511,1,3,"Daly, Mr. Eugene Patrick",male,29,0,0,382651,7.75,,Q
|
||||
512,0,3,"Webber, Mr. James",male,,0,0,SOTON/OQ 3101316,8.05,,S
|
||||
513,1,1,"McGough, Mr. James Robert",male,36,0,0,PC 17473,26.2875,E25,S
|
||||
514,1,1,"Rothschild, Mrs. Martin (Elizabeth L. Barrett)",female,54,1,0,PC 17603,59.4,,C
|
||||
515,0,3,"Coleff, Mr. Satio",male,24,0,0,349209,7.4958,,S
|
||||
516,0,1,"Walker, Mr. William Anderson",male,47,0,0,36967,34.0208,D46,S
|
||||
517,1,2,"Lemore, Mrs. (Amelia Milley)",female,34,0,0,C.A. 34260,10.5,F33,S
|
||||
518,0,3,"Ryan, Mr. Patrick",male,,0,0,371110,24.15,,Q
|
||||
519,1,2,"Angle, Mrs. William A (Florence ""Mary"" Agnes Hughes)",female,36,1,0,226875,26,,S
|
||||
520,0,3,"Pavlovic, Mr. Stefo",male,32,0,0,349242,7.8958,,S
|
||||
521,1,1,"Perreault, Miss. Anne",female,30,0,0,12749,93.5,B73,S
|
||||
522,0,3,"Vovk, Mr. Janko",male,22,0,0,349252,7.8958,,S
|
||||
523,0,3,"Lahoud, Mr. Sarkis",male,,0,0,2624,7.225,,C
|
||||
524,1,1,"Hippach, Mrs. Louis Albert (Ida Sophia Fischer)",female,44,0,1,111361,57.9792,B18,C
|
||||
525,0,3,"Kassem, Mr. Fared",male,,0,0,2700,7.2292,,C
|
||||
526,0,3,"Farrell, Mr. James",male,40.5,0,0,367232,7.75,,Q
|
||||
527,1,2,"Ridsdale, Miss. Lucy",female,50,0,0,W./C. 14258,10.5,,S
|
||||
528,0,1,"Farthing, Mr. John",male,,0,0,PC 17483,221.7792,C95,S
|
||||
529,0,3,"Salonen, Mr. Johan Werner",male,39,0,0,3101296,7.925,,S
|
||||
530,0,2,"Hocking, Mr. Richard George",male,23,2,1,29104,11.5,,S
|
||||
531,1,2,"Quick, Miss. Phyllis May",female,2,1,1,26360,26,,S
|
||||
532,0,3,"Toufik, Mr. Nakli",male,,0,0,2641,7.2292,,C
|
||||
533,0,3,"Elias, Mr. Joseph Jr",male,17,1,1,2690,7.2292,,C
|
||||
534,1,3,"Peter, Mrs. Catherine (Catherine Rizk)",female,,0,2,2668,22.3583,,C
|
||||
535,0,3,"Cacic, Miss. Marija",female,30,0,0,315084,8.6625,,S
|
||||
536,1,2,"Hart, Miss. Eva Miriam",female,7,0,2,F.C.C. 13529,26.25,,S
|
||||
537,0,1,"Butt, Major. Archibald Willingham",male,45,0,0,113050,26.55,B38,S
|
||||
538,1,1,"LeRoy, Miss. Bertha",female,30,0,0,PC 17761,106.425,,C
|
||||
539,0,3,"Risien, Mr. Samuel Beard",male,,0,0,364498,14.5,,S
|
||||
540,1,1,"Frolicher, Miss. Hedwig Margaritha",female,22,0,2,13568,49.5,B39,C
|
||||
541,1,1,"Crosby, Miss. Harriet R",female,36,0,2,WE/P 5735,71,B22,S
|
||||
542,0,3,"Andersson, Miss. Ingeborg Constanzia",female,9,4,2,347082,31.275,,S
|
||||
543,0,3,"Andersson, Miss. Sigrid Elisabeth",female,11,4,2,347082,31.275,,S
|
||||
544,1,2,"Beane, Mr. Edward",male,32,1,0,2908,26,,S
|
||||
545,0,1,"Douglas, Mr. Walter Donald",male,50,1,0,PC 17761,106.425,C86,C
|
||||
546,0,1,"Nicholson, Mr. Arthur Ernest",male,64,0,0,693,26,,S
|
||||
547,1,2,"Beane, Mrs. Edward (Ethel Clarke)",female,19,1,0,2908,26,,S
|
||||
548,1,2,"Padro y Manent, Mr. Julian",male,,0,0,SC/PARIS 2146,13.8625,,C
|
||||
549,0,3,"Goldsmith, Mr. Frank John",male,33,1,1,363291,20.525,,S
|
||||
550,1,2,"Davies, Master. John Morgan Jr",male,8,1,1,C.A. 33112,36.75,,S
|
||||
551,1,1,"Thayer, Mr. John Borland Jr",male,17,0,2,17421,110.8833,C70,C
|
||||
552,0,2,"Sharp, Mr. Percival James R",male,27,0,0,244358,26,,S
|
||||
553,0,3,"O'Brien, Mr. Timothy",male,,0,0,330979,7.8292,,Q
|
||||
554,1,3,"Leeni, Mr. Fahim (""Philip Zenni"")",male,22,0,0,2620,7.225,,C
|
||||
555,1,3,"Ohman, Miss. Velin",female,22,0,0,347085,7.775,,S
|
||||
556,0,1,"Wright, Mr. George",male,62,0,0,113807,26.55,,S
|
||||
557,1,1,"Duff Gordon, Lady. (Lucille Christiana Sutherland) (""Mrs Morgan"")",female,48,1,0,11755,39.6,A16,C
|
||||
558,0,1,"Robbins, Mr. Victor",male,,0,0,PC 17757,227.525,,C
|
||||
559,1,1,"Taussig, Mrs. Emil (Tillie Mandelbaum)",female,39,1,1,110413,79.65,E67,S
|
||||
560,1,3,"de Messemaeker, Mrs. Guillaume Joseph (Emma)",female,36,1,0,345572,17.4,,S
|
||||
561,0,3,"Morrow, Mr. Thomas Rowan",male,,0,0,372622,7.75,,Q
|
||||
562,0,3,"Sivic, Mr. Husein",male,40,0,0,349251,7.8958,,S
|
||||
563,0,2,"Norman, Mr. Robert Douglas",male,28,0,0,218629,13.5,,S
|
||||
564,0,3,"Simmons, Mr. John",male,,0,0,SOTON/OQ 392082,8.05,,S
|
||||
565,0,3,"Meanwell, Miss. (Marion Ogden)",female,,0,0,SOTON/O.Q. 392087,8.05,,S
|
||||
566,0,3,"Davies, Mr. Alfred J",male,24,2,0,A/4 48871,24.15,,S
|
||||
567,0,3,"Stoytcheff, Mr. Ilia",male,19,0,0,349205,7.8958,,S
|
||||
568,0,3,"Palsson, Mrs. Nils (Alma Cornelia Berglund)",female,29,0,4,349909,21.075,,S
|
||||
569,0,3,"Doharr, Mr. Tannous",male,,0,0,2686,7.2292,,C
|
||||
570,1,3,"Jonsson, Mr. Carl",male,32,0,0,350417,7.8542,,S
|
||||
571,1,2,"Harris, Mr. George",male,62,0,0,S.W./PP 752,10.5,,S
|
||||
572,1,1,"Appleton, Mrs. Edward Dale (Charlotte Lamson)",female,53,2,0,11769,51.4792,C101,S
|
||||
573,1,1,"Flynn, Mr. John Irwin (""Irving"")",male,36,0,0,PC 17474,26.3875,E25,S
|
||||
574,1,3,"Kelly, Miss. Mary",female,,0,0,14312,7.75,,Q
|
||||
575,0,3,"Rush, Mr. Alfred George John",male,16,0,0,A/4. 20589,8.05,,S
|
||||
576,0,3,"Patchett, Mr. George",male,19,0,0,358585,14.5,,S
|
||||
577,1,2,"Garside, Miss. Ethel",female,34,0,0,243880,13,,S
|
||||
578,1,1,"Silvey, Mrs. William Baird (Alice Munger)",female,39,1,0,13507,55.9,E44,S
|
||||
579,0,3,"Caram, Mrs. Joseph (Maria Elias)",female,,1,0,2689,14.4583,,C
|
||||
580,1,3,"Jussila, Mr. Eiriik",male,32,0,0,STON/O 2. 3101286,7.925,,S
|
||||
581,1,2,"Christy, Miss. Julie Rachel",female,25,1,1,237789,30,,S
|
||||
582,1,1,"Thayer, Mrs. John Borland (Marian Longstreth Morris)",female,39,1,1,17421,110.8833,C68,C
|
||||
583,0,2,"Downton, Mr. William James",male,54,0,0,28403,26,,S
|
||||
584,0,1,"Ross, Mr. John Hugo",male,36,0,0,13049,40.125,A10,C
|
||||
585,0,3,"Paulner, Mr. Uscher",male,,0,0,3411,8.7125,,C
|
||||
586,1,1,"Taussig, Miss. Ruth",female,18,0,2,110413,79.65,E68,S
|
||||
587,0,2,"Jarvis, Mr. John Denzil",male,47,0,0,237565,15,,S
|
||||
588,1,1,"Frolicher-Stehli, Mr. Maxmillian",male,60,1,1,13567,79.2,B41,C
|
||||
589,0,3,"Gilinski, Mr. Eliezer",male,22,0,0,14973,8.05,,S
|
||||
590,0,3,"Murdlin, Mr. Joseph",male,,0,0,A./5. 3235,8.05,,S
|
||||
591,0,3,"Rintamaki, Mr. Matti",male,35,0,0,STON/O 2. 3101273,7.125,,S
|
||||
592,1,1,"Stephenson, Mrs. Walter Bertram (Martha Eustis)",female,52,1,0,36947,78.2667,D20,C
|
||||
593,0,3,"Elsbury, Mr. William James",male,47,0,0,A/5 3902,7.25,,S
|
||||
594,0,3,"Bourke, Miss. Mary",female,,0,2,364848,7.75,,Q
|
||||
595,0,2,"Chapman, Mr. John Henry",male,37,1,0,SC/AH 29037,26,,S
|
||||
596,0,3,"Van Impe, Mr. Jean Baptiste",male,36,1,1,345773,24.15,,S
|
||||
597,1,2,"Leitch, Miss. Jessie Wills",female,,0,0,248727,33,,S
|
||||
598,0,3,"Johnson, Mr. Alfred",male,49,0,0,LINE,0,,S
|
||||
599,0,3,"Boulos, Mr. Hanna",male,,0,0,2664,7.225,,C
|
||||
600,1,1,"Duff Gordon, Sir. Cosmo Edmund (""Mr Morgan"")",male,49,1,0,PC 17485,56.9292,A20,C
|
||||
601,1,2,"Jacobsohn, Mrs. Sidney Samuel (Amy Frances Christy)",female,24,2,1,243847,27,,S
|
||||
602,0,3,"Slabenoff, Mr. Petco",male,,0,0,349214,7.8958,,S
|
||||
603,0,1,"Harrington, Mr. Charles H",male,,0,0,113796,42.4,,S
|
||||
604,0,3,"Torber, Mr. Ernst William",male,44,0,0,364511,8.05,,S
|
||||
605,1,1,"Homer, Mr. Harry (""Mr E Haven"")",male,35,0,0,111426,26.55,,C
|
||||
606,0,3,"Lindell, Mr. Edvard Bengtsson",male,36,1,0,349910,15.55,,S
|
||||
607,0,3,"Karaic, Mr. Milan",male,30,0,0,349246,7.8958,,S
|
||||
608,1,1,"Daniel, Mr. Robert Williams",male,27,0,0,113804,30.5,,S
|
||||
609,1,2,"Laroche, Mrs. Joseph (Juliette Marie Louise Lafargue)",female,22,1,2,SC/Paris 2123,41.5792,,C
|
||||
610,1,1,"Shutes, Miss. Elizabeth W",female,40,0,0,PC 17582,153.4625,C125,S
|
||||
611,0,3,"Andersson, Mrs. Anders Johan (Alfrida Konstantia Brogren)",female,39,1,5,347082,31.275,,S
|
||||
612,0,3,"Jardin, Mr. Jose Neto",male,,0,0,SOTON/O.Q. 3101305,7.05,,S
|
||||
613,1,3,"Murphy, Miss. Margaret Jane",female,,1,0,367230,15.5,,Q
|
||||
614,0,3,"Horgan, Mr. John",male,,0,0,370377,7.75,,Q
|
||||
615,0,3,"Brocklebank, Mr. William Alfred",male,35,0,0,364512,8.05,,S
|
||||
616,1,2,"Herman, Miss. Alice",female,24,1,2,220845,65,,S
|
||||
617,0,3,"Danbom, Mr. Ernst Gilbert",male,34,1,1,347080,14.4,,S
|
||||
618,0,3,"Lobb, Mrs. William Arthur (Cordelia K Stanlick)",female,26,1,0,A/5. 3336,16.1,,S
|
||||
619,1,2,"Becker, Miss. Marion Louise",female,4,2,1,230136,39,F4,S
|
||||
620,0,2,"Gavey, Mr. Lawrence",male,26,0,0,31028,10.5,,S
|
||||
621,0,3,"Yasbeck, Mr. Antoni",male,27,1,0,2659,14.4542,,C
|
||||
622,1,1,"Kimball, Mr. Edwin Nelson Jr",male,42,1,0,11753,52.5542,D19,S
|
||||
623,1,3,"Nakid, Mr. Sahid",male,20,1,1,2653,15.7417,,C
|
||||
624,0,3,"Hansen, Mr. Henry Damsgaard",male,21,0,0,350029,7.8542,,S
|
||||
625,0,3,"Bowen, Mr. David John ""Dai""",male,21,0,0,54636,16.1,,S
|
||||
626,0,1,"Sutton, Mr. Frederick",male,61,0,0,36963,32.3208,D50,S
|
||||
627,0,2,"Kirkland, Rev. Charles Leonard",male,57,0,0,219533,12.35,,Q
|
||||
628,1,1,"Longley, Miss. Gretchen Fiske",female,21,0,0,13502,77.9583,D9,S
|
||||
629,0,3,"Bostandyeff, Mr. Guentcho",male,26,0,0,349224,7.8958,,S
|
||||
630,0,3,"O'Connell, Mr. Patrick D",male,,0,0,334912,7.7333,,Q
|
||||
631,1,1,"Barkworth, Mr. Algernon Henry Wilson",male,80,0,0,27042,30,A23,S
|
||||
632,0,3,"Lundahl, Mr. Johan Svensson",male,51,0,0,347743,7.0542,,S
|
||||
633,1,1,"Stahelin-Maeglin, Dr. Max",male,32,0,0,13214,30.5,B50,C
|
||||
634,0,1,"Parr, Mr. William Henry Marsh",male,,0,0,112052,0,,S
|
||||
635,0,3,"Skoog, Miss. Mabel",female,9,3,2,347088,27.9,,S
|
||||
636,1,2,"Davis, Miss. Mary",female,28,0,0,237668,13,,S
|
||||
637,0,3,"Leinonen, Mr. Antti Gustaf",male,32,0,0,STON/O 2. 3101292,7.925,,S
|
||||
638,0,2,"Collyer, Mr. Harvey",male,31,1,1,C.A. 31921,26.25,,S
|
||||
639,0,3,"Panula, Mrs. Juha (Maria Emilia Ojala)",female,41,0,5,3101295,39.6875,,S
|
||||
640,0,3,"Thorneycroft, Mr. Percival",male,,1,0,376564,16.1,,S
|
||||
641,0,3,"Jensen, Mr. Hans Peder",male,20,0,0,350050,7.8542,,S
|
||||
642,1,1,"Sagesser, Mlle. Emma",female,24,0,0,PC 17477,69.3,B35,C
|
||||
643,0,3,"Skoog, Miss. Margit Elizabeth",female,2,3,2,347088,27.9,,S
|
||||
644,1,3,"Foo, Mr. Choong",male,,0,0,1601,56.4958,,S
|
||||
645,1,3,"Baclini, Miss. Eugenie",female,0.75,2,1,2666,19.2583,,C
|
||||
646,1,1,"Harper, Mr. Henry Sleeper",male,48,1,0,PC 17572,76.7292,D33,C
|
||||
647,0,3,"Cor, Mr. Liudevit",male,19,0,0,349231,7.8958,,S
|
||||
648,1,1,"Simonius-Blumer, Col. Oberst Alfons",male,56,0,0,13213,35.5,A26,C
|
||||
649,0,3,"Willey, Mr. Edward",male,,0,0,S.O./P.P. 751,7.55,,S
|
||||
650,1,3,"Stanley, Miss. Amy Zillah Elsie",female,23,0,0,CA. 2314,7.55,,S
|
||||
651,0,3,"Mitkoff, Mr. Mito",male,,0,0,349221,7.8958,,S
|
||||
652,1,2,"Doling, Miss. Elsie",female,18,0,1,231919,23,,S
|
||||
653,0,3,"Kalvik, Mr. Johannes Halvorsen",male,21,0,0,8475,8.4333,,S
|
||||
654,1,3,"O'Leary, Miss. Hanora ""Norah""",female,,0,0,330919,7.8292,,Q
|
||||
655,0,3,"Hegarty, Miss. Hanora ""Nora""",female,18,0,0,365226,6.75,,Q
|
||||
656,0,2,"Hickman, Mr. Leonard Mark",male,24,2,0,S.O.C. 14879,73.5,,S
|
||||
657,0,3,"Radeff, Mr. Alexander",male,,0,0,349223,7.8958,,S
|
||||
658,0,3,"Bourke, Mrs. John (Catherine)",female,32,1,1,364849,15.5,,Q
|
||||
659,0,2,"Eitemiller, Mr. George Floyd",male,23,0,0,29751,13,,S
|
||||
660,0,1,"Newell, Mr. Arthur Webster",male,58,0,2,35273,113.275,D48,C
|
||||
661,1,1,"Frauenthal, Dr. Henry William",male,50,2,0,PC 17611,133.65,,S
|
||||
662,0,3,"Badt, Mr. Mohamed",male,40,0,0,2623,7.225,,C
|
||||
663,0,1,"Colley, Mr. Edward Pomeroy",male,47,0,0,5727,25.5875,E58,S
|
||||
664,0,3,"Coleff, Mr. Peju",male,36,0,0,349210,7.4958,,S
|
||||
665,1,3,"Lindqvist, Mr. Eino William",male,20,1,0,STON/O 2. 3101285,7.925,,S
|
||||
666,0,2,"Hickman, Mr. Lewis",male,32,2,0,S.O.C. 14879,73.5,,S
|
||||
667,0,2,"Butler, Mr. Reginald Fenton",male,25,0,0,234686,13,,S
|
||||
668,0,3,"Rommetvedt, Mr. Knud Paust",male,,0,0,312993,7.775,,S
|
||||
669,0,3,"Cook, Mr. Jacob",male,43,0,0,A/5 3536,8.05,,S
|
||||
670,1,1,"Taylor, Mrs. Elmer Zebley (Juliet Cummins Wright)",female,,1,0,19996,52,C126,S
|
||||
671,1,2,"Brown, Mrs. Thomas William Solomon (Elizabeth Catherine Ford)",female,40,1,1,29750,39,,S
|
||||
672,0,1,"Davidson, Mr. Thornton",male,31,1,0,F.C. 12750,52,B71,S
|
||||
673,0,2,"Mitchell, Mr. Henry Michael",male,70,0,0,C.A. 24580,10.5,,S
|
||||
674,1,2,"Wilhelms, Mr. Charles",male,31,0,0,244270,13,,S
|
||||
675,0,2,"Watson, Mr. Ennis Hastings",male,,0,0,239856,0,,S
|
||||
676,0,3,"Edvardsson, Mr. Gustaf Hjalmar",male,18,0,0,349912,7.775,,S
|
||||
677,0,3,"Sawyer, Mr. Frederick Charles",male,24.5,0,0,342826,8.05,,S
|
||||
678,1,3,"Turja, Miss. Anna Sofia",female,18,0,0,4138,9.8417,,S
|
||||
679,0,3,"Goodwin, Mrs. Frederick (Augusta Tyler)",female,43,1,6,CA 2144,46.9,,S
|
||||
680,1,1,"Cardeza, Mr. Thomas Drake Martinez",male,36,0,1,PC 17755,512.3292,B51 B53 B55,C
|
||||
681,0,3,"Peters, Miss. Katie",female,,0,0,330935,8.1375,,Q
|
||||
682,1,1,"Hassab, Mr. Hammad",male,27,0,0,PC 17572,76.7292,D49,C
|
||||
683,0,3,"Olsvigen, Mr. Thor Anderson",male,20,0,0,6563,9.225,,S
|
||||
684,0,3,"Goodwin, Mr. Charles Edward",male,14,5,2,CA 2144,46.9,,S
|
||||
685,0,2,"Brown, Mr. Thomas William Solomon",male,60,1,1,29750,39,,S
|
||||
686,0,2,"Laroche, Mr. Joseph Philippe Lemercier",male,25,1,2,SC/Paris 2123,41.5792,,C
|
||||
687,0,3,"Panula, Mr. Jaako Arnold",male,14,4,1,3101295,39.6875,,S
|
||||
688,0,3,"Dakic, Mr. Branko",male,19,0,0,349228,10.1708,,S
|
||||
689,0,3,"Fischer, Mr. Eberhard Thelander",male,18,0,0,350036,7.7958,,S
|
||||
690,1,1,"Madill, Miss. Georgette Alexandra",female,15,0,1,24160,211.3375,B5,S
|
||||
691,1,1,"Dick, Mr. Albert Adrian",male,31,1,0,17474,57,B20,S
|
||||
692,1,3,"Karun, Miss. Manca",female,4,0,1,349256,13.4167,,C
|
||||
693,1,3,"Lam, Mr. Ali",male,,0,0,1601,56.4958,,S
|
||||
694,0,3,"Saad, Mr. Khalil",male,25,0,0,2672,7.225,,C
|
||||
695,0,1,"Weir, Col. John",male,60,0,0,113800,26.55,,S
|
||||
696,0,2,"Chapman, Mr. Charles Henry",male,52,0,0,248731,13.5,,S
|
||||
697,0,3,"Kelly, Mr. James",male,44,0,0,363592,8.05,,S
|
||||
698,1,3,"Mullens, Miss. Katherine ""Katie""",female,,0,0,35852,7.7333,,Q
|
||||
699,0,1,"Thayer, Mr. John Borland",male,49,1,1,17421,110.8833,C68,C
|
||||
700,0,3,"Humblen, Mr. Adolf Mathias Nicolai Olsen",male,42,0,0,348121,7.65,F G63,S
|
||||
701,1,1,"Astor, Mrs. John Jacob (Madeleine Talmadge Force)",female,18,1,0,PC 17757,227.525,C62 C64,C
|
||||
702,1,1,"Silverthorne, Mr. Spencer Victor",male,35,0,0,PC 17475,26.2875,E24,S
|
||||
703,0,3,"Barbara, Miss. Saiide",female,18,0,1,2691,14.4542,,C
|
||||
704,0,3,"Gallagher, Mr. Martin",male,25,0,0,36864,7.7417,,Q
|
||||
705,0,3,"Hansen, Mr. Henrik Juul",male,26,1,0,350025,7.8542,,S
|
||||
706,0,2,"Morley, Mr. Henry Samuel (""Mr Henry Marshall"")",male,39,0,0,250655,26,,S
|
||||
707,1,2,"Kelly, Mrs. Florence ""Fannie""",female,45,0,0,223596,13.5,,S
|
||||
708,1,1,"Calderhead, Mr. Edward Pennington",male,42,0,0,PC 17476,26.2875,E24,S
|
||||
709,1,1,"Cleaver, Miss. Alice",female,22,0,0,113781,151.55,,S
|
||||
710,1,3,"Moubarek, Master. Halim Gonios (""William George"")",male,,1,1,2661,15.2458,,C
|
||||
711,1,1,"Mayne, Mlle. Berthe Antonine (""Mrs de Villiers"")",female,24,0,0,PC 17482,49.5042,C90,C
|
||||
712,0,1,"Klaber, Mr. Herman",male,,0,0,113028,26.55,C124,S
|
||||
713,1,1,"Taylor, Mr. Elmer Zebley",male,48,1,0,19996,52,C126,S
|
||||
714,0,3,"Larsson, Mr. August Viktor",male,29,0,0,7545,9.4833,,S
|
||||
715,0,2,"Greenberg, Mr. Samuel",male,52,0,0,250647,13,,S
|
||||
716,0,3,"Soholt, Mr. Peter Andreas Lauritz Andersen",male,19,0,0,348124,7.65,F G73,S
|
||||
717,1,1,"Endres, Miss. Caroline Louise",female,38,0,0,PC 17757,227.525,C45,C
|
||||
718,1,2,"Troutt, Miss. Edwina Celia ""Winnie""",female,27,0,0,34218,10.5,E101,S
|
||||
719,0,3,"McEvoy, Mr. Michael",male,,0,0,36568,15.5,,Q
|
||||
720,0,3,"Johnson, Mr. Malkolm Joackim",male,33,0,0,347062,7.775,,S
|
||||
721,1,2,"Harper, Miss. Annie Jessie ""Nina""",female,6,0,1,248727,33,,S
|
||||
722,0,3,"Jensen, Mr. Svend Lauritz",male,17,1,0,350048,7.0542,,S
|
||||
723,0,2,"Gillespie, Mr. William Henry",male,34,0,0,12233,13,,S
|
||||
724,0,2,"Hodges, Mr. Henry Price",male,50,0,0,250643,13,,S
|
||||
725,1,1,"Chambers, Mr. Norman Campbell",male,27,1,0,113806,53.1,E8,S
|
||||
726,0,3,"Oreskovic, Mr. Luka",male,20,0,0,315094,8.6625,,S
|
||||
727,1,2,"Renouf, Mrs. Peter Henry (Lillian Jefferys)",female,30,3,0,31027,21,,S
|
||||
728,1,3,"Mannion, Miss. Margareth",female,,0,0,36866,7.7375,,Q
|
||||
729,0,2,"Bryhl, Mr. Kurt Arnold Gottfrid",male,25,1,0,236853,26,,S
|
||||
730,0,3,"Ilmakangas, Miss. Pieta Sofia",female,25,1,0,STON/O2. 3101271,7.925,,S
|
||||
731,1,1,"Allen, Miss. Elisabeth Walton",female,29,0,0,24160,211.3375,B5,S
|
||||
732,0,3,"Hassan, Mr. Houssein G N",male,11,0,0,2699,18.7875,,C
|
||||
733,0,2,"Knight, Mr. Robert J",male,,0,0,239855,0,,S
|
||||
734,0,2,"Berriman, Mr. William John",male,23,0,0,28425,13,,S
|
||||
735,0,2,"Troupiansky, Mr. Moses Aaron",male,23,0,0,233639,13,,S
|
||||
736,0,3,"Williams, Mr. Leslie",male,28.5,0,0,54636,16.1,,S
|
||||
737,0,3,"Ford, Mrs. Edward (Margaret Ann Watson)",female,48,1,3,W./C. 6608,34.375,,S
|
||||
738,1,1,"Lesurer, Mr. Gustave J",male,35,0,0,PC 17755,512.3292,B101,C
|
||||
739,0,3,"Ivanoff, Mr. Kanio",male,,0,0,349201,7.8958,,S
|
||||
740,0,3,"Nankoff, Mr. Minko",male,,0,0,349218,7.8958,,S
|
||||
741,1,1,"Hawksford, Mr. Walter James",male,,0,0,16988,30,D45,S
|
||||
742,0,1,"Cavendish, Mr. Tyrell William",male,36,1,0,19877,78.85,C46,S
|
||||
743,1,1,"Ryerson, Miss. Susan Parker ""Suzette""",female,21,2,2,PC 17608,262.375,B57 B59 B63 B66,C
|
||||
744,0,3,"McNamee, Mr. Neal",male,24,1,0,376566,16.1,,S
|
||||
745,1,3,"Stranden, Mr. Juho",male,31,0,0,STON/O 2. 3101288,7.925,,S
|
||||
746,0,1,"Crosby, Capt. Edward Gifford",male,70,1,1,WE/P 5735,71,B22,S
|
||||
747,0,3,"Abbott, Mr. Rossmore Edward",male,16,1,1,C.A. 2673,20.25,,S
|
||||
748,1,2,"Sinkkonen, Miss. Anna",female,30,0,0,250648,13,,S
|
||||
749,0,1,"Marvin, Mr. Daniel Warner",male,19,1,0,113773,53.1,D30,S
|
||||
750,0,3,"Connaghton, Mr. Michael",male,31,0,0,335097,7.75,,Q
|
||||
751,1,2,"Wells, Miss. Joan",female,4,1,1,29103,23,,S
|
||||
752,1,3,"Moor, Master. Meier",male,6,0,1,392096,12.475,E121,S
|
||||
753,0,3,"Vande Velde, Mr. Johannes Joseph",male,33,0,0,345780,9.5,,S
|
||||
754,0,3,"Jonkoff, Mr. Lalio",male,23,0,0,349204,7.8958,,S
|
||||
755,1,2,"Herman, Mrs. Samuel (Jane Laver)",female,48,1,2,220845,65,,S
|
||||
756,1,2,"Hamalainen, Master. Viljo",male,0.67,1,1,250649,14.5,,S
|
||||
757,0,3,"Carlsson, Mr. August Sigfrid",male,28,0,0,350042,7.7958,,S
|
||||
758,0,2,"Bailey, Mr. Percy Andrew",male,18,0,0,29108,11.5,,S
|
||||
759,0,3,"Theobald, Mr. Thomas Leonard",male,34,0,0,363294,8.05,,S
|
||||
760,1,1,"Rothes, the Countess. of (Lucy Noel Martha Dyer-Edwards)",female,33,0,0,110152,86.5,B77,S
|
||||
761,0,3,"Garfirth, Mr. John",male,,0,0,358585,14.5,,S
|
||||
762,0,3,"Nirva, Mr. Iisakki Antino Aijo",male,41,0,0,SOTON/O2 3101272,7.125,,S
|
||||
763,1,3,"Barah, Mr. Hanna Assi",male,20,0,0,2663,7.2292,,C
|
||||
764,1,1,"Carter, Mrs. William Ernest (Lucile Polk)",female,36,1,2,113760,120,B96 B98,S
|
||||
765,0,3,"Eklund, Mr. Hans Linus",male,16,0,0,347074,7.775,,S
|
||||
766,1,1,"Hogeboom, Mrs. John C (Anna Andrews)",female,51,1,0,13502,77.9583,D11,S
|
||||
767,0,1,"Brewe, Dr. Arthur Jackson",male,,0,0,112379,39.6,,C
|
||||
768,0,3,"Mangan, Miss. Mary",female,30.5,0,0,364850,7.75,,Q
|
||||
769,0,3,"Moran, Mr. Daniel J",male,,1,0,371110,24.15,,Q
|
||||
770,0,3,"Gronnestad, Mr. Daniel Danielsen",male,32,0,0,8471,8.3625,,S
|
||||
771,0,3,"Lievens, Mr. Rene Aime",male,24,0,0,345781,9.5,,S
|
||||
772,0,3,"Jensen, Mr. Niels Peder",male,48,0,0,350047,7.8542,,S
|
||||
773,0,2,"Mack, Mrs. (Mary)",female,57,0,0,S.O./P.P. 3,10.5,E77,S
|
||||
774,0,3,"Elias, Mr. Dibo",male,,0,0,2674,7.225,,C
|
||||
775,1,2,"Hocking, Mrs. Elizabeth (Eliza Needs)",female,54,1,3,29105,23,,S
|
||||
776,0,3,"Myhrman, Mr. Pehr Fabian Oliver Malkolm",male,18,0,0,347078,7.75,,S
|
||||
777,0,3,"Tobin, Mr. Roger",male,,0,0,383121,7.75,F38,Q
|
||||
778,1,3,"Emanuel, Miss. Virginia Ethel",female,5,0,0,364516,12.475,,S
|
||||
779,0,3,"Kilgannon, Mr. Thomas J",male,,0,0,36865,7.7375,,Q
|
||||
780,1,1,"Robert, Mrs. Edward Scott (Elisabeth Walton McMillan)",female,43,0,1,24160,211.3375,B3,S
|
||||
781,1,3,"Ayoub, Miss. Banoura",female,13,0,0,2687,7.2292,,C
|
||||
782,1,1,"Dick, Mrs. Albert Adrian (Vera Gillespie)",female,17,1,0,17474,57,B20,S
|
||||
783,0,1,"Long, Mr. Milton Clyde",male,29,0,0,113501,30,D6,S
|
||||
784,0,3,"Johnston, Mr. Andrew G",male,,1,2,W./C. 6607,23.45,,S
|
||||
785,0,3,"Ali, Mr. William",male,25,0,0,SOTON/O.Q. 3101312,7.05,,S
|
||||
786,0,3,"Harmer, Mr. Abraham (David Lishin)",male,25,0,0,374887,7.25,,S
|
||||
787,1,3,"Sjoblom, Miss. Anna Sofia",female,18,0,0,3101265,7.4958,,S
|
||||
788,0,3,"Rice, Master. George Hugh",male,8,4,1,382652,29.125,,Q
|
||||
789,1,3,"Dean, Master. Bertram Vere",male,1,1,2,C.A. 2315,20.575,,S
|
||||
790,0,1,"Guggenheim, Mr. Benjamin",male,46,0,0,PC 17593,79.2,B82 B84,C
|
||||
791,0,3,"Keane, Mr. Andrew ""Andy""",male,,0,0,12460,7.75,,Q
|
||||
792,0,2,"Gaskell, Mr. Alfred",male,16,0,0,239865,26,,S
|
||||
793,0,3,"Sage, Miss. Stella Anna",female,,8,2,CA. 2343,69.55,,S
|
||||
794,0,1,"Hoyt, Mr. William Fisher",male,,0,0,PC 17600,30.6958,,C
|
||||
795,0,3,"Dantcheff, Mr. Ristiu",male,25,0,0,349203,7.8958,,S
|
||||
796,0,2,"Otter, Mr. Richard",male,39,0,0,28213,13,,S
|
||||
797,1,1,"Leader, Dr. Alice (Farnham)",female,49,0,0,17465,25.9292,D17,S
|
||||
798,1,3,"Osman, Mrs. Mara",female,31,0,0,349244,8.6833,,S
|
||||
799,0,3,"Ibrahim Shawah, Mr. Yousseff",male,30,0,0,2685,7.2292,,C
|
||||
800,0,3,"Van Impe, Mrs. Jean Baptiste (Rosalie Paula Govaert)",female,30,1,1,345773,24.15,,S
|
||||
801,0,2,"Ponesell, Mr. Martin",male,34,0,0,250647,13,,S
|
||||
802,1,2,"Collyer, Mrs. Harvey (Charlotte Annie Tate)",female,31,1,1,C.A. 31921,26.25,,S
|
||||
803,1,1,"Carter, Master. William Thornton II",male,11,1,2,113760,120,B96 B98,S
|
||||
804,1,3,"Thomas, Master. Assad Alexander",male,0.42,0,1,2625,8.5167,,C
|
||||
805,1,3,"Hedman, Mr. Oskar Arvid",male,27,0,0,347089,6.975,,S
|
||||
806,0,3,"Johansson, Mr. Karl Johan",male,31,0,0,347063,7.775,,S
|
||||
807,0,1,"Andrews, Mr. Thomas Jr",male,39,0,0,112050,0,A36,S
|
||||
808,0,3,"Pettersson, Miss. Ellen Natalia",female,18,0,0,347087,7.775,,S
|
||||
809,0,2,"Meyer, Mr. August",male,39,0,0,248723,13,,S
|
||||
810,1,1,"Chambers, Mrs. Norman Campbell (Bertha Griggs)",female,33,1,0,113806,53.1,E8,S
|
||||
811,0,3,"Alexander, Mr. William",male,26,0,0,3474,7.8875,,S
|
||||
812,0,3,"Lester, Mr. James",male,39,0,0,A/4 48871,24.15,,S
|
||||
813,0,2,"Slemen, Mr. Richard James",male,35,0,0,28206,10.5,,S
|
||||
814,0,3,"Andersson, Miss. Ebba Iris Alfrida",female,6,4,2,347082,31.275,,S
|
||||
815,0,3,"Tomlin, Mr. Ernest Portage",male,30.5,0,0,364499,8.05,,S
|
||||
816,0,1,"Fry, Mr. Richard",male,,0,0,112058,0,B102,S
|
||||
817,0,3,"Heininen, Miss. Wendla Maria",female,23,0,0,STON/O2. 3101290,7.925,,S
|
||||
818,0,2,"Mallet, Mr. Albert",male,31,1,1,S.C./PARIS 2079,37.0042,,C
|
||||
819,0,3,"Holm, Mr. John Fredrik Alexander",male,43,0,0,C 7075,6.45,,S
|
||||
820,0,3,"Skoog, Master. Karl Thorsten",male,10,3,2,347088,27.9,,S
|
||||
821,1,1,"Hays, Mrs. Charles Melville (Clara Jennings Gregg)",female,52,1,1,12749,93.5,B69,S
|
||||
822,1,3,"Lulic, Mr. Nikola",male,27,0,0,315098,8.6625,,S
|
||||
823,0,1,"Reuchlin, Jonkheer. John George",male,38,0,0,19972,0,,S
|
||||
824,1,3,"Moor, Mrs. (Beila)",female,27,0,1,392096,12.475,E121,S
|
||||
825,0,3,"Panula, Master. Urho Abraham",male,2,4,1,3101295,39.6875,,S
|
||||
826,0,3,"Flynn, Mr. John",male,,0,0,368323,6.95,,Q
|
||||
827,0,3,"Lam, Mr. Len",male,,0,0,1601,56.4958,,S
|
||||
828,1,2,"Mallet, Master. Andre",male,1,0,2,S.C./PARIS 2079,37.0042,,C
|
||||
829,1,3,"McCormack, Mr. Thomas Joseph",male,,0,0,367228,7.75,,Q
|
||||
830,1,1,"Stone, Mrs. George Nelson (Martha Evelyn)",female,62,0,0,113572,80,B28,
|
||||
831,1,3,"Yasbeck, Mrs. Antoni (Selini Alexander)",female,15,1,0,2659,14.4542,,C
|
||||
832,1,2,"Richards, Master. George Sibley",male,0.83,1,1,29106,18.75,,S
|
||||
833,0,3,"Saad, Mr. Amin",male,,0,0,2671,7.2292,,C
|
||||
834,0,3,"Augustsson, Mr. Albert",male,23,0,0,347468,7.8542,,S
|
||||
835,0,3,"Allum, Mr. Owen George",male,18,0,0,2223,8.3,,S
|
||||
836,1,1,"Compton, Miss. Sara Rebecca",female,39,1,1,PC 17756,83.1583,E49,C
|
||||
837,0,3,"Pasic, Mr. Jakob",male,21,0,0,315097,8.6625,,S
|
||||
838,0,3,"Sirota, Mr. Maurice",male,,0,0,392092,8.05,,S
|
||||
839,1,3,"Chip, Mr. Chang",male,32,0,0,1601,56.4958,,S
|
||||
840,1,1,"Marechal, Mr. Pierre",male,,0,0,11774,29.7,C47,C
|
||||
841,0,3,"Alhomaki, Mr. Ilmari Rudolf",male,20,0,0,SOTON/O2 3101287,7.925,,S
|
||||
842,0,2,"Mudd, Mr. Thomas Charles",male,16,0,0,S.O./P.P. 3,10.5,,S
|
||||
843,1,1,"Serepeca, Miss. Augusta",female,30,0,0,113798,31,,C
|
||||
844,0,3,"Lemberopolous, Mr. Peter L",male,34.5,0,0,2683,6.4375,,C
|
||||
845,0,3,"Culumovic, Mr. Jeso",male,17,0,0,315090,8.6625,,S
|
||||
846,0,3,"Abbing, Mr. Anthony",male,42,0,0,C.A. 5547,7.55,,S
|
||||
847,0,3,"Sage, Mr. Douglas Bullen",male,,8,2,CA. 2343,69.55,,S
|
||||
848,0,3,"Markoff, Mr. Marin",male,35,0,0,349213,7.8958,,C
|
||||
849,0,2,"Harper, Rev. John",male,28,0,1,248727,33,,S
|
||||
850,1,1,"Goldenberg, Mrs. Samuel L (Edwiga Grabowska)",female,,1,0,17453,89.1042,C92,C
|
||||
851,0,3,"Andersson, Master. Sigvard Harald Elias",male,4,4,2,347082,31.275,,S
|
||||
852,0,3,"Svensson, Mr. Johan",male,74,0,0,347060,7.775,,S
|
||||
853,0,3,"Boulos, Miss. Nourelain",female,9,1,1,2678,15.2458,,C
|
||||
854,1,1,"Lines, Miss. Mary Conover",female,16,0,1,PC 17592,39.4,D28,S
|
||||
855,0,2,"Carter, Mrs. Ernest Courtenay (Lilian Hughes)",female,44,1,0,244252,26,,S
|
||||
856,1,3,"Aks, Mrs. Sam (Leah Rosen)",female,18,0,1,392091,9.35,,S
|
||||
857,1,1,"Wick, Mrs. George Dennick (Mary Hitchcock)",female,45,1,1,36928,164.8667,,S
|
||||
858,1,1,"Daly, Mr. Peter Denis ",male,51,0,0,113055,26.55,E17,S
|
||||
859,1,3,"Baclini, Mrs. Solomon (Latifa Qurban)",female,24,0,3,2666,19.2583,,C
|
||||
860,0,3,"Razi, Mr. Raihed",male,,0,0,2629,7.2292,,C
|
||||
861,0,3,"Hansen, Mr. Claus Peter",male,41,2,0,350026,14.1083,,S
|
||||
862,0,2,"Giles, Mr. Frederick Edward",male,21,1,0,28134,11.5,,S
|
||||
863,1,1,"Swift, Mrs. Frederick Joel (Margaret Welles Barron)",female,48,0,0,17466,25.9292,D17,S
|
||||
864,0,3,"Sage, Miss. Dorothy Edith ""Dolly""",female,,8,2,CA. 2343,69.55,,S
|
||||
865,0,2,"Gill, Mr. John William",male,24,0,0,233866,13,,S
|
||||
866,1,2,"Bystrom, Mrs. (Karolina)",female,42,0,0,236852,13,,S
|
||||
867,1,2,"Duran y More, Miss. Asuncion",female,27,1,0,SC/PARIS 2149,13.8583,,C
|
||||
868,0,1,"Roebling, Mr. Washington Augustus II",male,31,0,0,PC 17590,50.4958,A24,S
|
||||
869,0,3,"van Melkebeke, Mr. Philemon",male,,0,0,345777,9.5,,S
|
||||
870,1,3,"Johnson, Master. Harold Theodor",male,4,1,1,347742,11.1333,,S
|
||||
871,0,3,"Balkic, Mr. Cerin",male,26,0,0,349248,7.8958,,S
|
||||
872,1,1,"Beckwith, Mrs. Richard Leonard (Sallie Monypeny)",female,47,1,1,11751,52.5542,D35,S
|
||||
873,0,1,"Carlsson, Mr. Frans Olof",male,33,0,0,695,5,B51 B53 B55,S
|
||||
874,0,3,"Vander Cruyssen, Mr. Victor",male,47,0,0,345765,9,,S
|
||||
875,1,2,"Abelson, Mrs. Samuel (Hannah Wizosky)",female,28,1,0,P/PP 3381,24,,C
|
||||
876,1,3,"Najib, Miss. Adele Kiamie ""Jane""",female,15,0,0,2667,7.225,,C
|
||||
877,0,3,"Gustafsson, Mr. Alfred Ossian",male,20,0,0,7534,9.8458,,S
|
||||
878,0,3,"Petroff, Mr. Nedelio",male,19,0,0,349212,7.8958,,S
|
||||
879,0,3,"Laleff, Mr. Kristo",male,,0,0,349217,7.8958,,S
|
||||
880,1,1,"Potter, Mrs. Thomas Jr (Lily Alexenia Wilson)",female,56,0,1,11767,83.1583,C50,C
|
||||
881,1,2,"Shelley, Mrs. William (Imanita Parrish Hall)",female,25,0,1,230433,26,,S
|
||||
882,0,3,"Markun, Mr. Johann",male,33,0,0,349257,7.8958,,S
|
||||
883,0,3,"Dahlberg, Miss. Gerda Ulrika",female,22,0,0,7552,10.5167,,S
|
||||
884,0,2,"Banfield, Mr. Frederick James",male,28,0,0,C.A./SOTON 34068,10.5,,S
|
||||
885,0,3,"Sutehall, Mr. Henry Jr",male,25,0,0,SOTON/OQ 392076,7.05,,S
|
||||
886,0,3,"Rice, Mrs. William (Margaret Norton)",female,39,0,5,382652,29.125,,Q
|
||||
887,0,2,"Montvila, Rev. Juozas",male,27,0,0,211536,13,,S
|
||||
888,1,1,"Graham, Miss. Margaret Edith",female,19,0,0,112053,30,B42,S
|
||||
889,0,3,"Johnston, Miss. Catherine Helen ""Carrie""",female,,1,2,W./C. 6607,23.45,,S
|
||||
890,1,1,"Behr, Mr. Karl Howell",male,26,0,0,111369,30,C148,C
|
||||
891,0,3,"Dooley, Mr. Patrick",male,32,0,0,370376,7.75,,Q
|
||||
|
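The passenger rows above close out the Titanic training data; as the raw rows show, many records have an empty Age field and most have no Cabin value. A minimal sketch for quantifying those gaps with pandas, assuming the file is read from the repository's data folder as data/titanic-train.csv (the solution notebooks later in this commit load it via a relative ../data/ path):

import pandas as pd

# count missing values per column; Age and Cabin are the sparsest,
# as can be seen directly in the raw rows above
titanic = pd.read_csv('data/titanic-train.csv')
print(titanic.isnull().sum())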
1
data/us_retail_sales.csv
Normal file
@@ -0,0 +1 @@
|
||||
YEAR,JAN,FEB,MAR,APR,MAY,JUN,JUL,AUG,SEP,OCT,NOV,DEC
|
||||
|
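Note that data/us_retail_sales.csv is committed with only its header row (YEAR plus the twelve month columns) and no data rows yet. A hedged sketch of what loading it would look like, assuming a relative data/ path:

import pandas as pd

# the file currently holds only the header, so the frame has the
# YEAR and month columns but zero rows
sales = pd.read_csv('data/us_retail_sales.csv')
print(sales.columns.tolist())
print(len(sales))  # 0 until data rows are added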
101
data/user_visit_duration.csv
Normal file
@@ -0,0 +1,101 @@
|
||||
Time (min),Buy
|
||||
2.0,0
|
||||
0.6833333333333333,0
|
||||
3.216666666666667,1
|
||||
0.9,0
|
||||
1.5333333333333334,1
|
||||
2.8833333333333333,1
|
||||
0.8,0
|
||||
1.4666666666666666,0
|
||||
1.1166666666666667,0
|
||||
0.6,0
|
||||
1.35,1
|
||||
3.183333333333333,1
|
||||
2.7666666666666666,0
|
||||
2.183333333333333,1
|
||||
1.5,1
|
||||
1.3333333333333333,1
|
||||
1.5333333333333334,0
|
||||
0.7833333333333333,0
|
||||
2.9833333333333334,1
|
||||
4.15,1
|
||||
0.85,0
|
||||
2.033333333333333,1
|
||||
1.6,0
|
||||
2.6166666666666667,1
|
||||
2.683333333333333,1
|
||||
1.95,0
|
||||
0.4666666666666667,1
|
||||
2.716666666666667,1
|
||||
2.333333333333333,1
|
||||
3.4166666666666665,1
|
||||
0.26666666666666666,0
|
||||
1.3833333333333333,1
|
||||
0.5166666666666667,0
|
||||
2.7,1
|
||||
2.05,0
|
||||
2.95,1
|
||||
1.2333333333333334,0
|
||||
3.6166666666666663,1
|
||||
1.4333333333333331,1
|
||||
2.066666666666667,0
|
||||
2.066666666666667,1
|
||||
1.5,0
|
||||
2.433333333333333,0
|
||||
2.95,1
|
||||
2.216666666666667,1
|
||||
0.9166666666666666,0
|
||||
2.1,1
|
||||
3.75,1
|
||||
1.0,0
|
||||
0.0,0
|
||||
2.65,1
|
||||
1.55,0
|
||||
1.0666666666666669,0
|
||||
2.0166666666666666,1
|
||||
0.0,0
|
||||
0.0,0
|
||||
0.6666666666666666,0
|
||||
2.5166666666666666,1
|
||||
1.0666666666666669,0
|
||||
1.25,0
|
||||
2.95,1
|
||||
0.0,0
|
||||
1.9666666666666663,0
|
||||
2.2,1
|
||||
2.9,1
|
||||
3.85,1
|
||||
2.3833333333333333,1
|
||||
2.083333333333333,1
|
||||
3.183333333333333,1
|
||||
3.8666666666666663,1
|
||||
2.183333333333333,0
|
||||
2.833333333333333,1
|
||||
2.7333333333333334,1
|
||||
1.3833333333333333,0
|
||||
1.1666666666666667,0
|
||||
0.38333333333333336,0
|
||||
1.1666666666666667,0
|
||||
1.5166666666666666,0
|
||||
3.216666666666667,1
|
||||
1.1333333333333333,0
|
||||
0.7,0
|
||||
0.8166666666666667,0
|
||||
3.883333333333333,1
|
||||
2.216666666666667,1
|
||||
0.75,0
|
||||
2.566666666666667,0
|
||||
0.0,0
|
||||
0.0,0
|
||||
1.7666666666666666,1
|
||||
1.6833333333333331,1
|
||||
0.21666666666666667,0
|
||||
0.0,0
|
||||
2.8833333333333333,1
|
||||
2.466666666666667,1
|
||||
1.2666666666666666,0
|
||||
3.75,1
|
||||
3.883333333333333,1
|
||||
1.5666666666666669,0
|
||||
1.6666666666666667,0
|
||||
2.15,1
|
||||
|
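The user_visit_duration.csv file pairs one continuous feature, Time (min), with a binary Buy label, which makes it a natural toy dataset for a single-input classifier. The following is a minimal sketch, not part of the committed notebooks, that loads the file (assuming a relative data/ path) and fits a scikit-learn logistic regression; scikit-learn is pinned in the environment.yml added later in this commit.

import pandas as pd
from sklearn.linear_model import LogisticRegression

# one feature (minutes spent on the site) and a 0/1 purchase label
df = pd.read_csv('data/user_visit_duration.csv')
X = df[['Time (min)']].values  # shape (n_samples, 1)
y = df['Buy'].values

# fit a simple logistic regression and report training accuracy
clf = LogisticRegression()
clf.fit(X, y)
print(clf.score(X, y))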
10001
data/weight-height.csv
Normal file
File diff suppressed because it is too large
179
data/wines.csv
Normal file
@@ -0,0 +1,179 @@
|
||||
Class,Alcohol,Malic_acid,Ash,Alcalinity_of_ash,Magnesium,Total_phenols,Flavanoids,Nonflavanoid_phenols,Proanthocyanins,Color_intensity,Hue,OD280-OD315_of_diluted_wines,Proline
|
||||
1,14.23,1.71,2.43,15.6,127,2.8,3.06,.28,2.29,5.64,1.04,3.92,1065
|
||||
1,13.2,1.78,2.14,11.2,100,2.65,2.76,.26,1.28,4.38,1.05,3.4,1050
|
||||
1,13.16,2.36,2.67,18.6,101,2.8,3.24,.3,2.81,5.68,1.03,3.17,1185
|
||||
1,14.37,1.95,2.5,16.8,113,3.85,3.49,.24,2.18,7.8,.86,3.45,1480
|
||||
1,13.24,2.59,2.87,21,118,2.8,2.69,.39,1.82,4.32,1.04,2.93,735
|
||||
1,14.2,1.76,2.45,15.2,112,3.27,3.39,.34,1.97,6.75,1.05,2.85,1450
|
||||
1,14.39,1.87,2.45,14.6,96,2.5,2.52,.3,1.98,5.25,1.02,3.58,1290
|
||||
1,14.06,2.15,2.61,17.6,121,2.6,2.51,.31,1.25,5.05,1.06,3.58,1295
|
||||
1,14.83,1.64,2.17,14,97,2.8,2.98,.29,1.98,5.2,1.08,2.85,1045
|
||||
1,13.86,1.35,2.27,16,98,2.98,3.15,.22,1.85,7.22,1.01,3.55,1045
|
||||
1,14.1,2.16,2.3,18,105,2.95,3.32,.22,2.38,5.75,1.25,3.17,1510
|
||||
1,14.12,1.48,2.32,16.8,95,2.2,2.43,.26,1.57,5,1.17,2.82,1280
|
||||
1,13.75,1.73,2.41,16,89,2.6,2.76,.29,1.81,5.6,1.15,2.9,1320
|
||||
1,14.75,1.73,2.39,11.4,91,3.1,3.69,.43,2.81,5.4,1.25,2.73,1150
|
||||
1,14.38,1.87,2.38,12,102,3.3,3.64,.29,2.96,7.5,1.2,3,1547
|
||||
1,13.63,1.81,2.7,17.2,112,2.85,2.91,.3,1.46,7.3,1.28,2.88,1310
|
||||
1,14.3,1.92,2.72,20,120,2.8,3.14,.33,1.97,6.2,1.07,2.65,1280
|
||||
1,13.83,1.57,2.62,20,115,2.95,3.4,.4,1.72,6.6,1.13,2.57,1130
|
||||
1,14.19,1.59,2.48,16.5,108,3.3,3.93,.32,1.86,8.7,1.23,2.82,1680
|
||||
1,13.64,3.1,2.56,15.2,116,2.7,3.03,.17,1.66,5.1,.96,3.36,845
|
||||
1,14.06,1.63,2.28,16,126,3,3.17,.24,2.1,5.65,1.09,3.71,780
|
||||
1,12.93,3.8,2.65,18.6,102,2.41,2.41,.25,1.98,4.5,1.03,3.52,770
|
||||
1,13.71,1.86,2.36,16.6,101,2.61,2.88,.27,1.69,3.8,1.11,4,1035
|
||||
1,12.85,1.6,2.52,17.8,95,2.48,2.37,.26,1.46,3.93,1.09,3.63,1015
|
||||
1,13.5,1.81,2.61,20,96,2.53,2.61,.28,1.66,3.52,1.12,3.82,845
|
||||
1,13.05,2.05,3.22,25,124,2.63,2.68,.47,1.92,3.58,1.13,3.2,830
|
||||
1,13.39,1.77,2.62,16.1,93,2.85,2.94,.34,1.45,4.8,.92,3.22,1195
|
||||
1,13.3,1.72,2.14,17,94,2.4,2.19,.27,1.35,3.95,1.02,2.77,1285
|
||||
1,13.87,1.9,2.8,19.4,107,2.95,2.97,.37,1.76,4.5,1.25,3.4,915
|
||||
1,14.02,1.68,2.21,16,96,2.65,2.33,.26,1.98,4.7,1.04,3.59,1035
|
||||
1,13.73,1.5,2.7,22.5,101,3,3.25,.29,2.38,5.7,1.19,2.71,1285
|
||||
1,13.58,1.66,2.36,19.1,106,2.86,3.19,.22,1.95,6.9,1.09,2.88,1515
|
||||
1,13.68,1.83,2.36,17.2,104,2.42,2.69,.42,1.97,3.84,1.23,2.87,990
|
||||
1,13.76,1.53,2.7,19.5,132,2.95,2.74,.5,1.35,5.4,1.25,3,1235
|
||||
1,13.51,1.8,2.65,19,110,2.35,2.53,.29,1.54,4.2,1.1,2.87,1095
|
||||
1,13.48,1.81,2.41,20.5,100,2.7,2.98,.26,1.86,5.1,1.04,3.47,920
|
||||
1,13.28,1.64,2.84,15.5,110,2.6,2.68,.34,1.36,4.6,1.09,2.78,880
|
||||
1,13.05,1.65,2.55,18,98,2.45,2.43,.29,1.44,4.25,1.12,2.51,1105
|
||||
1,13.07,1.5,2.1,15.5,98,2.4,2.64,.28,1.37,3.7,1.18,2.69,1020
|
||||
1,14.22,3.99,2.51,13.2,128,3,3.04,.2,2.08,5.1,.89,3.53,760
|
||||
1,13.56,1.71,2.31,16.2,117,3.15,3.29,.34,2.34,6.13,.95,3.38,795
|
||||
1,13.41,3.84,2.12,18.8,90,2.45,2.68,.27,1.48,4.28,.91,3,1035
|
||||
1,13.88,1.89,2.59,15,101,3.25,3.56,.17,1.7,5.43,.88,3.56,1095
|
||||
1,13.24,3.98,2.29,17.5,103,2.64,2.63,.32,1.66,4.36,.82,3,680
|
||||
1,13.05,1.77,2.1,17,107,3,3,.28,2.03,5.04,.88,3.35,885
|
||||
1,14.21,4.04,2.44,18.9,111,2.85,2.65,.3,1.25,5.24,.87,3.33,1080
|
||||
1,14.38,3.59,2.28,16,102,3.25,3.17,.27,2.19,4.9,1.04,3.44,1065
|
||||
1,13.9,1.68,2.12,16,101,3.1,3.39,.21,2.14,6.1,.91,3.33,985
|
||||
1,14.1,2.02,2.4,18.8,103,2.75,2.92,.32,2.38,6.2,1.07,2.75,1060
|
||||
1,13.94,1.73,2.27,17.4,108,2.88,3.54,.32,2.08,8.90,1.12,3.1,1260
|
||||
1,13.05,1.73,2.04,12.4,92,2.72,3.27,.17,2.91,7.2,1.12,2.91,1150
|
||||
1,13.83,1.65,2.6,17.2,94,2.45,2.99,.22,2.29,5.6,1.24,3.37,1265
|
||||
1,13.82,1.75,2.42,14,111,3.88,3.74,.32,1.87,7.05,1.01,3.26,1190
|
||||
1,13.77,1.9,2.68,17.1,115,3,2.79,.39,1.68,6.3,1.13,2.93,1375
|
||||
1,13.74,1.67,2.25,16.4,118,2.6,2.9,.21,1.62,5.85,.92,3.2,1060
|
||||
1,13.56,1.73,2.46,20.5,116,2.96,2.78,.2,2.45,6.25,.98,3.03,1120
|
||||
1,14.22,1.7,2.3,16.3,118,3.2,3,.26,2.03,6.38,.94,3.31,970
|
||||
1,13.29,1.97,2.68,16.8,102,3,3.23,.31,1.66,6,1.07,2.84,1270
|
||||
1,13.72,1.43,2.5,16.7,108,3.4,3.67,.19,2.04,6.8,.89,2.87,1285
|
||||
2,12.37,.94,1.36,10.6,88,1.98,.57,.28,.42,1.95,1.05,1.82,520
|
||||
2,12.33,1.1,2.28,16,101,2.05,1.09,.63,.41,3.27,1.25,1.67,680
|
||||
2,12.64,1.36,2.02,16.8,100,2.02,1.41,.53,.62,5.75,.98,1.59,450
|
||||
2,13.67,1.25,1.92,18,94,2.1,1.79,.32,.73,3.8,1.23,2.46,630
|
||||
2,12.37,1.13,2.16,19,87,3.5,3.1,.19,1.87,4.45,1.22,2.87,420
|
||||
2,12.17,1.45,2.53,19,104,1.89,1.75,.45,1.03,2.95,1.45,2.23,355
|
||||
2,12.37,1.21,2.56,18.1,98,2.42,2.65,.37,2.08,4.6,1.19,2.3,678
|
||||
2,13.11,1.01,1.7,15,78,2.98,3.18,.26,2.28,5.3,1.12,3.18,502
|
||||
2,12.37,1.17,1.92,19.6,78,2.11,2,.27,1.04,4.68,1.12,3.48,510
|
||||
2,13.34,.94,2.36,17,110,2.53,1.3,.55,.42,3.17,1.02,1.93,750
|
||||
2,12.21,1.19,1.75,16.8,151,1.85,1.28,.14,2.5,2.85,1.28,3.07,718
|
||||
2,12.29,1.61,2.21,20.4,103,1.1,1.02,.37,1.46,3.05,.906,1.82,870
|
||||
2,13.86,1.51,2.67,25,86,2.95,2.86,.21,1.87,3.38,1.36,3.16,410
|
||||
2,13.49,1.66,2.24,24,87,1.88,1.84,.27,1.03,3.74,.98,2.78,472
|
||||
2,12.99,1.67,2.6,30,139,3.3,2.89,.21,1.96,3.35,1.31,3.5,985
|
||||
2,11.96,1.09,2.3,21,101,3.38,2.14,.13,1.65,3.21,.99,3.13,886
|
||||
2,11.66,1.88,1.92,16,97,1.61,1.57,.34,1.15,3.8,1.23,2.14,428
|
||||
2,13.03,.9,1.71,16,86,1.95,2.03,.24,1.46,4.6,1.19,2.48,392
|
||||
2,11.84,2.89,2.23,18,112,1.72,1.32,.43,.95,2.65,.96,2.52,500
|
||||
2,12.33,.99,1.95,14.8,136,1.9,1.85,.35,2.76,3.4,1.06,2.31,750
|
||||
2,12.7,3.87,2.4,23,101,2.83,2.55,.43,1.95,2.57,1.19,3.13,463
|
||||
2,12,.92,2,19,86,2.42,2.26,.3,1.43,2.5,1.38,3.12,278
|
||||
2,12.72,1.81,2.2,18.8,86,2.2,2.53,.26,1.77,3.9,1.16,3.14,714
|
||||
2,12.08,1.13,2.51,24,78,2,1.58,.4,1.4,2.2,1.31,2.72,630
|
||||
2,13.05,3.86,2.32,22.5,85,1.65,1.59,.61,1.62,4.8,.84,2.01,515
|
||||
2,11.84,.89,2.58,18,94,2.2,2.21,.22,2.35,3.05,.79,3.08,520
|
||||
2,12.67,.98,2.24,18,99,2.2,1.94,.3,1.46,2.62,1.23,3.16,450
|
||||
2,12.16,1.61,2.31,22.8,90,1.78,1.69,.43,1.56,2.45,1.33,2.26,495
|
||||
2,11.65,1.67,2.62,26,88,1.92,1.61,.4,1.34,2.6,1.36,3.21,562
|
||||
2,11.64,2.06,2.46,21.6,84,1.95,1.69,.48,1.35,2.8,1,2.75,680
|
||||
2,12.08,1.33,2.3,23.6,70,2.2,1.59,.42,1.38,1.74,1.07,3.21,625
|
||||
2,12.08,1.83,2.32,18.5,81,1.6,1.5,.52,1.64,2.4,1.08,2.27,480
|
||||
2,12,1.51,2.42,22,86,1.45,1.25,.5,1.63,3.6,1.05,2.65,450
|
||||
2,12.69,1.53,2.26,20.7,80,1.38,1.46,.58,1.62,3.05,.96,2.06,495
|
||||
2,12.29,2.83,2.22,18,88,2.45,2.25,.25,1.99,2.15,1.15,3.3,290
|
||||
2,11.62,1.99,2.28,18,98,3.02,2.26,.17,1.35,3.25,1.16,2.96,345
|
||||
2,12.47,1.52,2.2,19,162,2.5,2.27,.32,3.28,2.6,1.16,2.63,937
|
||||
2,11.81,2.12,2.74,21.5,134,1.6,.99,.14,1.56,2.5,.95,2.26,625
|
||||
2,12.29,1.41,1.98,16,85,2.55,2.5,.29,1.77,2.9,1.23,2.74,428
|
||||
2,12.37,1.07,2.1,18.5,88,3.52,3.75,.24,1.95,4.5,1.04,2.77,660
|
||||
2,12.29,3.17,2.21,18,88,2.85,2.99,.45,2.81,2.3,1.42,2.83,406
|
||||
2,12.08,2.08,1.7,17.5,97,2.23,2.17,.26,1.4,3.3,1.27,2.96,710
|
||||
2,12.6,1.34,1.9,18.5,88,1.45,1.36,.29,1.35,2.45,1.04,2.77,562
|
||||
2,12.34,2.45,2.46,21,98,2.56,2.11,.34,1.31,2.8,.8,3.38,438
|
||||
2,11.82,1.72,1.88,19.5,86,2.5,1.64,.37,1.42,2.06,.94,2.44,415
|
||||
2,12.51,1.73,1.98,20.5,85,2.2,1.92,.32,1.48,2.94,1.04,3.57,672
|
||||
2,12.42,2.55,2.27,22,90,1.68,1.84,.66,1.42,2.7,.86,3.3,315
|
||||
2,12.25,1.73,2.12,19,80,1.65,2.03,.37,1.63,3.4,1,3.17,510
|
||||
2,12.72,1.75,2.28,22.5,84,1.38,1.76,.48,1.63,3.3,.88,2.42,488
|
||||
2,12.22,1.29,1.94,19,92,2.36,2.04,.39,2.08,2.7,.86,3.02,312
|
||||
2,11.61,1.35,2.7,20,94,2.74,2.92,.29,2.49,2.65,.96,3.26,680
|
||||
2,11.46,3.74,1.82,19.5,107,3.18,2.58,.24,3.58,2.9,.75,2.81,562
|
||||
2,12.52,2.43,2.17,21,88,2.55,2.27,.26,1.22,2,.9,2.78,325
|
||||
2,11.76,2.68,2.92,20,103,1.75,2.03,.6,1.05,3.8,1.23,2.5,607
|
||||
2,11.41,.74,2.5,21,88,2.48,2.01,.42,1.44,3.08,1.1,2.31,434
|
||||
2,12.08,1.39,2.5,22.5,84,2.56,2.29,.43,1.04,2.9,.93,3.19,385
|
||||
2,11.03,1.51,2.2,21.5,85,2.46,2.17,.52,2.01,1.9,1.71,2.87,407
|
||||
2,11.82,1.47,1.99,20.8,86,1.98,1.6,.3,1.53,1.95,.95,3.33,495
|
||||
2,12.42,1.61,2.19,22.5,108,2,2.09,.34,1.61,2.06,1.06,2.96,345
|
||||
2,12.77,3.43,1.98,16,80,1.63,1.25,.43,.83,3.4,.7,2.12,372
|
||||
2,12,3.43,2,19,87,2,1.64,.37,1.87,1.28,.93,3.05,564
|
||||
2,11.45,2.4,2.42,20,96,2.9,2.79,.32,1.83,3.25,.8,3.39,625
|
||||
2,11.56,2.05,3.23,28.5,119,3.18,5.08,.47,1.87,6,.93,3.69,465
|
||||
2,12.42,4.43,2.73,26.5,102,2.2,2.13,.43,1.71,2.08,.92,3.12,365
|
||||
2,13.05,5.8,2.13,21.5,86,2.62,2.65,.3,2.01,2.6,.73,3.1,380
|
||||
2,11.87,4.31,2.39,21,82,2.86,3.03,.21,2.91,2.8,.75,3.64,380
|
||||
2,12.07,2.16,2.17,21,85,2.6,2.65,.37,1.35,2.76,.86,3.28,378
|
||||
2,12.43,1.53,2.29,21.5,86,2.74,3.15,.39,1.77,3.94,.69,2.84,352
|
||||
2,11.79,2.13,2.78,28.5,92,2.13,2.24,.58,1.76,3,.97,2.44,466
|
||||
2,12.37,1.63,2.3,24.5,88,2.22,2.45,.4,1.9,2.12,.89,2.78,342
|
||||
2,12.04,4.3,2.38,22,80,2.1,1.75,.42,1.35,2.6,.79,2.57,580
|
||||
3,12.86,1.35,2.32,18,122,1.51,1.25,.21,.94,4.1,.76,1.29,630
|
||||
3,12.88,2.99,2.4,20,104,1.3,1.22,.24,.83,5.4,.74,1.42,530
|
||||
3,12.81,2.31,2.4,24,98,1.15,1.09,.27,.83,5.7,.66,1.36,560
|
||||
3,12.7,3.55,2.36,21.5,106,1.7,1.2,.17,.84,5,.78,1.29,600
|
||||
3,12.51,1.24,2.25,17.5,85,2,.58,.6,1.25,5.45,.75,1.51,650
|
||||
3,12.6,2.46,2.2,18.5,94,1.62,.66,.63,.94,7.1,.73,1.58,695
|
||||
3,12.25,4.72,2.54,21,89,1.38,.47,.53,.8,3.85,.75,1.27,720
|
||||
3,12.53,5.51,2.64,25,96,1.79,.6,.63,1.1,5,.82,1.69,515
|
||||
3,13.49,3.59,2.19,19.5,88,1.62,.48,.58,.88,5.7,.81,1.82,580
|
||||
3,12.84,2.96,2.61,24,101,2.32,.6,.53,.81,4.92,.89,2.15,590
|
||||
3,12.93,2.81,2.7,21,96,1.54,.5,.53,.75,4.6,.77,2.31,600
|
||||
3,13.36,2.56,2.35,20,89,1.4,.5,.37,.64,5.6,.7,2.47,780
|
||||
3,13.52,3.17,2.72,23.5,97,1.55,.52,.5,.55,4.35,.89,2.06,520
|
||||
3,13.62,4.95,2.35,20,92,2,.8,.47,1.02,4.4,.91,2.05,550
|
||||
3,12.25,3.88,2.2,18.5,112,1.38,.78,.29,1.14,8.21,.65,2,855
|
||||
3,13.16,3.57,2.15,21,102,1.5,.55,.43,1.3,4,.6,1.68,830
|
||||
3,13.88,5.04,2.23,20,80,.98,.34,.4,.68,4.9,.58,1.33,415
|
||||
3,12.87,4.61,2.48,21.5,86,1.7,.65,.47,.86,7.65,.54,1.86,625
|
||||
3,13.32,3.24,2.38,21.5,92,1.93,.76,.45,1.25,8.42,.55,1.62,650
|
||||
3,13.08,3.9,2.36,21.5,113,1.41,1.39,.34,1.14,9.40,.57,1.33,550
|
||||
3,13.5,3.12,2.62,24,123,1.4,1.57,.22,1.25,8.60,.59,1.3,500
|
||||
3,12.79,2.67,2.48,22,112,1.48,1.36,.24,1.26,10.8,.48,1.47,480
|
||||
3,13.11,1.9,2.75,25.5,116,2.2,1.28,.26,1.56,7.1,.61,1.33,425
|
||||
3,13.23,3.3,2.28,18.5,98,1.8,.83,.61,1.87,10.52,.56,1.51,675
|
||||
3,12.58,1.29,2.1,20,103,1.48,.58,.53,1.4,7.6,.58,1.55,640
|
||||
3,13.17,5.19,2.32,22,93,1.74,.63,.61,1.55,7.9,.6,1.48,725
|
||||
3,13.84,4.12,2.38,19.5,89,1.8,.83,.48,1.56,9.01,.57,1.64,480
|
||||
3,12.45,3.03,2.64,27,97,1.9,.58,.63,1.14,7.5,.67,1.73,880
|
||||
3,14.34,1.68,2.7,25,98,2.8,1.31,.53,2.7,13,.57,1.96,660
|
||||
3,13.48,1.67,2.64,22.5,89,2.6,1.1,.52,2.29,11.75,.57,1.78,620
|
||||
3,12.36,3.83,2.38,21,88,2.3,.92,.5,1.04,7.65,.56,1.58,520
|
||||
3,13.69,3.26,2.54,20,107,1.83,.56,.5,.8,5.88,.96,1.82,680
|
||||
3,12.85,3.27,2.58,22,106,1.65,.6,.6,.96,5.58,.87,2.11,570
|
||||
3,12.96,3.45,2.35,18.5,106,1.39,.7,.4,.94,5.28,.68,1.75,675
|
||||
3,13.78,2.76,2.3,22,90,1.35,.68,.41,1.03,9.58,.7,1.68,615
|
||||
3,13.73,4.36,2.26,22.5,88,1.28,.47,.52,1.15,6.62,.78,1.75,520
|
||||
3,13.45,3.7,2.6,23,111,1.7,.92,.43,1.46,10.68,.85,1.56,695
|
||||
3,12.82,3.37,2.3,19.5,88,1.48,.66,.4,.97,10.26,.72,1.75,685
|
||||
3,13.58,2.58,2.69,24.5,105,1.55,.84,.39,1.54,8.66,.74,1.8,750
|
||||
3,13.4,4.6,2.86,25,112,1.98,.96,.27,1.11,8.5,.67,1.92,630
|
||||
3,12.2,3.03,2.32,19,96,1.25,.49,.4,.73,5.5,.66,1.83,510
|
||||
3,12.77,2.39,2.28,19.5,86,1.39,.51,.48,.64,9.899999,.57,1.63,470
|
||||
3,14.16,2.51,2.48,20,91,1.68,.7,.44,1.24,9.7,.62,1.71,660
|
||||
3,13.71,5.65,2.45,20.5,95,1.68,.61,.52,1.06,7.7,.64,1.74,740
|
||||
3,13.4,3.91,2.48,23,102,1.8,.75,.43,1.41,7.3,.7,1.56,750
|
||||
3,13.27,4.28,2.26,20,120,1.59,.69,.43,1.35,10.2,.59,1.56,835
|
||||
3,13.17,2.59,2.37,20,120,1.65,.68,.53,1.46,9.3,.6,1.62,840
|
||||
3,14.13,4.1,2.74,24.5,96,2.05,.76,.56,1.35,9.2,.61,1.6,560
|
||||
|
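wines.csv stores the class label in its first column (Class, taking the values 1, 2 and 3) followed by thirteen chemical measurements per wine. A minimal loading sketch, assuming the same relative data/ layout used above:

import pandas as pd

# split the wine table into a feature matrix and the class label
wines = pd.read_csv('data/wines.csv')
X = wines.drop('Class', axis=1).values  # 13 measurements per sample
y = wines['Class'].values               # labels 1, 2 or 3

print(wines['Class'].value_counts())    # samples per class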
21
environment.yml
Normal file
@@ -0,0 +1,21 @@
|
||||
name: ztdl
|
||||
channels:
|
||||
- defaults
|
||||
dependencies:
|
||||
- python=3.7.*
|
||||
- bz2file==0.98
|
||||
- cython==0.29.*
|
||||
- pip==21.0.*
|
||||
- numpy==1.19.*
|
||||
- jupyter==1.0.*
|
||||
- matplotlib==3.3.*
|
||||
- setuptools==52.0.*
|
||||
- scikit-learn==0.24.*
|
||||
- scipy==1.6.*
|
||||
- pandas==1.2.*
|
||||
- pillow==8.2.*
|
||||
- seaborn==0.11.*
|
||||
- pytest==6.2.*
|
||||
- twisted==21.2.*
|
||||
- pip:
|
||||
- tensorflow==2.5.*
|
||||
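environment.yml pins the conda and pip packages the notebooks rely on (Python 3.7, numpy 1.19, pandas 1.2, scikit-learn 0.24, TensorFlow 2.5, and so on). A small sketch, offered only as a convenience, for checking from inside Python that an activated environment matches those pins:

import sys
import numpy, pandas, sklearn, matplotlib, tensorflow

# print the interpreter and key package versions so they can be
# compared against the pins in environment.yml
print("python      ", sys.version.split()[0])   # expected 3.7.x
print("numpy       ", numpy.__version__)        # expected 1.19.x
print("pandas      ", pandas.__version__)       # expected 1.2.x
print("scikit-learn", sklearn.__version__)      # expected 0.24.x
print("matplotlib  ", matplotlib.__version__)   # expected 3.3.x
print("tensorflow  ", tensorflow.__version__)   # expected 2.5.x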
197
exercises/First Deep Learning Model commented.ipynb
Normal file
@@ -0,0 +1,197 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# First Deep Learning Model"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Imports"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import numpy as np # import the numpy library and assign the name np to it\n",
|
||||
"%matplotlib inline # magic function that sets the backend of matplotlib to the inline backend\n",
|
||||
"import matplotlib.pyplot as plt # import the matplotlib.pyplot and assign the name plt to it"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from sklearn.datasets import make_circles # import the make_circles module from the sklearn.datasets module"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X, y = make_circles(n_samples=1000,\n",
|
||||
" noise=0.1,\n",
|
||||
" factor=0.2,\n",
|
||||
" random_state=0)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"plt.figure(figsize=(5, 5))\n",
|
||||
"plt.plot(X[y==0, 0], X[y==0, 1], 'ob', alpha=0.5)\n",
|
||||
"plt.plot(X[y==1, 0], X[y==1, 1], 'xr', alpha=0.5)\n",
|
||||
"plt.xlim(-1.5, 1.5)\n",
|
||||
"plt.ylim(-1.5, 1.5)\n",
|
||||
"plt.legend(['0', '1'])\n",
|
||||
"plt.title(\"Blue circles and Red crosses\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from tensorflow.keras.models import Sequential\n",
|
||||
"from tensorflow.keras.layers import Dense\n",
|
||||
"from tensorflow.keras.optimizers import SGD"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model = Sequential()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.add(Dense(4, input_shape=(2,), activation='tanh'))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.add(Dense(1, activation='sigmoid'))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.compile(SGD(learning_rate=0.5), 'binary_crossentropy', metrics=['accuracy'])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.fit(X, y, epochs=20)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"hticks = np.linspace(-1.5, 1.5, 101)\n",
|
||||
"vticks = np.linspace(-1.5, 1.5, 101)\n",
|
||||
"aa, bb = np.meshgrid(hticks, vticks)\n",
|
||||
"ab = np.c_[aa.ravel(), bb.ravel()]\n",
|
||||
"c = model.predict(ab)\n",
|
||||
"cc = c.reshape(aa.shape)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"plt.figure(figsize=(5, 5))\n",
|
||||
"plt.contourf(aa, bb, cc, cmap='bwr', alpha=0.2)\n",
|
||||
"plt.plot(X[y==0, 0], X[y==0, 1], 'ob', alpha=0.5)\n",
|
||||
"plt.plot(X[y==1, 0], X[y==1, 1], 'xr', alpha=0.5)\n",
|
||||
"plt.xlim(-1.5, 1.5)\n",
|
||||
"plt.ylim(-1.5, 1.5)\n",
|
||||
"plt.legend(['0', '1'])\n",
|
||||
"plt.title(\"Blue circles and Red crosses\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.7.10"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
||||
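The decision-boundary cell near the end of the notebook above relies on a small amount of shape bookkeeping: np.meshgrid turns the two 101-point tick vectors into a pair of 101x101 coordinate grids, np.c_ flattens them into a (10201, 2) array of points the model can score, and the predictions are reshaped back onto the grid for plt.contourf. A standalone sketch of that mechanic, with a placeholder predict function standing in for the trained Keras model:

import numpy as np

def predict(points):
    # placeholder for model.predict: score each (x, y) point with any function
    return (points[:, 0] ** 2 + points[:, 1] ** 2 < 0.5).astype(float)

hticks = np.linspace(-1.5, 1.5, 101)
vticks = np.linspace(-1.5, 1.5, 101)
aa, bb = np.meshgrid(hticks, vticks)  # two (101, 101) coordinate grids
ab = np.c_[aa.ravel(), bb.ravel()]    # (10201, 2): one row per grid point
c = predict(ab)                       # (10201,) scores, one per point
cc = c.reshape(aa.shape)              # back to (101, 101) for plt.contourf
print(aa.shape, ab.shape, cc.shape)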
2604
exercises/Jupyter notebook CVML.ipynb
Normal file
File diff suppressed because one or more lines are too long
6
exercises/Untitled.ipynb
Normal file
@@ -0,0 +1,6 @@
|
||||
{
|
||||
"cells": [],
|
||||
"metadata": {},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
|
||||
404
solutions/2 Data exploration Exercises Solution.ipynb
Normal file
@@ -0,0 +1,404 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import numpy as np\n",
|
||||
"\n",
|
||||
"%matplotlib inline\n",
|
||||
"import matplotlib.pyplot as plt\n",
|
||||
"\n",
|
||||
"import pandas as pd"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Exercise 1\n",
|
||||
"- load the dataset: `../data/international-airline-passengers.csv`\n",
|
||||
"- inspect it using the `.info()` and `.head()` commands\n",
|
||||
"- use the function `pd.to_datetime()` to change the column type of 'Month' to a datatime type\n",
|
||||
"- set the index of df to be a datetime index using the column 'Month' and the `df.set_index()` method\n",
|
||||
"- choose the appropriate plot and display the data\n",
|
||||
"- choose appropriate scale\n",
|
||||
"- label the axes"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# - load the dataset: ../data/international-airline-passengers.csv\n",
|
||||
"df = pd.read_csv('../data/international-airline-passengers.csv')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# - inspect it using the .info() and .head() commands\n",
|
||||
"df.info()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df.head()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# - use the function to_datetime() to change the column type of 'Month' to a datatime type\n",
|
||||
"# - set the index of df to be a datetime index using the column 'Month' and tthe set_index() method\n",
|
||||
"\n",
|
||||
"df['Month'] = pd.to_datetime(df['Month'])\n",
|
||||
"df = df.set_index('Month')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df.head()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# - choose the appropriate plot and display the data\n",
|
||||
"# - choose appropriate scale\n",
|
||||
"# - label the axes\n",
|
||||
"\n",
|
||||
"df.plot();"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Exercise 2\n",
|
||||
"- load the dataset: `../data/weight-height.csv`\n",
|
||||
"- inspect it\n",
|
||||
"- plot it using a scatter plot with Weight as a function of Height\n",
|
||||
"- plot the male and female populations with 2 different colors on a new scatter plot\n",
|
||||
"- remember to label the axes"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# - load the dataset: ../data/weight-height.csv\n",
|
||||
"# - inspect it\n",
|
||||
"df = pd.read_csv('../data/weight-height.csv')\n",
|
||||
"df.head()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df.info()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df.describe()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"scrolled": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df['Gender'].value_counts()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# - plot it using a scatter plot with Weight as a function of Height\n",
|
||||
"_ = df.plot(kind='scatter', x='Height', y='Weight');"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# - plot the male and female populations with 2 different colors on a new scatter plot\n",
|
||||
"# - remember to label the axes\n",
|
||||
"\n",
|
||||
"# this can be done in several ways, showing 2 here:\n",
|
||||
"males = df[df['Gender'] == 'Male']\n",
|
||||
"females = df.query('Gender == \"Female\"')\n",
|
||||
"fig, ax = plt.subplots()\n",
|
||||
"\n",
|
||||
"males.plot(kind='scatter', x='Height', y='Weight',\n",
|
||||
" ax=ax, color='blue', alpha=0.3,\n",
|
||||
" title='Male & Female Populations')\n",
|
||||
"\n",
|
||||
"females.plot(kind='scatter', x='Height', y='Weight',\n",
|
||||
" ax=ax, color='red', alpha=0.3);"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df['Gendercolor'] = df['Gender'].map({'Male': 'blue', 'Female': 'red'})\n",
|
||||
"df.head()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df.plot(kind='scatter', \n",
|
||||
" x='Height',\n",
|
||||
" y='Weight',\n",
|
||||
" c=df['Gendercolor'],\n",
|
||||
" alpha=0.3,\n",
|
||||
" title='Male & Female Populations');"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"fig, ax = plt.subplots()\n",
|
||||
"ax.plot(males['Height'], males['Weight'], 'ob', \n",
|
||||
" females['Height'], females['Weight'], 'or', alpha=0.3)\n",
|
||||
"plt.xlabel('Height')\n",
|
||||
"plt.ylabel('Weight')\n",
|
||||
"plt.title('Male & Female Populations');"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {
|
||||
"collapsed": true
|
||||
},
|
||||
"source": [
|
||||
"## Exercise 3\n",
|
||||
"- plot the histogram of the heights for males and for females on the same plot\n",
|
||||
"- use alpha to control transparency in the plot comand\n",
|
||||
"- plot a vertical line at the mean of each population using `plt.axvline()`"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"males['Height'].plot(kind='hist',\n",
|
||||
" bins=50,\n",
|
||||
" range=(50, 80),\n",
|
||||
" alpha=0.3,\n",
|
||||
" color='blue')\n",
|
||||
"\n",
|
||||
"females['Height'].plot(kind='hist',\n",
|
||||
" bins=50,\n",
|
||||
" range=(50, 80),\n",
|
||||
" alpha=0.3,\n",
|
||||
" color='red')\n",
|
||||
"\n",
|
||||
"plt.title('Height distribution')\n",
|
||||
"plt.legend([\"Males\", \"Females\"])\n",
|
||||
"plt.xlabel(\"Heigth (in)\")\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"plt.axvline(males['Height'].mean(), color='blue', linewidth=2)\n",
|
||||
"plt.axvline(females['Height'].mean(), color='red', linewidth=2);"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"males['Height'].plot(kind='hist',\n",
|
||||
" bins=200,\n",
|
||||
" range=(50, 80),\n",
|
||||
" alpha=0.3,\n",
|
||||
" color='blue',\n",
|
||||
" cumulative=True,\n",
|
||||
" density=True)\n",
|
||||
"\n",
|
||||
"females['Height'].plot(kind='hist',\n",
|
||||
" bins=200,\n",
|
||||
" range=(50, 80),\n",
|
||||
" alpha=0.3,\n",
|
||||
" color='red',\n",
|
||||
" cumulative=True,\n",
|
||||
" density=True)\n",
|
||||
"\n",
|
||||
"plt.title('Height distribution')\n",
|
||||
"plt.legend([\"Males\", \"Females\"])\n",
|
||||
"plt.xlabel(\"Heigth (in)\")\n",
|
||||
"\n",
|
||||
"plt.axhline(0.8)\n",
|
||||
"plt.axhline(0.5)\n",
|
||||
"plt.axhline(0.2);"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Exercise 4\n",
|
||||
"- plot the weights of the males and females using a box plot\n",
|
||||
"- which one is easier to read?\n",
|
||||
"- (remember to put in titles, axes and legends)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"dfpvt = df.pivot(columns = 'Gender', values = 'Weight')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"dfpvt.head()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"dfpvt.info()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"dfpvt.plot(kind='box')\n",
|
||||
"plt.title('Weight Box Plot')\n",
|
||||
"plt.ylabel(\"Weight (lbs)\");"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Exercise 5\n",
|
||||
"- load the dataset: `../data/titanic-train.csv`\n",
|
||||
"- learn about scattermatrix here: http://pandas.pydata.org/pandas-docs/stable/visualization.html\n",
|
||||
"- display the data using a scattermatrix"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df = pd.read_csv('../data/titanic-train.csv')\n",
|
||||
"df.head()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from pandas.plotting import scatter_matrix"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"_ = scatter_matrix(df.drop('PassengerId', axis=1), figsize=(10, 10))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"anaconda-cloud": {},
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.7.10"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 1
|
||||
}
|
||||
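In Exercise 4 of the notebook above, df.pivot(columns='Gender', values='Weight') reshapes the long table into one weight column per gender so the box plots sit side by side; each row keeps its original index and the column belonging to the other gender is filled with NaN. A tiny sketch of that reshaping on made-up numbers:

import pandas as pd

# small stand-in for the weight-height data
df = pd.DataFrame({'Gender': ['Male', 'Female', 'Male', 'Female'],
                   'Weight': [180.0, 130.0, 170.0, 120.0]})

# one column per gender; each row keeps only the value for its own gender
dfpvt = df.pivot(columns='Gender', values='Weight')
print(dfpvt)
# Gender  Female   Male
# 0          NaN  180.0
# 1        130.0    NaN
# 2          NaN  170.0
# 3        120.0    NaN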
669
solutions/3 Machine Learning Exercises Solution.ipynb
Normal file
@@ -0,0 +1,669 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# Machine Learning Exercises Solution"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"%matplotlib inline\n",
|
||||
"import matplotlib.pyplot as plt\n",
|
||||
"import pandas as pd\n",
|
||||
"import numpy as np"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Exercise 1\n",
|
||||
"\n",
|
||||
"You've just been hired at a real estate investment firm and they would like you to build a model for pricing houses. You are given a dataset that contains data for house prices and a few features like number of bedrooms, size in square feet and age of the house. Let's see if you can build a model that is able to predict the price. In this exercise we extend what we have learned about linear regression to a dataset with more than one feature. Here are the steps to complete it:\n",
|
||||
"\n",
|
||||
"1. Load the dataset ../data/housing-data.csv\n",
|
||||
"- plot the histograms for each feature\n",
|
||||
"- create 2 variables called X and y: X shall be a matrix with 3 columns (sqft,bdrms,age) and y shall be a vector with 1 column (price)\n",
|
||||
"- create a linear regression model in Keras with the appropriate number of inputs and output\n",
|
||||
"- split the data into train and test with a 20% test size\n",
|
||||
"- train the model on the training set and check its accuracy on training and test set\n",
|
||||
"- how's your model doing? Is the loss growing smaller?\n",
|
||||
"- try to improve your model with these experiments:\n",
|
||||
" - normalize the input features with one of the rescaling techniques mentioned above\n",
|
||||
" - use a different value for the learning rate of your model\n",
|
||||
" - use a different optimizer\n",
|
||||
"- once you're satisfied with training, check the R2score on the test set"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Load the dataset ../data/housing-data.csv\n",
|
||||
"df = pd.read_csv('../data/housing-data.csv')\n",
|
||||
"df.head()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df.columns"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# plot the histograms for each feature\n",
|
||||
"plt.figure(figsize=(15, 5))\n",
|
||||
"for i, feature in enumerate(df.columns):\n",
|
||||
" plt.subplot(1, 4, i+1)\n",
|
||||
" df[feature].plot(kind='hist', title=feature)\n",
|
||||
" plt.xlabel(feature)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# create 2 variables called X and y:\n",
|
||||
"# X shall be a matrix with 3 columns (sqft,bdrms,age)\n",
|
||||
"# and y shall be a vector with 1 column (price)\n",
|
||||
"X = df[['sqft', 'bdrms', 'age']].values\n",
|
||||
"y = df['price'].values"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"y"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from tensorflow.keras.models import Sequential\n",
|
||||
"from tensorflow.keras.layers import Dense\n",
|
||||
"from tensorflow.keras.optimizers import Adam"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# create a linear regression model in Keras\n",
|
||||
"# with the appropriate number of inputs and output\n",
|
||||
"model = Sequential()\n",
|
||||
"model.add(Dense(1, input_shape=(3,)))\n",
|
||||
"model.compile(Adam(learning_rate=0.8), 'mean_squared_error')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from sklearn.model_selection import train_test_split"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# split the data into train and test with a 20% test size\n",
|
||||
"X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"len(X_train)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"len(X)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# train the model on the training set and check its accuracy on training and test set\n",
|
||||
"# how's your model doing? Is the loss growing smaller?\n",
|
||||
"model.fit(X_train, y_train, epochs=10)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df.describe()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from sklearn.metrics import r2_score"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# check the R2score on training and test set (probably very bad)\n",
|
||||
"\n",
|
||||
"y_train_pred = model.predict(X_train)\n",
|
||||
"y_test_pred = model.predict(X_test)\n",
|
||||
"\n",
|
||||
"print(\"The R2 score on the Train set is:\\t{:0.3f}\".format(r2_score(y_train, y_train_pred)))\n",
|
||||
"print(\"The R2 score on the Test set is:\\t{:0.3f}\".format(r2_score(y_test, y_test_pred)))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# try to improve your model with these experiments:\n",
|
||||
"# - normalize the input features with one of the rescaling techniques mentioned above\n",
|
||||
"# - use a different value for the learning rate of your model\n",
|
||||
"# - use a different optimizer\n",
|
||||
"df['sqft1000'] = df['sqft']/1000.0\n",
|
||||
"df['age10'] = df['age']/10.0\n",
|
||||
"df['price100k'] = df['price']/1e5"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X = df[['sqft1000', 'bdrms', 'age10']].values\n",
|
||||
"y = df['price100k'].values"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model = Sequential()\n",
|
||||
"model.add(Dense(1, input_dim=3))\n",
|
||||
"model.compile(Adam(learning_rate=0.1), 'mean_squared_error')\n",
|
||||
"model.fit(X_train, y_train, epochs=20)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# once you're satisfied with training, check the R2score on the test set\n",
|
||||
"\n",
|
||||
"y_train_pred = model.predict(X_train)\n",
|
||||
"y_test_pred = model.predict(X_test)\n",
|
||||
"\n",
|
||||
"print(\"The R2 score on the Train set is:\\t{:0.3f}\".format(r2_score(y_train, y_train_pred)))\n",
|
||||
"print(\"The R2 score on the Test set is:\\t{:0.3f}\".format(r2_score(y_test, y_test_pred)))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.fit(X_train, y_train, epochs=40, verbose=0)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# once you're satisfied with training, check the R2score on the test set\n",
|
||||
"\n",
|
||||
"y_train_pred = model.predict(X_train)\n",
|
||||
"y_test_pred = model.predict(X_test)\n",
|
||||
"\n",
|
||||
"print(\"The R2 score on the Train set is:\\t{:0.3f}\".format(r2_score(y_train, y_train_pred)))\n",
|
||||
"print(\"The R2 score on the Test set is:\\t{:0.3f}\".format(r2_score(y_test, y_test_pred)))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Exercise 2\n",
|
||||
"\n",
|
||||
"Your boss was extremely happy with your work on the housing price prediction model and decided to entrust you with a more challenging task. They've seen a lot of people leave the company recently and they would like to understand why that's happening. They have collected historical data on employees and they would like you to build a model that is able to predict which employee will leave next. The would like a model that is better than random guessing. They also prefer false negatives than false positives, in this first phase. Fields in the dataset include:\n",
|
||||
"\n",
|
||||
"- Employee satisfaction level\n",
|
||||
"- Last evaluation\n",
|
||||
"- Number of projects\n",
|
||||
"- Average monthly hours\n",
|
||||
"- Time spent at the company\n",
|
||||
"- Whether they have had a work accident\n",
|
||||
"- Whether they have had a promotion in the last 5 years\n",
|
||||
"- Department\n",
|
||||
"- Salary\n",
|
||||
"- Whether the employee has left\n",
|
||||
"\n",
|
||||
"Your goal is to predict the binary outcome variable `left` using the rest of the data. Since the outcome is binary, this is a classification problem. Here are some things you may want to try out:\n",
|
||||
"\n",
|
||||
"1. load the dataset at ../data/HR_comma_sep.csv, inspect it with `.head()`, `.info()` and `.describe()`.\n",
|
||||
"- Establish a benchmark: what would be your accuracy score if you predicted everyone stay?\n",
|
||||
"- Check if any feature needs rescaling. You may plot a histogram of the feature to decide which rescaling method is more appropriate.\n",
|
||||
"- convert the categorical features into binary dummy columns. You will then have to combine them with the numerical features using `pd.concat`.\n",
|
||||
"- do the usual train/test split with a 20% test size\n",
|
||||
"- play around with learning rate and optimizer\n",
|
||||
"- check the confusion matrix, precision and recall\n",
|
||||
"- check if you still get the same results if you use a 5-Fold cross validation on all the data\n",
|
||||
"- Is the model good enough for your boss?\n",
|
||||
"\n",
|
||||
"As you will see in this exercise, the a logistic regression model is not good enough to help your boss. In the next chapter we will learn how to go beyond linear models.\n",
|
||||
"\n",
|
||||
"This dataset comes from https://www.kaggle.com/ludobenistant/hr-analytics/ and is released under [CC BY-SA 4.0 License](https://creativecommons.org/licenses/by-sa/4.0/)."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# load the dataset at ../data/HR_comma_sep.csv, inspect it with `.head()`, `.info()` and `.describe()`.\n",
|
||||
"\n",
|
||||
"df = pd.read_csv('../data/HR_comma_sep.csv')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df.head()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df.info()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df.describe()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Establish a benchmark: what would be your accuracy score if you predicted everyone stay?\n",
|
||||
"\n",
|
||||
"df.left.value_counts() / len(df)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Predicting 0 all the time would yield an accuracy of 76%"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Check if any feature needs rescaling.\n",
|
||||
"# You may plot a histogram of the feature to decide which rescaling method is more appropriate.\n",
|
||||
"df['average_montly_hours'].plot(kind='hist');"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
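"# average_montly_hours is in the hundreds: divide by 100 so this feature has a similar scale to the others\n",
|
||||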
"df['average_montly_hours_100'] = df['average_montly_hours']/100.0"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df['average_montly_hours_100'].plot(kind='hist');"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df['time_spend_company'].plot(kind='hist');"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# convert the categorical features into binary dummy columns.\n",
|
||||
"# You will then have to combine them with\n",
|
||||
"# the numerical features using `pd.concat`.\n",
|
||||
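"# 'sales' (the department column in this dataset) and 'salary' are the two categorical features\n",
|
||||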
"df_dummies = pd.get_dummies(df[['sales', 'salary']])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df_dummies.head()\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df.columns"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X = pd.concat([df[['satisfaction_level', 'last_evaluation', 'number_project',\n",
|
||||
" 'time_spend_company', 'Work_accident',\n",
|
||||
" 'promotion_last_5years', 'average_montly_hours_100']],\n",
|
||||
" df_dummies], axis=1).values\n",
|
||||
"y = df['left'].values"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# do the usual train/test split with a 20% test size\n",
|
||||
"\n",
|
||||
"X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# play around with learning rate and optimizer\n",
|
||||
"\n",
|
||||
"model = Sequential()\n",
|
||||
"model.add(Dense(1, input_dim=20, activation='sigmoid'))\n",
|
||||
"model.compile(Adam(learning_rate=0.5), 'binary_crossentropy', metrics=['accuracy'])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.summary()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.fit(X_train, y_train, epochs=10)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"y_test_pred = model.predict_classes(X_test)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from sklearn.metrics import confusion_matrix, classification_report"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
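"# helper: wrap sklearn's confusion_matrix in a labeled DataFrame for easier reading\n",
|
||||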
"def pretty_confusion_matrix(y_true, y_pred, labels=[\"False\", \"True\"]):\n",
|
||||
" cm = confusion_matrix(y_true, y_pred)\n",
|
||||
" pred_labels = ['Predicted '+ l for l in labels]\n",
|
||||
" df = pd.DataFrame(cm, index=labels, columns=pred_labels)\n",
|
||||
" return df"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# check the confusion matrix, precision and recall\n",
|
||||
"\n",
|
||||
"pretty_confusion_matrix(y_test, y_test_pred, labels=['Stay', 'Leave'])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"print(classification_report(y_test, y_test_pred))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from tensorflow.keras.wrappers.scikit_learn import KerasClassifier"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# check if you still get the same results if you use a 5-Fold cross validation on all the data\n",
|
||||
"\n",
|
||||
"def build_logistic_regression_model():\n",
|
||||
" model = Sequential()\n",
|
||||
" model.add(Dense(1, input_dim=20, activation='sigmoid'))\n",
|
||||
" model.compile(Adam(learning_rate=0.5), 'binary_crossentropy', metrics=['accuracy'])\n",
|
||||
" return model\n",
|
||||
"\n",
|
||||
"model = KerasClassifier(build_fn=build_logistic_regression_model,\n",
|
||||
" epochs=10, verbose=0)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from sklearn.model_selection import KFold, cross_val_score"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
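"# 5-fold cross validation of the wrapped Keras classifier on the full dataset\n",
|
||||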
"cv = KFold(5, shuffle=True)\n",
|
||||
"scores = cross_val_score(model, X, y, cv=cv)\n",
|
||||
"\n",
|
||||
"print(\"The cross validation accuracy is {:0.4f} ± {:0.4f}\".format(scores.mean(), scores.std()))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"scores"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Is the model good enough for your boss?"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"No, the model is not good enough for my boss, since it performs no better than the benchmark."
|
||||
]
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.7.10"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
||||
413
solutions/4 Deep Learning Intro Exercises Solution.ipynb
Normal file
413
solutions/4 Deep Learning Intro Exercises Solution.ipynb
Normal file
@@ -0,0 +1,413 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# Deep Learning Intro"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"%matplotlib inline\n",
|
||||
"import matplotlib.pyplot as plt\n",
|
||||
"import pandas as pd\n",
|
||||
"import numpy as np"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Exercise 1"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {
|
||||
"collapsed": true
|
||||
},
|
||||
"source": [
|
||||
"The [Pima Indians dataset](https://archive.ics.uci.edu/ml/datasets/diabetes) is a very famous dataset distributed by UCI and originally collected from the National Institute of Diabetes and Digestive and Kidney Diseases. It contains data from clinical exams for women age 21 and above of Pima indian origins. The objective is to predict based on diagnostic measurements whether a patient has diabetes.\n",
|
||||
"\n",
|
||||
"It has the following features:\n",
|
||||
"\n",
|
||||
"- Pregnancies: Number of times pregnant\n",
|
||||
"- Glucose: Plasma glucose concentration a 2 hours in an oral glucose tolerance test\n",
|
||||
"- BloodPressure: Diastolic blood pressure (mm Hg)\n",
|
||||
"- SkinThickness: Triceps skin fold thickness (mm)\n",
|
||||
"- Insulin: 2-Hour serum insulin (mu U/ml)\n",
|
||||
"- BMI: Body mass index (weight in kg/(height in m)^2)\n",
|
||||
"- DiabetesPedigreeFunction: Diabetes pedigree function\n",
|
||||
"- Age: Age (years)\n",
|
||||
"\n",
|
||||
"The last colum is the outcome, and it is a binary variable.\n",
|
||||
"\n",
|
||||
"In this first exercise we will explore it through the following steps:\n",
|
||||
"\n",
|
||||
"1. Load the ..data/diabetes.csv dataset, use pandas to explore the range of each feature\n",
|
||||
"- For each feature draw a histogram. Bonus points if you draw all the histograms in the same figure.\n",
|
||||
"- Explore correlations of features with the outcome column. You can do this in several ways, for example using the `sns.pairplot` we used above or drawing a heatmap of the correlations.\n",
|
||||
"- Do features need standardization? If so what stardardization technique will you use? MinMax? Standard?\n",
|
||||
"- Prepare your final `X` and `y` variables to be used by a ML model. Make sure you define your target variable well. Will you need dummy columns?"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df = pd.read_csv('../data/diabetes.csv')\n",
|
||||
"df.head()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"_ = df.hist(figsize=(12, 10))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import seaborn as sns"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"sns.pairplot(df, hue='Outcome');"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"sns.heatmap(df.corr(), annot = True)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df.info()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df.describe()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from sklearn.preprocessing import StandardScaler"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from tensorflow.keras.utils import to_categorical"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"sc = StandardScaler()\n",
|
||||
"X = sc.fit_transform(df.drop('Outcome', axis=1))\n",
|
||||
"y = df['Outcome'].values\n",
|
||||
"y_cat = to_categorical(y)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"y_cat.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Exercise 2"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {
|
||||
"collapsed": true
|
||||
},
|
||||
"source": [
|
||||
"Build a fully connected NN model that predicts diabetes. Follow these steps:\n",
|
||||
"\n",
|
||||
"1. Split your data in a train/test with a test size of 20% and a `random_state = 22`\n",
|
||||
"- define a sequential model with at least one inner layer. You will have to make choices for the following things:\n",
|
||||
" - what is the size of the input?\n",
|
||||
" - how many nodes will you use in each layer?\n",
|
||||
" - what is the size of the output?\n",
|
||||
" - what activation functions will you use in the inner layers?\n",
|
||||
" - what activation function will you use at output?\n",
|
||||
" - what loss function will you use?\n",
|
||||
" - what optimizer will you use?\n",
|
||||
"- fit your model on the training set, using a validation_split of 0.1\n",
|
||||
"- test your trained model on the test data from the train/test split\n",
|
||||
"- check the accuracy score, the confusion matrix and the classification report"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from sklearn.model_selection import train_test_split"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X_train, X_test, y_train, y_test = train_test_split(X, y_cat,\n",
|
||||
" random_state=22,\n",
|
||||
" test_size=0.2)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from tensorflow.keras.models import Sequential\n",
|
||||
"from tensorflow.keras.layers import Dense\n",
|
||||
"from tensorflow.keras.optimizers import Adam"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model = Sequential()\n",
|
||||
"model.add(Dense(32, input_shape=(8,), activation='relu'))\n",
|
||||
"model.add(Dense(32, activation='relu'))\n",
|
||||
"model.add(Dense(2, activation='softmax'))\n",
|
||||
"model.compile(Adam(learning_rate=0.05),\n",
|
||||
" loss='categorical_crossentropy',\n",
|
||||
" metrics=['accuracy'])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.summary()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
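"# sanity check on the first Dense layer's parameter count: 8 inputs x 32 units + 32 biases\n",
|
||||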
"32*8 + 32"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.fit(X_train, y_train, epochs=20, verbose=2, validation_split=0.1)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"y_pred = model.predict(X_test)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
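"# convert one-hot targets and predicted probabilities back to class indices\n",
|
||||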
"y_test_class = np.argmax(y_test, axis=1)\n",
|
||||
"y_pred_class = np.argmax(y_pred, axis=1)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from sklearn.metrics import accuracy_score\n",
|
||||
"from sklearn.metrics import classification_report\n",
|
||||
"from sklearn.metrics import confusion_matrix"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"pd.Series(y_test_class).value_counts() / len(y_test_class)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"accuracy_score(y_test_class, y_pred_class)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"print(classification_report(y_test_class, y_pred_class))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"confusion_matrix(y_test_class, y_pred_class)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Exercise 3\n",
|
||||
"Compare your work with the results presented in [this notebook](https://www.kaggle.com/sheshu/pima-data-visualisation-and-machine-learning). Are your Neural Network results better or worse than the results obtained by traditional Machine Learning techniques?\n",
|
||||
"\n",
|
||||
"- Try training a Support Vector Machine or a Random Forest model on the exact same train/test split. Is the performance better or worse?\n",
|
||||
"- Try restricting your features to only 4 features like in the suggested notebook. How does model performance change?"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from sklearn.ensemble import RandomForestClassifier\n",
|
||||
"from sklearn.svm import SVC\n",
|
||||
"from sklearn.naive_bayes import GaussianNB\n",
|
||||
"\n",
|
||||
"for mod in [RandomForestClassifier(), SVC(), GaussianNB()]:\n",
|
||||
" mod.fit(X_train, y_train[:, 1])\n",
|
||||
" y_pred = mod.predict(X_test)\n",
|
||||
" print(\"=\"*80)\n",
|
||||
" print(mod)\n",
|
||||
" print(\"-\"*80)\n",
|
||||
" print(\"Accuracy score: {:0.3}\".format(accuracy_score(y_test_class,\n",
|
||||
" y_pred)))\n",
|
||||
" print(\"Confusion Matrix:\")\n",
|
||||
" print(confusion_matrix(y_test_class, y_pred))\n",
|
||||
" print()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Exercise 4\n",
|
||||
"\n",
|
||||
"[Tensorflow playground](http://playground.tensorflow.org/) is a web based neural network demo. It is really useful to develop an intuition about what happens when you change architecture, activation function or other parameters. Try playing with it for a few minutes. You don't nee do understand the meaning of every knob and button in the page, just get a sense for what happens if you change something. In the next chapter we'll explore these things in more detail.\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.7.10"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
||||
518
solutions/5 Gradient Descent Exercises Solution.ipynb
Normal file
518
solutions/5 Gradient Descent Exercises Solution.ipynb
Normal file
@@ -0,0 +1,518 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# Gradient Descent"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import numpy as np\n",
|
||||
"import pandas as pd\n",
|
||||
"%matplotlib inline\n",
|
||||
"import matplotlib.pyplot as plt"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Exercise 1\n",
|
||||
"\n",
|
||||
"You've just been hired at a wine company and they would like you to help them build a model that predicts the quality of their wine based on several measurements. They give you a dataset with wine\n",
|
||||
"\n",
|
||||
"- Load the ../data/wines.csv into Pandas\n",
|
||||
"- Use the column called \"Class\" as target\n",
|
||||
"- Check how many classes are there in target, and if necessary use dummy columns for a multi-class classification\n",
|
||||
"- Use all the other columns as features, check their range and distribution (using seaborn pairplot)\n",
|
||||
"- Rescale all the features using either MinMaxScaler or StandardScaler\n",
|
||||
"- Build a deep model with at least 1 hidden layer to classify the data\n",
|
||||
"- Choose the cost function, what will you use? Mean Squared Error? Binary Cross-Entropy? Categorical Cross-Entropy?\n",
|
||||
"- Choose an optimizer\n",
|
||||
"- Choose a value for the learning rate, you may want to try with several values\n",
|
||||
"- Choose a batch size\n",
|
||||
"- Train your model on all the data using a `validation_split=0.2`. Can you converge to 100% validation accuracy?\n",
|
||||
"- What's the minumum number of epochs to converge?\n",
|
||||
"- Repeat the training several times to verify how stable your results are"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df = pd.read_csv('../data/wines.csv')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df.head()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"y = df['Class']"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"y.value_counts()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"y_cat = pd.get_dummies(y)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"y_cat.head()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X = df.drop('Class', axis=1)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import seaborn as sns"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"sns.pairplot(df, hue='Class')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from sklearn.preprocessing import StandardScaler"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"sc = StandardScaler()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"Xsc = sc.fit_transform(X)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from tensorflow.keras.models import Sequential\n",
|
||||
"from tensorflow.keras.layers import Dense\n",
|
||||
"from tensorflow.keras.optimizers import SGD, Adam, Adadelta, RMSprop\n",
|
||||
"import tensorflow.keras.backend as K"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"K.clear_session()\n",
|
||||
"model = Sequential()\n",
|
||||
"model.add(Dense(5, input_shape=(13,),\n",
|
||||
" kernel_initializer='he_normal',\n",
|
||||
" activation='relu'))\n",
|
||||
"model.add(Dense(3, activation='softmax'))\n",
|
||||
"\n",
|
||||
"model.compile(RMSprop(learning_rate=0.1),\n",
|
||||
" 'categorical_crossentropy',\n",
|
||||
" metrics=['accuracy'])\n",
|
||||
"\n",
|
||||
"model.fit(Xsc, y_cat.values,\n",
|
||||
" batch_size=8,\n",
|
||||
" epochs=10,\n",
|
||||
" verbose=1,\n",
|
||||
" validation_split=0.2)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Exercise 2\n",
|
||||
"\n",
|
||||
"Since this dataset has 13 features we can only visualize pairs of features like we did in the Paired plot. We could however exploit the fact that a neural network is a function to extract 2 high level features to represent our data.\n",
|
||||
"\n",
|
||||
"- Build a deep fully connected network with the following structure:\n",
|
||||
" - Layer 1: 8 nodes\n",
|
||||
" - Layer 2: 5 nodes\n",
|
||||
" - Layer 3: 2 nodes\n",
|
||||
" - Output : 3 nodes\n",
|
||||
"- Choose activation functions, inizializations, optimizer and learning rate so that it converges to 100% accuracy within 20 epochs (not easy)\n",
|
||||
"- Remember to train the model on the scaled data\n",
|
||||
"- Define a Feature Function like we did above between the input of the 1st layer and the output of the 3rd layer\n",
|
||||
"- Calculate the features and plot them on a 2-dimensional scatter plot\n",
|
||||
"- Can we distinguish the 3 classes well?\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"K.clear_session()\n",
|
||||
"model = Sequential()\n",
|
||||
"model.add(Dense(8, input_shape=(13,),\n",
|
||||
" kernel_initializer='he_normal', activation='tanh'))\n",
|
||||
"model.add(Dense(5, kernel_initializer='he_normal', activation='tanh'))\n",
|
||||
"model.add(Dense(2, kernel_initializer='he_normal', activation='tanh'))\n",
|
||||
"model.add(Dense(3, activation='softmax'))\n",
|
||||
"\n",
|
||||
"model.compile(RMSprop(learning_rate=0.05),\n",
|
||||
" 'categorical_crossentropy',\n",
|
||||
" metrics=['accuracy'])\n",
|
||||
"\n",
|
||||
"model.fit(Xsc, y_cat.values,\n",
|
||||
" batch_size=16,\n",
|
||||
" epochs=20,\n",
|
||||
" verbose=1)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.summary()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"inp = model.layers[0].input\n",
|
||||
"out = model.layers[2].output"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"features_function = K.function([inp], [out])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"features = features_function([Xsc])[0]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"features.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"plt.scatter(features[:, 0], features[:, 1], c=y)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Exercise 3\n",
|
||||
"\n",
|
||||
"Keras functional API. So far we've always used the Sequential model API in Keras. However, Keras also offers a Functional API, which is much more powerful. You can find its [documentation here](https://keras.io/getting-started/functional-api-guide/). Let's see how we can leverage it.\n",
|
||||
"\n",
|
||||
"- define an input layer called `inputs`\n",
|
||||
"- define two hidden layers as before, one with 8 nodes, one with 5 nodes\n",
|
||||
"- define a `second_to_last` layer with 2 nodes\n",
|
||||
"- define an output layer with 3 nodes\n",
|
||||
"- create a model that connect input and output\n",
|
||||
"- train it and make sure that it converges\n",
|
||||
"- define a function between inputs and second_to_last layer\n",
|
||||
"- recalculate the features and plot them"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from tensorflow.keras.layers import Input\n",
|
||||
"from tensorflow.keras.models import Model"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"K.clear_session()\n",
|
||||
"\n",
|
||||
"inputs = Input(shape=(13,))\n",
|
||||
"x = Dense(8, kernel_initializer='he_normal', activation='tanh')(inputs)\n",
|
||||
"x = Dense(5, kernel_initializer='he_normal', activation='tanh')(x)\n",
|
||||
"second_to_last = Dense(2, kernel_initializer='he_normal',\n",
|
||||
" activation='tanh')(x)\n",
|
||||
"outputs = Dense(3, activation='softmax')(second_to_last)\n",
|
||||
"\n",
|
||||
"model = Model(inputs=inputs, outputs=outputs)\n",
|
||||
"\n",
|
||||
"model.compile(RMSprop(learning_rate=0.05),\n",
|
||||
" 'categorical_crossentropy',\n",
|
||||
" metrics=['accuracy'])\n",
|
||||
"\n",
|
||||
"model.fit(Xsc, y_cat.values, batch_size=16, epochs=20, verbose=1)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"features_function = K.function([inputs], [second_to_last])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"features = features_function([Xsc])[0]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"plt.scatter(features[:, 0], features[:, 1], c=y)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Exercise 4 \n",
|
||||
"\n",
|
||||
"Keras offers the possibility to call a function at each epoch. These are Callbacks, and their [documentation is here](https://keras.io/callbacks/). Callbacks allow us to add some neat functionality. In this exercise we'll explore a few of them.\n",
|
||||
"\n",
|
||||
"- Split the data into train and test sets with a test_size = 0.3 and random_state=42\n",
|
||||
"- Reset and recompile your model\n",
|
||||
"- train the model on the train data using `validation_data=(X_test, y_test)`\n",
|
||||
"- Use the `EarlyStopping` callback to stop your training if the `val_loss` doesn't improve\n",
|
||||
"- Use the `ModelCheckpoint` callback to save the trained model to disk once training is finished\n",
|
||||
"- Use the `TensorBoard` callback to output your training information to a `/tmp/` subdirectory\n",
|
||||
"- Watch the next video for an overview of tensorboard"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from tensorflow.keras.callbacks import ModelCheckpoint, EarlyStopping, TensorBoard"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
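"# save the model weights to disk each time the validation loss improves\n",
|
||||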
"checkpointer = ModelCheckpoint(filepath=\"/tmp/udemy/weights.hdf5\",\n",
|
||||
" verbose=1, save_best_only=True)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"earlystopper = EarlyStopping(monitor='val_loss', min_delta=0,\n",
|
||||
" patience=1, verbose=1, mode='auto')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"tensorboard = TensorBoard(log_dir='/tmp/udemy/tensorboard/')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from sklearn.model_selection import train_test_split"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X_train, X_test, y_train, y_test = train_test_split(Xsc, y_cat.values,\n",
|
||||
" test_size=0.3,\n",
|
||||
" random_state=42)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"K.clear_session()\n",
|
||||
"\n",
|
||||
"inputs = Input(shape=(13,))\n",
|
||||
"\n",
|
||||
"x = Dense(8, kernel_initializer='he_normal', activation='tanh')(inputs)\n",
|
||||
"x = Dense(5, kernel_initializer='he_normal', activation='tanh')(x)\n",
|
||||
"second_to_last = Dense(2, kernel_initializer='he_normal',\n",
|
||||
" activation='tanh')(x)\n",
|
||||
"outputs = Dense(3, activation='softmax')(second_to_last)\n",
|
||||
"\n",
|
||||
"model = Model(inputs=inputs, outputs=outputs)\n",
|
||||
"\n",
|
||||
"model.compile(RMSprop(learning_rate=0.05), 'categorical_crossentropy',\n",
|
||||
" metrics=['accuracy'])\n",
|
||||
"\n",
|
||||
"model.fit(X_train, y_train, batch_size=32,\n",
|
||||
" epochs=20, verbose=2,\n",
|
||||
" validation_data=(X_test, y_test),\n",
|
||||
" callbacks=[checkpointer, earlystopper, tensorboard])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Run Tensorboard with the command:\n",
|
||||
"\n",
|
||||
" tensorboard --logdir /tmp/udemy/tensorboard/\n",
|
||||
" \n",
|
||||
"and open your browser at http://localhost:6006"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.7.10"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
||||
@@ -0,0 +1,365 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# Convolutional Neural Networks Exercises Solution"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import pandas as pd\n",
|
||||
"import numpy as np\n",
|
||||
"%matplotlib inline\n",
|
||||
"import matplotlib.pyplot as plt"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from tensorflow.keras.utils import to_categorical"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from tensorflow.keras.models import Sequential\n",
|
||||
"from tensorflow.keras.layers import Dense, Conv2D, MaxPool2D, Flatten\n",
|
||||
"import tensorflow.keras.backend as K"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {
|
||||
"collapsed": true
|
||||
},
|
||||
"source": [
|
||||
"### Exercise 1\n",
|
||||
"\n",
|
||||
"You've been hired by a shipping company to overhaul the way they route mail, parcels and packages. They want to build an image recognition system capable of recognizing the digits in the zipcode on a package, so that it can be automatically routed to the correct location.\n",
|
||||
"You are tasked to build the digit recognition system. Luckily, you can rely on the MNIST dataset for the intial training of your model!\n",
|
||||
"\n",
|
||||
"Build a deep convolutional neural network with at least two convolutional and two pooling layers before the fully connected layer.\n",
|
||||
"\n",
|
||||
"- Start from the network we have just built\n",
|
||||
"- Insert a `Conv2D` layer after the first `MaxPool2D`, give it 64 filters.\n",
|
||||
"- Insert a `MaxPool2D` after that one\n",
|
||||
"- Insert an `Activation` layer\n",
|
||||
"- retrain the model\n",
|
||||
"- does performance improve?\n",
|
||||
"- how many parameters does this new model have? More or less than the previous model? Why?\n",
|
||||
"- how long did this second model take to train? Longer or shorter than the previous model? Why?\n",
|
||||
"- did it perform better or worse than the previous model?"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from tensorflow.keras.datasets import mnist"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"(X_train, y_train), (X_test, y_test) = mnist.load_data(('/tmp/mnist.npz'))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X_train.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
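"# scale pixel values to [0, 1] and add the single channel dimension expected by Conv2D\n",
|
||||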
"X_train = X_train.astype('float32') / 255.0\n",
|
||||
"X_test = X_test.astype('float32') / 255.0\n",
|
||||
"\n",
|
||||
"X_train = X_train.reshape(-1, 28, 28, 1)\n",
|
||||
"X_test = X_test.reshape(-1, 28, 28, 1)\n",
|
||||
"\n",
|
||||
"y_train_cat = to_categorical(y_train, 10)\n",
|
||||
"y_test_cat = to_categorical(y_test, 10)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"K.clear_session()\n",
|
||||
"\n",
|
||||
"model = Sequential()\n",
|
||||
"\n",
|
||||
"model.add(Conv2D(32, (3, 3), activation='relu', input_shape=(28, 28, 1)))\n",
|
||||
"model.add(MaxPool2D(pool_size=(2, 2)))\n",
|
||||
"\n",
|
||||
"model.add(Conv2D(64, (3, 3), activation='relu'))\n",
|
||||
"model.add(MaxPool2D(pool_size=(2, 2)))\n",
|
||||
"\n",
|
||||
"model.add(Flatten())\n",
|
||||
"\n",
|
||||
"model.add(Dense(128, activation='relu'))\n",
|
||||
"\n",
|
||||
"model.add(Dense(10, activation='softmax'))\n",
|
||||
"\n",
|
||||
"model.compile(loss='categorical_crossentropy',\n",
|
||||
" optimizer='rmsprop',\n",
|
||||
" metrics=['accuracy'])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.summary()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.fit(X_train, y_train_cat, batch_size=128,\n",
|
||||
" epochs=2, verbose=1, validation_split=0.3)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.evaluate(X_test, y_test_cat)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Exercise 2\n",
|
||||
"\n",
|
||||
"Pleased with your performance with the digits recognition task, your boss decides to challenge you with a harder task. Their online branch allows people to upload images to a website that generates and prints a postcard that is shipped to destination. Your boss would like to know what images people are loading on the site in order to provide targeted advertising on the same page, so he asks you to build an image recognition system capable of recognizing a few objects. Luckily for you, there's a dataset ready made with a collection of labeled images. This is the [Cifar 10 Dataset](http://www.cs.toronto.edu/~kriz/cifar.html), a very famous dataset that contains images for 10 different categories:\n",
|
||||
"\n",
|
||||
"- airplane \t\t\t\t\t\t\t\t\t\t\n",
|
||||
"- automobile \t\t\t\t\t\t\t\t\t\t\n",
|
||||
"- bird \t\t\t\t\t\t\t\t\t\t\n",
|
||||
"- cat \t\t\t\t\t\t\t\t\t\t\n",
|
||||
"- deer \t\t\t\t\t\t\t\t\t\t\n",
|
||||
"- dog \t\t\t\t\t\t\t\t\t\t\n",
|
||||
"- frog \t\t\t\t\t\t\t\t\t\t\n",
|
||||
"- horse \t\t\t\t\t\t\t\t\t\t\n",
|
||||
"- ship \t\t\t\t\t\t\t\t\t\t\n",
|
||||
"- truck\n",
|
||||
"\n",
|
||||
"In this exercise we will reach the limit of what you can achieve on your laptop and get ready for the next session on cloud GPUs.\n",
|
||||
"\n",
|
||||
"Here's what you have to do:\n",
|
||||
"- load the cifar10 dataset using `keras.datasets.cifar10.load_data()`\n",
|
||||
"- display a few images, see how hard/easy it is for you to recognize an object with such low resolution\n",
|
||||
"- check the shape of X_train, does it need reshape?\n",
|
||||
"- check the scale of X_train, does it need rescaling?\n",
|
||||
"- check the shape of y_train, does it need reshape?\n",
|
||||
"- build a model with the following architecture, and choose the parameters and activation functions for each of the layers:\n",
|
||||
" - conv2d\n",
|
||||
" - conv2d\n",
|
||||
" - maxpool\n",
|
||||
" - conv2d\n",
|
||||
" - conv2d\n",
|
||||
" - maxpool\n",
|
||||
" - flatten\n",
|
||||
" - dense\n",
|
||||
" - output\n",
|
||||
"- compile the model and check the number of parameters\n",
|
||||
"- attempt to train the model with the optimizer of your choice. How fast does training proceed?\n",
|
||||
"- If training is too slow (as expected) stop the execution and move to the next session!"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from tensorflow.keras.datasets import cifar10"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"(X_train, y_train), (X_test, y_test) = cifar10.load_data()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X_train.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"plt.imshow(X_train[1])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X_train = X_train.astype('float32') / 255.0\n",
|
||||
"X_test = X_test.astype('float32') / 255.0"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"y_train.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"y_train_cat = to_categorical(y_train, 10)\n",
|
||||
"y_test_cat = to_categorical(y_test, 10)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"y_train_cat.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model = Sequential()\n",
|
||||
"model.add(Conv2D(32, (3, 3),\n",
|
||||
" padding='same',\n",
|
||||
" input_shape=(32, 32, 3),\n",
|
||||
" activation='relu'))\n",
|
||||
"model.add(Conv2D(32, (3, 3), activation='relu'))\n",
|
||||
"model.add(MaxPool2D(pool_size=(2, 2)))\n",
|
||||
"\n",
|
||||
"model.add(Conv2D(64, (3, 3), padding='same', activation='relu'))\n",
|
||||
"model.add(Conv2D(64, (3, 3), activation='relu'))\n",
|
||||
"model.add(MaxPool2D(pool_size=(2, 2)))\n",
|
||||
"\n",
|
||||
"model.add(Flatten())\n",
|
||||
"model.add(Dense(512, activation='relu'))\n",
|
||||
"model.add(Dense(10, activation='softmax'))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.compile(loss='categorical_crossentropy',\n",
|
||||
" optimizer='rmsprop',\n",
|
||||
" metrics=['accuracy'])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.summary()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.fit(X_train, y_train_cat,\n",
|
||||
" batch_size=32,\n",
|
||||
" epochs=2,\n",
|
||||
" validation_data=(X_test, y_test_cat),\n",
|
||||
" shuffle=True)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.7.10"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
||||
333
solutions/8 Recurrent Neural Networks Exercises Solutions.ipynb
Normal file
333
solutions/8 Recurrent Neural Networks Exercises Solutions.ipynb
Normal file
@@ -0,0 +1,333 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# Recurrent Neural Networks"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import pandas as pd\n",
|
||||
"import numpy as np\n",
|
||||
"%matplotlib inline\n",
|
||||
"import matplotlib.pyplot as plt"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Time series forecasting"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from pandas.tseries.offsets import MonthEnd\n",
|
||||
"\n",
|
||||
"df = pd.read_csv('../data/cansim-0800020-eng-6674700030567901031.csv',\n",
|
||||
" skiprows=6, skipfooter=9,\n",
|
||||
" engine='python')\n",
|
||||
"\n",
|
||||
"df['Adjustments'] = pd.to_datetime(df['Adjustments']) + MonthEnd(1)\n",
|
||||
"df = df.set_index('Adjustments')\n",
|
||||
"df.head()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"split_date = pd.Timestamp('01-01-2011')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"train = df.loc[:split_date, ['Unadjusted']]\n",
|
||||
"test = df.loc[split_date:, ['Unadjusted']]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from sklearn.preprocessing import MinMaxScaler\n",
|
||||
"\n",
|
||||
"sc = MinMaxScaler()\n",
|
||||
"\n",
|
||||
"train_sc = sc.fit_transform(train)\n",
|
||||
"test_sc = sc.transform(test)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
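"# build 12 lagged copies of the scaled series so each sample carries the previous 12 months as features\n",
|
||||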
"train_sc_df = pd.DataFrame(train_sc, columns=['Scaled'], index=train.index)\n",
|
||||
"test_sc_df = pd.DataFrame(test_sc, columns=['Scaled'], index=test.index)\n",
|
||||
"\n",
|
||||
"for s in range(1, 13):\n",
|
||||
" train_sc_df['shift_{}'.format(s)] = train_sc_df['Scaled'].shift(s)\n",
|
||||
" test_sc_df['shift_{}'.format(s)] = test_sc_df['Scaled'].shift(s)\n",
|
||||
"\n",
|
||||
"X_train = train_sc_df.dropna().drop('Scaled', axis=1)\n",
|
||||
"y_train = train_sc_df.dropna()[['Scaled']]\n",
|
||||
"\n",
|
||||
"X_test = test_sc_df.dropna().drop('Scaled', axis=1)\n",
|
||||
"y_test = test_sc_df.dropna()[['Scaled']]\n",
|
||||
"\n",
|
||||
"X_train = X_train.values\n",
|
||||
"X_test= X_test.values\n",
|
||||
"\n",
|
||||
"y_train = y_train.values\n",
|
||||
"y_test = y_test.values"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X_train.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Exercise 1\n",
|
||||
"\n",
|
||||
"In the model above we reshaped the input shape to: `(num_samples, 1, 12)`, i.e. we treated a window of 12 months as a vector of 12 coordinates that we simultaneously passed to all the LSTM nodes. An alternative way to look at the problem is to reshape the input to `(num_samples, 12, 1)`. This means we consider each input window as a sequence of 12 values that we will pass in sequence to the LSTM. In principle this looks like a more accurate description of our situation. But does it yield better predictions? Let's check it.\n",
|
||||
"\n",
|
||||
"- Reshape `X_train` and `X_test` so that they represent a set of univariate sequences\n",
|
||||
"- retrain the same LSTM(6) model, you'll have to adapt the `input_shape`\n",
|
||||
"- check the performance of this new model, is it better at predicting the test data?"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
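"# treat each window of 12 months as a univariate sequence of length 12 with 1 feature per step\n",
|
||||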
"X_train_t = X_train.reshape(X_train.shape[0], 12, 1)\n",
|
||||
"X_test_t = X_test.reshape(X_test.shape[0], 12, 1)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X_train_t.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from tensorflow.keras.models import Sequential\n",
|
||||
"from tensorflow.keras.layers import LSTM, Dense\n",
|
||||
"import tensorflow.keras.backend as K\n",
|
||||
"from tensorflow.keras.callbacks import EarlyStopping"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"K.clear_session()\n",
|
||||
"model = Sequential()\n",
|
||||
"\n",
|
||||
"model.add(LSTM(6, input_shape=(12, 1)))\n",
|
||||
"\n",
|
||||
"model.add(Dense(1))\n",
|
||||
"\n",
|
||||
"model.compile(loss='mean_squared_error', optimizer='adam')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.summary()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"early_stop = EarlyStopping(monitor='loss', patience=1, verbose=1)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.fit(X_train_t, y_train, epochs=600,\n",
|
||||
" batch_size=32, verbose=0)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"y_pred = model.predict(X_test_t)\n",
|
||||
"plt.plot(y_test)\n",
|
||||
"plt.plot(y_pred)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {
|
||||
"collapsed": true
|
||||
},
|
||||
"source": [
|
||||
"## Exercise 2\n",
|
||||
"\n",
|
||||
"RNN models can be applied to images too. In general we can apply them to any data where there's a connnection between nearby units. Let's see how we can easily build a model that works with images.\n",
|
||||
"\n",
|
||||
"- Load the MNIST data, by now you should be able to do it blindfolded :)\n",
|
||||
"- reshape it so that an image looks like a long sequence of pixels\n",
|
||||
"- create a recurrent model and train it on the training data\n",
|
||||
"- how does it perform compared to a fully connected? How does it compare to Convolutional Neural Networks?\n",
|
||||
"\n",
|
||||
"(feel free to run this exercise on a cloud GPU if it's too slow on your laptop)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from tensorflow.keras.datasets import mnist\n",
|
||||
"from tensorflow.keras.utils import to_categorical"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"(X_train, y_train), (X_test, y_test) = mnist.load_data()\n",
|
||||
"X_train = X_train.astype('float32') / 255.0\n",
|
||||
"X_test = X_test.astype('float32') / 255.0\n",
|
||||
"y_train_cat = to_categorical(y_train, 10)\n",
|
||||
"y_test_cat = to_categorical(y_test, 10)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
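"# flatten each 28x28 image into a sequence of 784 single-pixel time steps for the LSTM\n",
|
||||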
"X_train = X_train.reshape(X_train.shape[0], -1, 1)\n",
|
||||
"X_test = X_test.reshape(X_test.shape[0], -1, 1)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"print(X_train.shape)\n",
|
||||
"print(X_test.shape)\n",
|
||||
"print(y_train_cat.shape)\n",
|
||||
"print(y_test_cat.shape)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# define the model\n",
|
||||
"K.clear_session()\n",
|
||||
"model = Sequential()\n",
|
||||
"model.add(LSTM(32, input_shape=X_train.shape[1:]))\n",
|
||||
"model.add(Dense(10, activation='softmax'))\n",
|
||||
"\n",
|
||||
"# compile the model\n",
|
||||
"model.compile(loss='categorical_crossentropy',\n",
|
||||
" optimizer='rmsprop',\n",
|
||||
" metrics=['accuracy'])\n",
|
||||
"\n",
|
||||
"model.fit(X_train, y_train_cat,\n",
|
||||
" batch_size=32,\n",
|
||||
" epochs=100,\n",
|
||||
" validation_split=0.3,\n",
|
||||
" shuffle=True,\n",
|
||||
" verbose=2,\n",
|
||||
" )\n",
|
||||
"\n",
|
||||
"model.evaluate(X_test, y_test_cat)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.7.10"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
||||
520
solutions/9 Improving performance Exercises Solutions.ipynb
Normal file
@@ -0,0 +1,520 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# 9 Improving performance Exercises Solutions"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import pandas as pd\n",
|
||||
"import numpy as np\n",
|
||||
"%matplotlib inline\n",
|
||||
"import matplotlib.pyplot as plt"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Exercise 1\n",
|
||||
"\n",
|
||||
"- Reload the IMDB data keeping only the first 20000 most common words\n",
|
||||
"- pad the reviews to a shorter length (eg. 70 or 80), this time make sure you keep the first part of the review if it's longer than the maximum length\n",
|
||||
"- re run the model (remember to set max_features correctly)\n",
|
||||
"- does it train faster this time?\n",
|
||||
"- do you get a better performance?"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from tensorflow.keras.datasets import imdb\n",
|
||||
"from tensorflow.keras.preprocessing.sequence import pad_sequences\n",
|
||||
"from tensorflow.keras.models import Sequential\n",
|
||||
"from tensorflow.keras.layers import Embedding, LSTM, Dense"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"max_features = 20000\n",
|
||||
"skip_top = 200"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"(X_train, y_train), (X_test, y_test) = imdb.load_data('/tmp/imdb.npz',\n",
|
||||
" num_words=max_features,\n",
|
||||
" start_char=1,\n",
|
||||
" oov_char=2,\n",
|
||||
" index_from=3)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X_train.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"maxlen = 80"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X_train_pad = pad_sequences(X_train, maxlen=maxlen, truncating='post')\n",
|
||||
"X_test_pad = pad_sequences(X_test, maxlen=maxlen, truncating='post')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model = Sequential()\n",
|
||||
"model.add(Embedding(max_features, 128))\n",
|
||||
"model.add(LSTM(64, dropout=0.2, recurrent_dropout=0.2))\n",
|
||||
"model.add(Dense(1, activation='sigmoid'))\n",
|
||||
"\n",
|
||||
"model.compile(loss='binary_crossentropy',\n",
|
||||
" optimizer='adam',\n",
|
||||
" metrics=['accuracy'])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X_train[0]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.fit(X_train_pad, y_train,\n",
|
||||
" batch_size=32,\n",
|
||||
" epochs=2,\n",
|
||||
" validation_split=0.3)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"score, acc = model.evaluate(X_test_pad, y_test)\n",
|
||||
"print('Test score:', score)\n",
|
||||
"print('Test accuracy:', acc)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Exercise 2\n",
|
||||
"\n",
|
||||
"- Reload the digits data as above\n",
|
||||
"- define a function repeated_training_reg_dropout that adds regularization and dropout to a fully connected network\n",
|
||||
"- compare the performance with/witouth dropout and regularization like we did for batch normalization\n",
|
||||
"- do you get a better performance?"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from sklearn.datasets import load_digits\n",
|
||||
"from tensorflow.keras.utils import to_categorical\n",
|
||||
"from sklearn.model_selection import train_test_split\n",
|
||||
"from tensorflow.keras.layers import Dropout\n",
|
||||
"import tensorflow.keras.backend as K"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"digits = load_digits()\n",
|
||||
"X, y = digits.data, digits.target\n",
|
||||
"y_cat = to_categorical(y)\n",
|
||||
"\n",
|
||||
"X_train, X_test, y_train, y_test = train_test_split(X, y_cat, test_size=0.3)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"def repeated_training_reg_dropout(X_train,\n",
|
||||
" y_train,\n",
|
||||
" X_test,\n",
|
||||
" y_test,\n",
|
||||
" units=512,\n",
|
||||
" activation='sigmoid',\n",
|
||||
" optimizer='sgd',\n",
|
||||
" do_dropout=False,\n",
|
||||
" rate=0.3,\n",
|
||||
" kernel_regularizer='l2',\n",
|
||||
" epochs=10,\n",
|
||||
" repeats=3):\n",
|
||||
" histories = []\n",
|
||||
" \n",
|
||||
" for repeat in range(repeats):\n",
|
||||
" K.clear_session()\n",
|
||||
"\n",
|
||||
" model = Sequential()\n",
|
||||
" \n",
|
||||
" # first fully connected layer\n",
|
||||
" model.add(Dense(units,\n",
|
||||
" input_shape=X_train.shape[1:],\n",
|
||||
" kernel_initializer='normal',\n",
|
||||
" kernel_regularizer=kernel_regularizer,\n",
|
||||
" activation=activation))\n",
|
||||
" if do_dropout:\n",
|
||||
" model.add(Dropout(rate))\n",
|
||||
"\n",
|
||||
" # second fully connected layer\n",
|
||||
" model.add(Dense(units,\n",
|
||||
" kernel_initializer='normal',\n",
|
||||
" kernel_regularizer=kernel_regularizer,\n",
|
||||
" activation=activation))\n",
|
||||
" if do_dropout:\n",
|
||||
" model.add(Dropout(rate))\n",
|
||||
"\n",
|
||||
" # third fully connected layer\n",
|
||||
" model.add(Dense(units,\n",
|
||||
" kernel_initializer='normal',\n",
|
||||
" kernel_regularizer=kernel_regularizer,\n",
|
||||
" activation=activation))\n",
|
||||
" if do_dropout:\n",
|
||||
" model.add(Dropout(rate))\n",
|
||||
"\n",
|
||||
" # output layer\n",
|
||||
" model.add(Dense(10, activation='softmax'))\n",
|
||||
" \n",
|
||||
" model.compile(optimizer,\n",
|
||||
" 'categorical_crossentropy',\n",
|
||||
" metrics=['accuracy'])\n",
|
||||
"\n",
|
||||
" h = model.fit(X_train, y_train, validation_data=(X_test, y_test), epochs=epochs, verbose=0)\n",
|
||||
" histories.append([h.history['accuracy'], h.history['val_accuracy']])\n",
|
||||
" print(repeat, end=' ')\n",
|
||||
"\n",
|
||||
" histories = np.array(histories)\n",
|
||||
" \n",
|
||||
" # calculate mean and standard deviation across repeats:\n",
|
||||
" mean_acc = histories.mean(axis=0)\n",
|
||||
" std_acc = histories.std(axis=0)\n",
|
||||
" print()\n",
|
||||
" \n",
|
||||
" return mean_acc[0], std_acc[0], mean_acc[1], std_acc[1]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"mean_acc, std_acc, mean_acc_val, std_acc_val = repeated_training_reg_dropout(X_train,\n",
|
||||
" y_train,\n",
|
||||
" X_test,\n",
|
||||
" y_test,\n",
|
||||
" do_dropout=False)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"mean_acc_do, std_acc_do, mean_acc_val_do, std_acc_val_do = repeated_training_reg_dropout(X_train,\n",
|
||||
" y_train,\n",
|
||||
" X_test,\n",
|
||||
" y_test,\n",
|
||||
" do_dropout=True)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"def plot_mean_std(m, s):\n",
|
||||
" plt.plot(m)\n",
|
||||
" plt.fill_between(range(len(m)), m-s, m+s, alpha=0.1)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"plot_mean_std(mean_acc, std_acc)\n",
|
||||
"plot_mean_std(mean_acc_val, std_acc_val)\n",
|
||||
"plot_mean_std(mean_acc_do, std_acc_do)\n",
|
||||
"plot_mean_std(mean_acc_val_do, std_acc_val_do)\n",
|
||||
"plt.ylim(0, 1.01)\n",
|
||||
"plt.title(\"Dropout and Regularization Accuracy\")\n",
|
||||
"plt.xlabel('Epochs')\n",
|
||||
"plt.ylabel('Accuracy')\n",
|
||||
"plt.legend(['Train', 'Test', 'Train with Dropout and Regularization', 'Test with Dropout and Regularization'], loc='best')"
|
||||
]
|
||||
},
|
||||
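{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"To answer the last question numerically, the cell below is a minimal sketch that prints the final-epoch validation accuracy (mean +/- standard deviation over the repeats) with and without dropout and regularization."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# final-epoch validation accuracy, mean +/- std over the repeats (sketch)\n",
|
||||
"print('plain:                    {:.3f} +/- {:.3f}'.format(mean_acc_val[-1], std_acc_val[-1]))\n",
|
||||
"print('dropout + regularization: {:.3f} +/- {:.3f}'.format(mean_acc_val_do[-1], std_acc_val_do[-1]))"
|
||||
]
|
||||
},
|
||||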
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Exercise 3\n",
|
||||
"\n",
|
||||
"This is a very long and complex exercise, that should give you an idea of a real world scenario. Feel free to look at the solution if you feel lost. Also, feel free to run this with a GPU, in which case you don't need to download the data.\n",
|
||||
"\n",
|
||||
"If you are running this locally, download and unpack the male/female pictures from [here](https://www.dropbox.com/s/nov493om2jmh2gp/male_female.tgz?dl=0). These images and labels were obtained from [Crowdflower](https://www.crowdflower.com/data-for-everyone/).\n",
|
||||
"\n",
|
||||
"Your goal is to build an image classifier that will recognize the gender of a person from pictures.\n",
|
||||
"\n",
|
||||
"- Have a look at the directory structure and inspect a couple of pictures\n",
|
||||
"- Design a model that will take a color image of size 64x64 as input and return a binary output (female=0/male=1)\n",
|
||||
"- Feel free to introduce any regularization technique in your model (Dropout, Batch Normalization, Weight Regularization)\n",
|
||||
"- Compile your model with an optimizer of your choice\n",
|
||||
"- Using `ImageDataGenerator`, define a train generator that will augment your images with some geometric transformations. Feel free to choose the parameters that make sense to you.\n",
|
||||
"- Define also a test generator, whose only purpose is to rescale the pixels by 1./255\n",
|
||||
"- use the function `flow_from_directory` to generate batches from the train and test folders. Make sure you set the `target_size` to 64x64.\n",
|
||||
"- Use the `model.fit_generator` function to fit the model on the batches generated from the ImageDataGenerator. Since you are streaming and augmenting the data in real time you will have to decide how many batches make an epoch and how many epochs you want to run\n",
|
||||
"- Train your model (you should get to at least 85% accuracy)\n",
|
||||
"- Once you are satisfied with your training, check a few of the misclassified pictures. Are those sensible errors?"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# If you are running this locally\n",
|
||||
"# uncomment the next 4 lines to download, extract and set the data path:\n",
|
||||
"# !wget 'https://www.dropbox.com/s/nov493om2jmh2gp/male_female.tgz?dl=1' -O ../data/male_female.tgz\n",
|
||||
"# data_path = '../data/male_female'\n",
|
||||
"# !mkdir -p {data_path}\n",
|
||||
"# !tar -xzvf ../data/male_female.tgz --directory {data_path}"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from tensorflow.keras.layers import Conv2D\n",
|
||||
"from tensorflow.keras.layers import MaxPooling2D\n",
|
||||
"from tensorflow.keras.layers import Flatten\n",
|
||||
"from tensorflow.keras.layers import BatchNormalization\n",
|
||||
"from itertools import islice\n",
|
||||
"from tensorflow.keras.preprocessing.image import ImageDataGenerator"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"K.clear_session()\n",
|
||||
"\n",
|
||||
"model = Sequential()\n",
|
||||
"model.add(Conv2D(32, (3, 3), input_shape = (64, 64, 3), activation = 'relu'))\n",
|
||||
"model.add(MaxPooling2D(pool_size = (2, 2)))\n",
|
||||
"model.add(BatchNormalization())\n",
|
||||
"\n",
|
||||
"model.add(Conv2D(64, (3, 3), activation = 'relu'))\n",
|
||||
"model.add(MaxPooling2D(pool_size = (2, 2)))\n",
|
||||
"model.add(BatchNormalization())\n",
|
||||
"\n",
|
||||
"model.add(Conv2D(64, (3, 3), activation = 'relu'))\n",
|
||||
"model.add(MaxPooling2D(pool_size = (2, 2)))\n",
|
||||
"model.add(BatchNormalization())\n",
|
||||
"\n",
|
||||
"model.add(Flatten())\n",
|
||||
"\n",
|
||||
"model.add(Dense(128, activation = 'relu'))\n",
|
||||
"model.add(Dense(1, activation = 'sigmoid'))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.compile(optimizer = 'adam',\n",
|
||||
" loss = 'binary_crossentropy',\n",
|
||||
" metrics = ['accuracy'])\n",
|
||||
"\n",
|
||||
"model.summary()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"train_gen = ImageDataGenerator(rescale = 1./255,\n",
|
||||
" width_shift_range=0.1,\n",
|
||||
" height_shift_range=0.1,\n",
|
||||
" rotation_range = 10,\n",
|
||||
" shear_range = 0.2,\n",
|
||||
" zoom_range = 0.2,\n",
|
||||
" horizontal_flip = True)\n",
|
||||
"\n",
|
||||
"test_gen = ImageDataGenerator(rescale = 1./255)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"train = train_gen.flow_from_directory(data_path + '/train',\n",
|
||||
" target_size = (64, 64),\n",
|
||||
" batch_size = 16,\n",
|
||||
" class_mode = 'binary')\n",
|
||||
"\n",
|
||||
"test = test_gen.flow_from_directory(data_path + '/test',\n",
|
||||
" target_size = (64, 64),\n",
|
||||
" batch_size = 16,\n",
|
||||
" class_mode = 'binary')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model.fit(train,\n",
|
||||
" steps_per_epoch = 800,\n",
|
||||
" epochs = 200,\n",
|
||||
" validation_data = test,\n",
|
||||
" validation_steps = 200)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X_test = []\n",
|
||||
"y_test = []\n",
|
||||
"for ts in islice(test, 50):\n",
|
||||
" X_test.append(ts[0])\n",
|
||||
" y_test.append(ts[1])\n",
|
||||
"\n",
|
||||
"X_test = np.concatenate(X_test)\n",
|
||||
"y_test = np.concatenate(y_test)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"y_pred = model.predict_classes(X_test).ravel()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"np.argwhere(y_test != y_pred).ravel()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"plt.imshow(X_test[14])"
|
||||
]
|
||||
},
|
||||
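{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Instead of looking at a single hard-coded index, the cell below is a minimal sketch that displays a few of the misclassified pictures together with their predicted and true labels (the number of pictures shown is an arbitrary choice)."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# show up to 8 misclassified test images with predicted vs true label (sketch)\n",
|
||||
"errors = np.argwhere(y_test != y_pred).ravel()\n",
|
||||
"\n",
|
||||
"plt.figure(figsize=(16, 4))\n",
|
||||
"for i, idx in enumerate(errors[:8]):\n",
|
||||
"    plt.subplot(1, 8, i + 1)\n",
|
||||
"    plt.imshow(X_test[idx])\n",
|
||||
"    plt.title('pred: {}, true: {}'.format(y_pred[idx], int(y_test[idx])))\n",
|
||||
"    plt.axis('off')\n",
|
||||
"plt.tight_layout()"
|
||||
]
|
||||
},
|
||||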
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.7.10"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
||||
77
tests/test_nb.py
Normal file
@@ -0,0 +1,77 @@
|
||||
# tests that take too long to execute on Travis are temporarily commented out
|
||||
# TODO: find a way to fix this
|
||||
|
||||
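# One possible alternative (an untested sketch): instead of commenting slow tests out,
|
||||
# mark them and deselect them on CI, e.g.
|
||||
#
|
||||
#     import pytest
|
||||
#
|
||||
#     @pytest.mark.slow
|
||||
#     def test_some_long_notebook():
|
||||
#         _exec_notebook('...')
|
||||
#
|
||||
# and run on CI with: py.test -v -m "not slow"
|
||||
# (the "slow" marker would need to be registered in pytest.ini or setup.cfg)
|
||||
|
||||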
import subprocess
|
||||
import tempfile
|
||||
|
||||
|
||||
def _exec_notebook(path):
|
||||
with tempfile.NamedTemporaryFile(suffix=".ipynb") as fout:
|
||||
args = ["jupyter", "nbconvert", "--to", "notebook", "--execute",
|
||||
"--ExecutePreprocessor.timeout=1000",
|
||||
"--output", fout.name, path]
|
||||
subprocess.check_call(args)
|
||||
|
||||
|
||||
def test_0():
|
||||
_exec_notebook('course/0_Check_Environment.ipynb')
|
||||
|
||||
|
||||
def test_1():
|
||||
_exec_notebook('course/1 First Deep Learning Model.ipynb')
|
||||
|
||||
|
||||
def test_2():
|
||||
_exec_notebook('course/2 Data.ipynb')
|
||||
|
||||
|
||||
def test_3():
|
||||
_exec_notebook('course/3 Machine Learning.ipynb')
|
||||
|
||||
|
||||
def test_4():
|
||||
_exec_notebook('course/4 Deep Learning Intro.ipynb')
|
||||
|
||||
|
||||
def test_5():
|
||||
_exec_notebook('course/5 Gradient Descent.ipynb')
|
||||
|
||||
|
||||
def test_6():
|
||||
_exec_notebook('course/6 Convolutional Neural Networks.ipynb')
|
||||
|
||||
|
||||
def test_8():
|
||||
_exec_notebook('course/8 Recurrent Neural Networks.ipynb')
|
||||
|
||||
|
||||
def test_9():
|
||||
_exec_notebook('course/9 Improving performance.ipynb')
|
||||
|
||||
|
||||
def test_2_sol():
|
||||
_exec_notebook('solutions/2 Data exploration Exercises Solution.ipynb')
|
||||
|
||||
|
||||
def test_3_sol():
|
||||
_exec_notebook('solutions/3 Machine Learning Exercises Solution.ipynb')
|
||||
|
||||
|
||||
def test_4_sol():
|
||||
_exec_notebook('solutions/4 Deep Learning Intro Exercises Solution.ipynb')
|
||||
|
||||
|
||||
def test_5_sol():
|
||||
_exec_notebook('solutions/5 Gradient Descent Exercises Solution.ipynb')
|
||||
|
||||
|
||||
def test_6_sol():
|
||||
_exec_notebook('solutions/6 Convolutional Neural Networks Exercises Solution.ipynb')
|
||||
|
||||
|
||||
def test_8_sol():
|
||||
_exec_notebook('solutions/8 Recurrent Neural Networks Exercises Solutions.ipynb')
|
||||
|
||||
|
||||
def test_9_sol():
|
||||
_exec_notebook('solutions/9 Improving performance Exercises Solutions.ipynb')
|
||||