From ac1995feb6ac45e6dfb31e35e45591ddfbc3b647 Mon Sep 17 00:00:00 2001 From: Sem van der Hoeven Date: Wed, 9 Jun 2021 16:20:50 +0200 Subject: [PATCH] comments --- exercises/Jupyter notebook CVML.ipynb | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/exercises/Jupyter notebook CVML.ipynb b/exercises/Jupyter notebook CVML.ipynb index e6bea95..484935c 100644 --- a/exercises/Jupyter notebook CVML.ipynb +++ b/exercises/Jupyter notebook CVML.ipynb @@ -2376,7 +2376,8 @@ "\n", "we gebruiken de rmsprop optimizer omdat deze goed te gebruiken is voor kleine\n", "batch sizes.\n", - "TODO loss uitleggen\n", + "We gebruiken de categorical crossentropy loss function omdat er sprake is van\n", + "meerdere categorieën.\n", "\"\"\"\n", "model.compile(loss='categorical_crossentropy',\n", " optimizer='rmsprop',\n", @@ -2633,6 +2634,8 @@ "compile het model met de binary crossentropy loss function. Dit doen we omdat\n", "we maar 2 verschillende klassen hebben.\n", "We gebruiken rmsprop als optimizer omdat we een kleine batch size hebben.\n", + "We gebruiken de binary crossentropy loss function omdat we een binaire\n", + "classificatie hebben.\n", "Deze optimizer balanceert de step size zodat deze niet te groot of te klein \n", "worden.\n", "We gebruiken de accuracy als metric omdat we daarop willen trainen.\n", @@ -2767,13 +2770,7 @@ "3/3 [==============================] - 1s 402ms/step - loss: 0.0020 - accuracy: 1.0000 - val_loss: 1.2960 - val_accuracy: 0.8000\n", "Epoch 57/100\n", "3/3 [==============================] - 1s 316ms/step - loss: 1.9244e-05 - accuracy: 1.0000 - val_loss: 1.3116 - val_accuracy: 0.8000\n", - "Epoch 58/100\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ + "Epoch 58/100\n", "3/3 [==============================] - 1s 324ms/step - loss: 1.8088e-05 - accuracy: 1.0000 - val_loss: 1.3336 - val_accuracy: 0.8000\n", "Epoch 59/100\n", "3/3 [==============================] - 1s 319ms/step - loss: 3.6851e-05 - 
accuracy: 1.0000 - val_loss: 1.4207 - val_accuracy: 0.8000\n",