打赏
# Unfreeze VGG16, then re-freeze every layer that comes before
# 'block5_conv1' so only the final convolutional block is trainable.
conv_base.trainable = True
found_start = False
for layer in conv_base.layers:
    found_start = found_start or (layer.name == 'block5_conv1')
    layer.trainable = found_start
# Recompile with a very low learning rate for fine-tuning so the
# pre-trained block5 weights are only nudged, not destroyed.
# 'learning_rate' replaces the deprecated 'lr' keyword (removed in newer Keras).
model.compile(loss='binary_crossentropy',
              optimizer=optimizers.RMSprop(learning_rate=1e-5),
              metrics=['acc'])
# Final accuracy check on the held-out test set.
# 50 steps x batch_size 20 covers all 1000 test images.
test_generator = test_datagen.flow_from_directory(
    test_dir,
    target_size=(150, 150),
    batch_size=20,
    class_mode='binary')
test_loss, test_acc = model.evaluate(test_generator, steps=50)
print('test acc:', test_acc)
博客对应课程的视频位置:
import pandas as pd import numpy as np import tensorflow as tf import matplotlib.pyplot as plt
In [2]:
# Load the VGG16 convolutional base pre-trained on ImageNet.
from tensorflow.keras.applications import VGG16

# include_top=False drops the fully-connected classifier head so we can
# attach our own; weights are downloaded once and cached locally
# (e.g. ~/.keras/models/vgg16_weights_tf_dim_ordering_tf_kernels_notop.h5).
conv_base = VGG16(weights='imagenet',
                  include_top=False,
                  input_shape=(150, 150, 3))
In [3]:
import os

# Root of the small cats-vs-dogs dataset.
# NOTE(review): backslashes appear to have been lost from this Windows
# path during scraping — verify against the original notebook.
base_dir = 'E:78_recorded_lesson 01_course_githubAI_dataSetdogs-vs-catscats_and_dogs_small'

# Derive the three standard split directories in one pass.
train_dir, validation_dir, test_dir = (
    os.path.join(base_dir, sub) for sub in ('train', 'validation', 'test'))
In [4]:
from tensorflow.keras import layers, models

# Stack a small dense classifier on top of the VGG16 convolutional base:
# flatten the (4, 4, 512) feature maps, one hidden ReLU layer, and a
# single sigmoid unit for binary cat/dog classification.
model = models.Sequential([
    conv_base,
    layers.Flatten(),
    layers.Dense(256, activation='relu'),
    layers.Dense(1, activation='sigmoid'),
])
model.summary()
Model: "sequential" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= vgg16 (Functional) (None, 4, 4, 512) 14714688 _________________________________________________________________ flatten (Flatten) (None, 8192) 0 _________________________________________________________________ dense (Dense) (None, 256) 2097408 _________________________________________________________________ dense_1 (Dense) (None, 1) 257 ================================================================= Total params: 16,812,353 Trainable params: 16,812,353 Non-trainable params: 0 _________________________________________________________________
In [5]:
# Freeze the entire VGG16 base so its pre-trained weights are not updated
# during the first (feature-extraction) training phase; only the new
# dense classifier head will learn.
conv_base.trainable = False
In [6]:
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras import optimizers

# Augment only the training data; validation/test images get rescaling
# only, so that evaluation reflects the real input distribution.
train_datagen = ImageDataGenerator(
    rescale=1./255,
    rotation_range=40,
    width_shift_range=0.2,
    height_shift_range=0.2,
    shear_range=0.2,
    zoom_range=0.2,
    horizontal_flip=True,
    fill_mode='nearest')

test_datagen = ImageDataGenerator(rescale=1./255)

train_generator = train_datagen.flow_from_directory(
    train_dir,                  # target directory
    target_size=(150, 150),     # resize every image to 150x150
    batch_size=20,
    class_mode='binary')        # binary labels to match binary_crossentropy

validation_generator = test_datagen.flow_from_directory(
    validation_dir,
    target_size=(150, 150),
    batch_size=20,
    class_mode='binary')

# 'learning_rate' replaces the deprecated 'lr' keyword (removed in newer Keras).
model.compile(loss='binary_crossentropy',
              optimizer=optimizers.RMSprop(learning_rate=2e-5),
              metrics=['acc'])
Found 2000 images belonging to 2 classes. Found 1000 images belonging to 2 classes.
In [7]:
# Phase 1: train only the new classifier head (conv base is frozen).
# 100 steps x batch 20 = 2000 training images per epoch;
# 50 validation steps x 20 = all 1000 validation images.
history = model.fit(
    train_generator,
    steps_per_epoch=100,
    epochs=30,
    validation_data=validation_generator,
    validation_steps=50)
Epoch 1/30 100/100 [==============================] - 42s 423ms/step - loss: 0.5806 - acc: 0.7040 - val_loss: 0.4488 - val_acc: 0.8140 Epoch 2/30 100/100 [==============================] - 19s 187ms/step - loss: 0.4826 - acc: 0.7850 - val_loss: 0.3632 - val_acc: 0.8580 Epoch 3/30 100/100 [==============================] - 19s 189ms/step - loss: 0.4398 - acc: 0.7970 - val_loss: 0.3246 - val_acc: 0.8680 Epoch 4/30 100/100 [==============================] - 19s 190ms/step - loss: 0.4014 - acc: 0.8195 - val_loss: 0.3056 - val_acc: 0.8810 Epoch 5/30 100/100 [==============================] - 19s 191ms/step - loss: 0.3747 - acc: 0.8330 - val_loss: 0.2898 - val_acc: 0.8830 Epoch 6/30 100/100 [==============================] - 19s 194ms/step - loss: 0.3599 - acc: 0.8430 - val_loss: 0.2752 - val_acc: 0.8960 Epoch 7/30 100/100 [==============================] - 20s 202ms/step - loss: 0.3449 - acc: 0.8445 - val_loss: 0.2713 - val_acc: 0.8950 Epoch 8/30 100/100 [==============================] - 20s 195ms/step - loss: 0.3396 - acc: 0.8550 - val_loss: 0.2597 - val_acc: 0.8990 Epoch 9/30 100/100 [==============================] - 19s 194ms/step - loss: 0.3313 - acc: 0.8525 - val_loss: 0.2543 - val_acc: 0.8960 Epoch 10/30 100/100 [==============================] - 20s 199ms/step - loss: 0.3263 - acc: 0.8625 - val_loss: 0.2501 - val_acc: 0.9030 Epoch 11/30 100/100 [==============================] - 20s 199ms/step - loss: 0.3264 - acc: 0.8655 - val_loss: 0.2485 - val_acc: 0.8970 Epoch 12/30 100/100 [==============================] - 20s 197ms/step - loss: 0.3368 - acc: 0.8545 - val_loss: 0.2500 - val_acc: 0.8970 Epoch 13/30 100/100 [==============================] - 20s 204ms/step - loss: 0.3348 - acc: 0.8535 - val_loss: 0.2472 - val_acc: 0.9060 Epoch 14/30 100/100 [==============================] - 20s 198ms/step - loss: 0.3185 - acc: 0.8680 - val_loss: 0.2533 - val_acc: 0.8940 Epoch 15/30 100/100 [==============================] - 20s 199ms/step - loss: 0.3137 - acc: 0.8615 - 
val_loss: 0.2439 - val_acc: 0.9060 Epoch 16/30 100/100 [==============================] - 20s 201ms/step - loss: 0.3147 - acc: 0.8660 - val_loss: 0.2413 - val_acc: 0.9010 Epoch 17/30 100/100 [==============================] - 21s 206ms/step - loss: 0.2998 - acc: 0.8730 - val_loss: 0.2406 - val_acc: 0.9000 Epoch 18/30 100/100 [==============================] - 19s 191ms/step - loss: 0.3185 - acc: 0.8580 - val_loss: 0.2415 - val_acc: 0.9020 Epoch 19/30 100/100 [==============================] - 19s 190ms/step - loss: 0.3115 - acc: 0.8610 - val_loss: 0.2406 - val_acc: 0.8970 Epoch 20/30 100/100 [==============================] - 19s 191ms/step - loss: 0.3036 - acc: 0.8705 - val_loss: 0.2412 - val_acc: 0.9000 Epoch 21/30 100/100 [==============================] - 19s 191ms/step - loss: 0.2945 - acc: 0.8695 - val_loss: 0.2421 - val_acc: 0.8980 Epoch 22/30 100/100 [==============================] - 19s 192ms/step - loss: 0.2923 - acc: 0.8750 - val_loss: 0.2396 - val_acc: 0.9000 Epoch 23/30 100/100 [==============================] - 19s 191ms/step - loss: 0.2822 - acc: 0.8775 - val_loss: 0.2411 - val_acc: 0.9050 Epoch 24/30 100/100 [==============================] - 19s 191ms/step - loss: 0.2969 - acc: 0.8770 - val_loss: 0.2347 - val_acc: 0.9060 Epoch 25/30 100/100 [==============================] - 19s 191ms/step - loss: 0.2897 - acc: 0.8750 - val_loss: 0.2468 - val_acc: 0.8880 Epoch 26/30 100/100 [==============================] - 19s 192ms/step - loss: 0.2938 - acc: 0.8745 - val_loss: 0.2380 - val_acc: 0.9000 Epoch 27/30 100/100 [==============================] - 19s 193ms/step - loss: 0.2869 - acc: 0.8790 - val_loss: 0.2435 - val_acc: 0.9060 Epoch 28/30 100/100 [==============================] - 19s 192ms/step - loss: 0.2713 - acc: 0.8900 - val_loss: 0.2439 - val_acc: 0.8900 Epoch 29/30 100/100 [==============================] - 19s 192ms/step - loss: 0.2850 - acc: 0.8815 - val_loss: 0.2434 - val_acc: 0.9060 Epoch 30/30 100/100 [==============================] - 19s 
193ms/step - loss: 0.2723 - acc: 0.8810 - val_loss: 0.2345 - val_acc: 0.9030
In [8]:
# Plot the phase-1 training curves recorded by model.fit().
acc = history.history['acc']
val_acc = history.history['val_acc']
loss = history.history['loss']
val_loss = history.history['val_loss']
epochs = range(1, len(acc) + 1)
plt.plot(epochs, acc, 'b--', label='Training acc')
plt.plot(epochs, val_acc, 'r-', label='Validation acc')
plt.title('Training and validation accuracy')
plt.legend()
plt.figure()  # start a second figure for the loss curves
plt.plot(epochs, loss, 'b--', label='Training loss')
plt.plot(epochs, val_loss, 'r-', label='Validation loss')
plt.title('Training and validation loss')
plt.legend()
plt.show()
In [9]:
# Fine-tuning setup: unfreeze the conv base, then re-freeze every layer
# up to (but not including) 'block5_conv1' so that only the last
# convolutional block is trainable.
conv_base.trainable = True
unfreeze = False
for layer in conv_base.layers:
    if layer.name == 'block5_conv1':
        unfreeze = True
    layer.trainable = unfreeze
In [11]:
# Sanity check: print every layer of the conv base together with its
# trainable flag (only block5 layers and the final pooling should be True).
for layer in conv_base.layers:
    print(layer)
    print(layer.trainable)
<tensorflow.python.keras.engine.input_layer.InputLayer object at 0x000001FA568E0988> False <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000001FA56AE3CC8> False <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000001FA56698608> False <tensorflow.python.keras.layers.pooling.MaxPooling2D object at 0x000001FA56B64588> False <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000001FA56B6B088> False <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000001FA56B6EA48> False <tensorflow.python.keras.layers.pooling.MaxPooling2D object at 0x000001FA56B76588> False <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000001FA56B7BFC8> False <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000001FA56B71D48> False <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000001FA5D053548> False <tensorflow.python.keras.layers.pooling.MaxPooling2D object at 0x000001FA5D05D0C8> False <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000001FA5D062B88> False <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000001FA5D0653C8> False <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000001FA5D06C188> False <tensorflow.python.keras.layers.pooling.MaxPooling2D object at 0x000001FA5D06ED88> False <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000001FA5D077888> True <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000001FA5D07A348> True <tensorflow.python.keras.layers.convolutional.Conv2D object at 0x000001FA5D07DF48> True <tensorflow.python.keras.layers.pooling.MaxPooling2D object at 0x000001FA5D085B48> True
In [12]:
# Recompile for fine-tuning with an even smaller learning rate, so the
# unfrozen block5 weights are adjusted gently.
# 'learning_rate' replaces the deprecated 'lr' keyword (removed in newer Keras).
model.compile(loss='binary_crossentropy',
              optimizer=optimizers.RMSprop(learning_rate=1e-5),
              metrics=['acc'])
In [13]:
# Phase 2: fine-tune block5 together with the classifier head for
# 100 epochs, using the much smaller learning rate set above.
history = model.fit(
    train_generator,
    steps_per_epoch=100,
    epochs=100,
    validation_data=validation_generator,
    validation_steps=50)
Epoch 1/100 100/100 [==============================] - 18s 184ms/step - loss: 0.2999 - acc: 0.8730 - val_loss: 0.2151 - val_acc: 0.9170 Epoch 2/100 100/100 [==============================] - 18s 184ms/step - loss: 0.2738 - acc: 0.8815 - val_loss: 0.2047 - val_acc: 0.9200 Epoch 3/100 100/100 [==============================] - 19s 191ms/step - loss: 0.2460 - acc: 0.8980 - val_loss: 0.2385 - val_acc: 0.9090 Epoch 4/100 100/100 [==============================] - 19s 191ms/step - loss: 0.2231 - acc: 0.9075 - val_loss: 0.2395 - val_acc: 0.9000 Epoch 5/100 100/100 [==============================] - 19s 194ms/step - loss: 0.2025 - acc: 0.9140 - val_loss: 0.2632 - val_acc: 0.8970 Epoch 6/100 100/100 [==============================] - 20s 195ms/step - loss: 0.1970 - acc: 0.9190 - val_loss: 0.1916 - val_acc: 0.9290 Epoch 7/100 100/100 [==============================] - 19s 194ms/step - loss: 0.1845 - acc: 0.9235 - val_loss: 0.2077 - val_acc: 0.9230 Epoch 8/100 100/100 [==============================] - 19s 194ms/step - loss: 0.1688 - acc: 0.9365 - val_loss: 0.2087 - val_acc: 0.9250 Epoch 9/100 100/100 [==============================] - 20s 195ms/step - loss: 0.1587 - acc: 0.9350 - val_loss: 0.2156 - val_acc: 0.9200 Epoch 10/100 100/100 [==============================] - 20s 197ms/step - loss: 0.1558 - acc: 0.9400 - val_loss: 0.2253 - val_acc: 0.9170 Epoch 11/100 100/100 [==============================] - 19s 194ms/step - loss: 0.1503 - acc: 0.9345 - val_loss: 0.2221 - val_acc: 0.9160 Epoch 12/100 100/100 [==============================] - 19s 195ms/step - loss: 0.1430 - acc: 0.9470 - val_loss: 0.1952 - val_acc: 0.9290 Epoch 13/100 100/100 [==============================] - 20s 197ms/step - loss: 0.1367 - acc: 0.9480 - val_loss: 0.1805 - val_acc: 0.9340 Epoch 14/100 100/100 [==============================] - 20s 196ms/step - loss: 0.1243 - acc: 0.9520 - val_loss: 0.1899 - val_acc: 0.9350 Epoch 15/100 100/100 [==============================] - 20s 197ms/step - loss: 0.1102 - 
acc: 0.9595 - val_loss: 0.2190 - val_acc: 0.9260 Epoch 16/100 100/100 [==============================] - 20s 197ms/step - loss: 0.1191 - acc: 0.9575 - val_loss: 0.2498 - val_acc: 0.9210 Epoch 17/100 100/100 [==============================] - 20s 197ms/step - loss: 0.1210 - acc: 0.9515 - val_loss: 0.1917 - val_acc: 0.9300 Epoch 18/100 100/100 [==============================] - 20s 198ms/step - loss: 0.1224 - acc: 0.9495 - val_loss: 0.1885 - val_acc: 0.9280 Epoch 19/100 100/100 [==============================] - 20s 199ms/step - loss: 0.1040 - acc: 0.9595 - val_loss: 0.1839 - val_acc: 0.9270 Epoch 20/100 100/100 [==============================] - 20s 198ms/step - loss: 0.0983 - acc: 0.9585 - val_loss: 0.2617 - val_acc: 0.9170 Epoch 21/100 100/100 [==============================] - 20s 204ms/step - loss: 0.1028 - acc: 0.9635 - val_loss: 0.3127 - val_acc: 0.9090 Epoch 22/100 100/100 [==============================] - 20s 198ms/step - loss: 0.0953 - acc: 0.9595 - val_loss: 0.2070 - val_acc: 0.9350 Epoch 23/100 100/100 [==============================] - 20s 199ms/step - loss: 0.0859 - acc: 0.9695 - val_loss: 0.2136 - val_acc: 0.9300 Epoch 24/100 100/100 [==============================] - 20s 201ms/step - loss: 0.0758 - acc: 0.9705 - val_loss: 0.2023 - val_acc: 0.9330 Epoch 25/100 100/100 [==============================] - 20s 199ms/step - loss: 0.0990 - acc: 0.9615 - val_loss: 0.1950 - val_acc: 0.9310 Epoch 26/100 100/100 [==============================] - 20s 199ms/step - loss: 0.0873 - acc: 0.9670 - val_loss: 0.1810 - val_acc: 0.9370 Epoch 27/100 100/100 [==============================] - 20s 199ms/step - loss: 0.0791 - acc: 0.9695 - val_loss: 0.2265 - val_acc: 0.9290 Epoch 28/100 100/100 [==============================] - 20s 199ms/step - loss: 0.0751 - acc: 0.9680 - val_loss: 0.1840 - val_acc: 0.9410 Epoch 29/100 100/100 [==============================] - 20s 199ms/step - loss: 0.0659 - acc: 0.9740 - val_loss: 0.2356 - val_acc: 0.9290 Epoch 30/100 100/100 
[==============================] - 20s 200ms/step - loss: 0.0603 - acc: 0.9770 - val_loss: 0.2066 - val_acc: 0.9330 Epoch 31/100 100/100 [==============================] - 20s 198ms/step - loss: 0.0612 - acc: 0.9735 - val_loss: 0.1972 - val_acc: 0.9350 Epoch 32/100 100/100 [==============================] - 20s 199ms/step - loss: 0.0626 - acc: 0.9745 - val_loss: 0.1989 - val_acc: 0.9300 Epoch 33/100 100/100 [==============================] - 20s 199ms/step - loss: 0.0628 - acc: 0.9765 - val_loss: 0.2080 - val_acc: 0.9400 Epoch 34/100 100/100 [==============================] - 20s 199ms/step - loss: 0.0651 - acc: 0.9740 - val_loss: 0.2322 - val_acc: 0.9330 Epoch 35/100 100/100 [==============================] - 20s 199ms/step - loss: 0.0576 - acc: 0.9765 - val_loss: 0.1911 - val_acc: 0.9370 Epoch 36/100 100/100 [==============================] - 20s 199ms/step - loss: 0.0664 - acc: 0.9770 - val_loss: 0.1941 - val_acc: 0.9350 Epoch 37/100 100/100 [==============================] - 20s 199ms/step - loss: 0.0585 - acc: 0.9790 - val_loss: 0.2717 - val_acc: 0.9230 Epoch 38/100 100/100 [==============================] - 20s 200ms/step - loss: 0.0666 - acc: 0.9760 - val_loss: 0.1788 - val_acc: 0.9390 Epoch 39/100 100/100 [==============================] - 20s 200ms/step - loss: 0.0556 - acc: 0.9800 - val_loss: 0.2031 - val_acc: 0.9370 Epoch 40/100 100/100 [==============================] - 20s 198ms/step - loss: 0.0433 - acc: 0.9835 - val_loss: 0.2189 - val_acc: 0.9350 Epoch 41/100 100/100 [==============================] - 20s 200ms/step - loss: 0.0493 - acc: 0.9810 - val_loss: 0.2113 - val_acc: 0.9330 Epoch 42/100 100/100 [==============================] - 20s 200ms/step - loss: 0.0478 - acc: 0.9815 - val_loss: 0.2069 - val_acc: 0.9430 Epoch 43/100 100/100 [==============================] - 20s 200ms/step - loss: 0.0475 - acc: 0.9830 - val_loss: 0.2268 - val_acc: 0.9380 Epoch 44/100 100/100 [==============================] - 20s 200ms/step - loss: 0.0395 - acc: 0.9860 - 
val_loss: 0.3817 - val_acc: 0.9080 Epoch 45/100 100/100 [==============================] - 20s 200ms/step - loss: 0.0430 - acc: 0.9825 - val_loss: 0.2086 - val_acc: 0.9370 Epoch 46/100 100/100 [==============================] - 20s 200ms/step - loss: 0.0449 - acc: 0.9815 - val_loss: 0.1946 - val_acc: 0.9390 Epoch 47/100 100/100 [==============================] - 20s 201ms/step - loss: 0.0372 - acc: 0.9880 - val_loss: 0.2430 - val_acc: 0.9330 Epoch 48/100 100/100 [==============================] - 20s 200ms/step - loss: 0.0395 - acc: 0.9875 - val_loss: 0.2147 - val_acc: 0.9330 Epoch 49/100 100/100 [==============================] - 20s 200ms/step - loss: 0.0411 - acc: 0.9835 - val_loss: 0.2187 - val_acc: 0.9430 Epoch 50/100 100/100 [==============================] - 20s 200ms/step - loss: 0.0420 - acc: 0.9865 - val_loss: 0.2184 - val_acc: 0.9390 Epoch 51/100 100/100 [==============================] - 20s 200ms/step - loss: 0.0355 - acc: 0.9855 - val_loss: 0.2217 - val_acc: 0.9360 Epoch 52/100 100/100 [==============================] - 20s 200ms/step - loss: 0.0317 - acc: 0.9865 - val_loss: 0.2387 - val_acc: 0.9410 Epoch 53/100 100/100 [==============================] - 20s 201ms/step - loss: 0.0392 - acc: 0.9865 - val_loss: 0.2358 - val_acc: 0.9380 Epoch 54/100 100/100 [==============================] - 20s 200ms/step - loss: 0.0305 - acc: 0.9885 - val_loss: 0.2586 - val_acc: 0.9410 Epoch 55/100 100/100 [==============================] - 20s 199ms/step - loss: 0.0324 - acc: 0.9865 - val_loss: 0.2093 - val_acc: 0.9420 Epoch 56/100 100/100 [==============================] - 20s 200ms/step - loss: 0.0339 - acc: 0.9885 - val_loss: 0.2098 - val_acc: 0.9430 Epoch 57/100 100/100 [==============================] - 20s 201ms/step - loss: 0.0342 - acc: 0.9875 - val_loss: 0.2273 - val_acc: 0.9450 Epoch 58/100 100/100 [==============================] - 20s 201ms/step - loss: 0.0354 - acc: 0.9880 - val_loss: 0.2951 - val_acc: 0.9360 Epoch 59/100 100/100 
[==============================] - 20s 200ms/step - loss: 0.0280 - acc: 0.9915 - val_loss: 0.2696 - val_acc: 0.9420 Epoch 60/100 100/100 [==============================] - 20s 199ms/step - loss: 0.0347 - acc: 0.9880 - val_loss: 0.2253 - val_acc: 0.9450 Epoch 61/100 100/100 [==============================] - 20s 199ms/step - loss: 0.0300 - acc: 0.9895 - val_loss: 0.4227 - val_acc: 0.9130 Epoch 62/100 100/100 [==============================] - 20s 199ms/step - loss: 0.0345 - acc: 0.9865 - val_loss: 0.2661 - val_acc: 0.9380 Epoch 63/100 100/100 [==============================] - 20s 199ms/step - loss: 0.0370 - acc: 0.9870 - val_loss: 0.3239 - val_acc: 0.9300 Epoch 64/100 100/100 [==============================] - 20s 199ms/step - loss: 0.0280 - acc: 0.9885 - val_loss: 0.2536 - val_acc: 0.9380 Epoch 65/100 100/100 [==============================] - 20s 203ms/step - loss: 0.0227 - acc: 0.9915 - val_loss: 0.2505 - val_acc: 0.9410 Epoch 66/100 100/100 [==============================] - 20s 199ms/step - loss: 0.0274 - acc: 0.9930 - val_loss: 0.2226 - val_acc: 0.9400 Epoch 67/100 100/100 [==============================] - 20s 200ms/step - loss: 0.0229 - acc: 0.9910 - val_loss: 0.2298 - val_acc: 0.9410 Epoch 68/100 100/100 [==============================] - 20s 199ms/step - loss: 0.0231 - acc: 0.9905 - val_loss: 0.2250 - val_acc: 0.9420 Epoch 69/100 100/100 [==============================] - 20s 201ms/step - loss: 0.0336 - acc: 0.9870 - val_loss: 0.2955 - val_acc: 0.9310 Epoch 70/100 100/100 [==============================] - 20s 200ms/step - loss: 0.0230 - acc: 0.9920 - val_loss: 0.4870 - val_acc: 0.9100 Epoch 71/100 100/100 [==============================] - 20s 201ms/step - loss: 0.0353 - acc: 0.9870 - val_loss: 0.2387 - val_acc: 0.9420 Epoch 72/100 100/100 [==============================] - 20s 201ms/step - loss: 0.0256 - acc: 0.9920 - val_loss: 0.2777 - val_acc: 0.9390 Epoch 73/100 100/100 [==============================] - 20s 200ms/step - loss: 0.0323 - acc: 0.9895 - 
val_loss: 0.2311 - val_acc: 0.9420 Epoch 74/100 100/100 [==============================] - 19s 193ms/step - loss: 0.0295 - acc: 0.9890 - val_loss: 0.3283 - val_acc: 0.9340 Epoch 75/100 100/100 [==============================] - 19s 194ms/step - loss: 0.0275 - acc: 0.9920 - val_loss: 0.3358 - val_acc: 0.9330 Epoch 76/100 100/100 [==============================] - 20s 199ms/step - loss: 0.0264 - acc: 0.9905 - val_loss: 0.2475 - val_acc: 0.9390 Epoch 77/100 100/100 [==============================] - 20s 200ms/step - loss: 0.0206 - acc: 0.9940 - val_loss: 0.2863 - val_acc: 0.9400 Epoch 78/100 100/100 [==============================] - 20s 201ms/step - loss: 0.0169 - acc: 0.9925 - val_loss: 0.2389 - val_acc: 0.9450 Epoch 79/100 100/100 [==============================] - 20s 199ms/step - loss: 0.0208 - acc: 0.9935 - val_loss: 0.2606 - val_acc: 0.9390 Epoch 80/100 100/100 [==============================] - 20s 201ms/step - loss: 0.0339 - acc: 0.9890 - val_loss: 0.2165 - val_acc: 0.9390 Epoch 81/100 100/100 [==============================] - 21s 207ms/step - loss: 0.0311 - acc: 0.9885 - val_loss: 0.2396 - val_acc: 0.9450 Epoch 82/100 100/100 [==============================] - 20s 200ms/step - loss: 0.0263 - acc: 0.9905 - val_loss: 0.2196 - val_acc: 0.9450 Epoch 83/100 100/100 [==============================] - 20s 200ms/step - loss: 0.0220 - acc: 0.9915 - val_loss: 0.3002 - val_acc: 0.9320 Epoch 84/100 100/100 [==============================] - 20s 201ms/step - loss: 0.0187 - acc: 0.9920 - val_loss: 0.2295 - val_acc: 0.9440 Epoch 85/100 100/100 [==============================] - 20s 200ms/step - loss: 0.0223 - acc: 0.9915 - val_loss: 0.2608 - val_acc: 0.9430 Epoch 86/100 100/100 [==============================] - 20s 202ms/step - loss: 0.0204 - acc: 0.9930 - val_loss: 0.2567 - val_acc: 0.9380 Epoch 87/100 100/100 [==============================] - 20s 202ms/step - loss: 0.0271 - acc: 0.9880 - val_loss: 0.2265 - val_acc: 0.9380 Epoch 88/100 100/100 
[==============================] - 20s 202ms/step - loss: 0.0135 - acc: 0.9955 - val_loss: 0.2674 - val_acc: 0.9390 Epoch 89/100 100/100 [==============================] - 20s 201ms/step - loss: 0.0196 - acc: 0.9930 - val_loss: 0.4253 - val_acc: 0.9190 Epoch 90/100 100/100 [==============================] - 20s 201ms/step - loss: 0.0204 - acc: 0.9945 - val_loss: 0.2534 - val_acc: 0.9440 Epoch 91/100 100/100 [==============================] - 20s 202ms/step - loss: 0.0146 - acc: 0.9945 - val_loss: 0.3456 - val_acc: 0.9360 Epoch 92/100 100/100 [==============================] - 20s 202ms/step - loss: 0.0176 - acc: 0.9940 - val_loss: 0.2618 - val_acc: 0.9390 Epoch 93/100 100/100 [==============================] - 20s 203ms/step - loss: 0.0289 - acc: 0.9900 - val_loss: 0.3886 - val_acc: 0.9330 Epoch 94/100 100/100 [==============================] - 20s 201ms/step - loss: 0.0157 - acc: 0.9940 - val_loss: 0.2674 - val_acc: 0.9410 Epoch 95/100 100/100 [==============================] - 20s 200ms/step - loss: 0.0179 - acc: 0.9940 - val_loss: 0.2833 - val_acc: 0.9430 Epoch 96/100 100/100 [==============================] - 20s 202ms/step - loss: 0.0204 - acc: 0.9925 - val_loss: 0.2726 - val_acc: 0.9420 Epoch 97/100 100/100 [==============================] - 20s 200ms/step - loss: 0.0217 - acc: 0.9940 - val_loss: 0.3880 - val_acc: 0.9310 Epoch 98/100 100/100 [==============================] - 20s 202ms/step - loss: 0.0079 - acc: 0.9975 - val_loss: 0.3431 - val_acc: 0.9360 Epoch 99/100 100/100 [==============================] - 20s 202ms/step - loss: 0.0261 - acc: 0.9895 - val_loss: 0.2835 - val_acc: 0.9450 Epoch 100/100 100/100 [==============================] - 20s 201ms/step - loss: 0.0172 - acc: 0.9940 - val_loss: 0.2963 - val_acc: 0.9410
In [14]:
# Plot the fine-tuning (phase-2) training curves.
acc = history.history['acc']
val_acc = history.history['val_acc']
loss = history.history['loss']
val_loss = history.history['val_loss']
epochs = range(1, len(acc) + 1)
plt.plot(epochs, acc, 'b--', label='Training acc')
plt.plot(epochs, val_acc, 'r-', label='Validation acc')
plt.title('Training and validation accuracy')
plt.legend()
plt.figure()  # second figure for the loss curves
plt.plot(epochs, loss, 'b--', label='Training loss')
plt.plot(epochs, val_loss, 'r-', label='Validation loss')
plt.title('Training and validation loss')
plt.legend()
plt.show()
这些曲线看起来包含噪声。为了让图像更具可读性,你可以将每个损失和精度都替换为指数移动平均值,从而让曲线变得平滑。
In [15]:
def smooth_curve(points, factor=0.8):
    """Return an exponential moving average of *points*.

    The first point is passed through unchanged; each subsequent output
    value is previous_smoothed * factor + point * (1 - factor), which
    damps epoch-to-epoch noise in the training curves.
    """
    if not points:
        return []
    smoothed = [points[0]]
    for point in points[1:]:
        smoothed.append(smoothed[-1] * factor + point * (1 - factor))
    return smoothed
In [17]:
# Re-plot the fine-tuning curves with exponential smoothing applied,
# which makes the noisy per-epoch values easier to read.
plt.plot(epochs, smooth_curve(acc), 'b--', label='Smoothed training acc')
plt.plot(epochs, smooth_curve(val_acc), 'r-', label='Smoothed validation acc')
plt.title('Training and validation accuracy')
plt.legend()
plt.figure()  # second figure for the smoothed loss curves
plt.plot(epochs, smooth_curve(loss), 'b--', label='Smoothed training loss')
plt.plot(epochs, smooth_curve(val_loss), 'r-', label='Smoothed validation loss')
plt.title('Training and validation loss')
plt.legend()
plt.show()
In [18]:
# Final evaluation on the held-out test set:
# 50 steps x batch_size 20 covers all 1000 test images.
test_generator = test_datagen.flow_from_directory(
    test_dir,
    target_size=(150, 150),
    batch_size=20,
    class_mode='binary')

# Model.evaluate_generator is deprecated (the run's own output warns about
# it); Model.evaluate accepts generators directly.
test_loss, test_acc = model.evaluate(test_generator, steps=50)
print('test acc:', test_acc)
Found 1000 images belonging to 2 classes. WARNING:tensorflow:From <ipython-input-18-a0a3d19fae47>:7: Model.evaluate_generator (from tensorflow.python.keras.engine.training) is deprecated and will be removed in a future version. Instructions for updating: Please use Model.evaluate, which supports generators. test acc: 0.9409999847412109
相关知识
使用Python实现深度学习模型:智能宠物监控与管理
paddleocr学习笔记(四)评估、推理
Python学习笔记 2020.10.1
Python学习手册
使用Python实现深度学习模型:用户行为预测与个性化服务
基于YOLOv8深度学习的120种犬类检测与识别系统【python源码+Pyqt5界面+数据集+训练代码】目标检测、深度学习实战、狗类检测、犬种识别
【深度学习】猫狗识别TensorFlow2实验报告
基于深度学习的驾驶行为预测方法
鸟类声音识别技术综述:从传统方法到深度学习
机器学习实战笔记3(决策树与随机森林)
网址: 《python深度学习》笔记 https://m.mcbbbk.com/newsview320073.html
上一篇: 宠物医院的实践报告范文.docx |
下一篇: 训练猫的方法, |