banner
RustyNail

RustyNail

coder. 【blog】https://rustynail.me 【nostr】wss://ts.relays.world/ wss://relays.world/nostr

【调包侠的机器学习】 图片分类

import tensorflow as tf
from d2l import tensorflow as d2l

batch_size = 1000
train_iter, test_iter = d2l.load_data_fashion_mnist(batch_size)
net = tf.keras.models.Sequential()
# 28x28 -> 784
net.add(tf.keras.layers.Flatten(input_shape=(28, 28)))
# 随机
weight_initializer = tf.keras.initializers.RandomNormal(mean=0.0, stddev=0.01)
net.add(tf.keras.layers.Dense(10, kernel_initializer=weight_initializer))
loss = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)
trainer = tf.keras.optimizers.SGD(learning_rate=.01)
net.compile(optimizer=trainer, loss=loss, metrics=[tf.keras.metrics.SparseCategoricalAccuracy()])
for X, y in train_iter:
    history = net.fit( X, y, batch_size=100, epochs=1, validation_split=0.2, callbacks=[])
net.summary()
8/8 [==============================] - 0s 21ms/step - loss: 2.2385 - sparse_categorical_accuracy: 0.1587 - val_loss: 2.1268 - val_sparse_categorical_accuracy: 0.3500
8/8 [==============================] - 0s 11ms/step - loss: 2.0777 - sparse_categorical_accuracy: 0.4487 - val_loss: 2.0031 - val_sparse_categorical_accuracy: 0.5200
8/8 [==============================] - 0s 9ms/step - loss: 1.9366 - sparse_categorical_accuracy: 0.5688 - val_loss: 1.8533 - val_sparse_categorical_accuracy: 0.6300
8/8 [==============================] - 0s 12ms/step - loss: 1.8252 - sparse_categorical_accuracy: 0.6200 - val_loss: 1.7518 - val_sparse_categorical_accuracy: 0.6700
8/8 [==============================] - 0s 12ms/step - loss: 1.7192 - sparse_categorical_accuracy: 0.6388 - val_loss: 1.6922 - val_sparse_categorical_accuracy: 0.6150
8/8 [==============================] - 0s 12ms/step - loss: 1.6437 - sparse_categorical_accuracy: 0.6350 - val_loss: 1.6507 - val_sparse_categorical_accuracy: 0.6250
8/8 [==============================] - 0s 12ms/step - loss: 1.5718 - sparse_categorical_accuracy: 0.6687 - val_loss: 1.4854 - val_sparse_categorical_accuracy: 0.7150
8/8 [==============================] - 0s 12ms/step - loss: 1.5109 - sparse_categorical_accuracy: 0.6600 - val_loss: 1.5128 - val_sparse_categorical_accuracy: 0.6300
8/8 [==============================] - 0s 15ms/step - loss: 1.4464 - sparse_categorical_accuracy: 0.6712 - val_loss: 1.4625 - val_sparse_categorical_accuracy: 0.6650
8/8 [==============================] - 0s 12ms/step - loss: 1.4263 - sparse_categorical_accuracy: 0.6525 - val_loss: 1.3993 - val_sparse_categorical_accuracy: 0.6850
8/8 [==============================] - 0s 12ms/step - loss: 1.3830 - sparse_categorical_accuracy: 0.6488 - val_loss: 1.3553 - val_sparse_categorical_accuracy: 0.7000
8/8 [==============================] - 0s 12ms/step - loss: 1.3588 - sparse_categorical_accuracy: 0.6637 - val_loss: 1.2715 - val_sparse_categorical_accuracy: 0.7050
8/8 [==============================] - 0s 11ms/step - loss: 1.3309 - sparse_categorical_accuracy: 0.6488 - val_loss: 1.3127 - val_sparse_categorical_accuracy: 0.6250
8/8 [==============================] - 0s 12ms/step - loss: 1.2833 - sparse_categorical_accuracy: 0.6750 - val_loss: 1.2898 - val_sparse_categorical_accuracy: 0.6750
8/8 [==============================] - 0s 9ms/step - loss: 1.2601 - sparse_categorical_accuracy: 0.6463 - val_loss: 1.2530 - val_sparse_categorical_accuracy: 0.6850
8/8 [==============================] - 0s 12ms/step - loss: 1.2196 - sparse_categorical_accuracy: 0.6637 - val_loss: 1.2402 - val_sparse_categorical_accuracy: 0.6650
8/8 [==============================] - 0s 12ms/step - loss: 1.2106 - sparse_categorical_accuracy: 0.6712 - val_loss: 1.1851 - val_sparse_categorical_accuracy: 0.6950
8/8 [==============================] - 0s 12ms/step - loss: 1.1850 - sparse_categorical_accuracy: 0.6712 - val_loss: 1.1842 - val_sparse_categorical_accuracy: 0.6800
8/8 [==============================] - 0s 12ms/step - loss: 1.1708 - sparse_categorical_accuracy: 0.6600 - val_loss: 1.2729 - val_sparse_categorical_accuracy: 0.6350
8/8 [==============================] - 0s 12ms/step - loss: 1.1400 - sparse_categorical_accuracy: 0.6712 - val_loss: 1.1557 - val_sparse_categorical_accuracy: 0.6500
8/8 [==============================] - 0s 10ms/step - loss: 1.1334 - sparse_categorical_accuracy: 0.6625 - val_loss: 1.0864 - val_sparse_categorical_accuracy: 0.7250
8/8 [==============================] - 0s 11ms/step - loss: 1.1640 - sparse_categorical_accuracy: 0.6475 - val_loss: 1.0546 - val_sparse_categorical_accuracy: 0.7000
8/8 [==============================] - 0s 12ms/step - loss: 1.0921 - sparse_categorical_accuracy: 0.6862 - val_loss: 1.0594 - val_sparse_categorical_accuracy: 0.7150
8/8 [==============================] - 0s 9ms/step - loss: 1.0839 - sparse_categorical_accuracy: 0.6725 - val_loss: 1.0929 - val_sparse_categorical_accuracy: 0.6850
8/8 [==============================] - 0s 11ms/step - loss: 1.0720 - sparse_categorical_accuracy: 0.6825 - val_loss: 1.0999 - val_sparse_categorical_accuracy: 0.6450
8/8 [==============================] - 0s 14ms/step - loss: 1.0344 - sparse_categorical_accuracy: 0.7125 - val_loss: 0.9556 - val_sparse_categorical_accuracy: 0.7650
8/8 [==============================] - 0s 12ms/step - loss: 1.0279 - sparse_categorical_accuracy: 0.7013 - val_loss: 1.0564 - val_sparse_categorical_accuracy: 0.6600
8/8 [==============================] - 0s 10ms/step - loss: 1.0327 - sparse_categorical_accuracy: 0.6837 - val_loss: 1.0824 - val_sparse_categorical_accuracy: 0.6450
8/8 [==============================] - 0s 12ms/step - loss: 1.0756 - sparse_categorical_accuracy: 0.6575 - val_loss: 1.0695 - val_sparse_categorical_accuracy: 0.6700
8/8 [==============================] - 0s 14ms/step - loss: 1.0407 - sparse_categorical_accuracy: 0.6750 - val_loss: 1.0049 - val_sparse_categorical_accuracy: 0.6700
8/8 [==============================] - 0s 12ms/step - loss: 1.0265 - sparse_categorical_accuracy: 0.6900 - val_loss: 0.9975 - val_sparse_categorical_accuracy: 0.6800
8/8 [==============================] - 0s 11ms/step - loss: 0.9845 - sparse_categorical_accuracy: 0.6975 - val_loss: 1.0634 - val_sparse_categorical_accuracy: 0.6500
8/8 [==============================] - 0s 12ms/step - loss: 0.9809 - sparse_categorical_accuracy: 0.6913 - val_loss: 1.0408 - val_sparse_categorical_accuracy: 0.6850
8/8 [==============================] - 0s 12ms/step - loss: 0.9675 - sparse_categorical_accuracy: 0.7200 - val_loss: 0.9680 - val_sparse_categorical_accuracy: 0.6950
8/8 [==============================] - 0s 13ms/step - loss: 0.9636 - sparse_categorical_accuracy: 0.7300 - val_loss: 0.9628 - val_sparse_categorical_accuracy: 0.7300
8/8 [==============================] - 0s 12ms/step - loss: 0.9672 - sparse_categorical_accuracy: 0.7025 - val_loss: 0.9196 - val_sparse_categorical_accuracy: 0.7350
8/8 [==============================] - 0s 12ms/step - loss: 0.9950 - sparse_categorical_accuracy: 0.6988 - val_loss: 0.9275 - val_sparse_categorical_accuracy: 0.6900
8/8 [==============================] - 0s 10ms/step - loss: 0.9483 - sparse_categorical_accuracy: 0.6900 - val_loss: 1.0015 - val_sparse_categorical_accuracy: 0.6800
8/8 [==============================] - 0s 13ms/step - loss: 0.9418 - sparse_categorical_accuracy: 0.7212 - val_loss: 0.9645 - val_sparse_categorical_accuracy: 0.6700
8/8 [==============================] - 0s 13ms/step - loss: 0.9259 - sparse_categorical_accuracy: 0.7150 - val_loss: 0.9828 - val_sparse_categorical_accuracy: 0.7100
8/8 [==============================] - 0s 12ms/step - loss: 0.9472 - sparse_categorical_accuracy: 0.7075 - val_loss: 0.9898 - val_sparse_categorical_accuracy: 0.6700
8/8 [==============================] - 0s 11ms/step - loss: 0.9192 - sparse_categorical_accuracy: 0.7125 - val_loss: 0.9631 - val_sparse_categorical_accuracy: 0.6950
8/8 [==============================] - 0s 12ms/step - loss: 0.9454 - sparse_categorical_accuracy: 0.6888 - val_loss: 0.9293 - val_sparse_categorical_accuracy: 0.6950
8/8 [==============================] - 0s 10ms/step - loss: 0.8998 - sparse_categorical_accuracy: 0.7225 - val_loss: 0.8803 - val_sparse_categorical_accuracy: 0.7200
8/8 [==============================] - 0s 12ms/step - loss: 0.9048 - sparse_categorical_accuracy: 0.7325 - val_loss: 0.8654 - val_sparse_categorical_accuracy: 0.7350
8/8 [==============================] - 0s 14ms/step - loss: 0.9083 - sparse_categorical_accuracy: 0.7175 - val_loss: 0.9996 - val_sparse_categorical_accuracy: 0.6650
8/8 [==============================] - 0s 12ms/step - loss: 0.8932 - sparse_categorical_accuracy: 0.7212 - val_loss: 0.8289 - val_sparse_categorical_accuracy: 0.7550
8/8 [==============================] - 0s 12ms/step - loss: 0.8909 - sparse_categorical_accuracy: 0.7400 - val_loss: 0.9616 - val_sparse_categorical_accuracy: 0.6900
8/8 [==============================] - 0s 10ms/step - loss: 0.9210 - sparse_categorical_accuracy: 0.7212 - val_loss: 0.8752 - val_sparse_categorical_accuracy: 0.7350
8/8 [==============================] - 0s 9ms/step - loss: 0.9051 - sparse_categorical_accuracy: 0.6888 - val_loss: 0.8436 - val_sparse_categorical_accuracy: 0.7600
8/8 [==============================] - 0s 9ms/step - loss: 0.8916 - sparse_categorical_accuracy: 0.7163 - val_loss: 0.8928 - val_sparse_categorical_accuracy: 0.6950
8/8 [==============================] - 0s 9ms/step - loss: 0.8682 - sparse_categorical_accuracy: 0.7425 - val_loss: 0.8735 - val_sparse_categorical_accuracy: 0.7550
8/8 [==============================] - 0s 13ms/step - loss: 0.8997 - sparse_categorical_accuracy: 0.7000 - val_loss: 0.8364 - val_sparse_categorical_accuracy: 0.7850
8/8 [==============================] - 0s 13ms/step - loss: 0.8742 - sparse_categorical_accuracy: 0.7275 - val_loss: 0.9289 - val_sparse_categorical_accuracy: 0.6950
8/8 [==============================] - 0s 12ms/step - loss: 0.8503 - sparse_categorical_accuracy: 0.7262 - val_loss: 0.8968 - val_sparse_categorical_accuracy: 0.6900
8/8 [==============================] - 0s 12ms/step - loss: 0.8803 - sparse_categorical_accuracy: 0.7100 - val_loss: 0.7926 - val_sparse_categorical_accuracy: 0.8050
8/8 [==============================] - 0s 12ms/step - loss: 0.8870 - sparse_categorical_accuracy: 0.7312 - val_loss: 0.7938 - val_sparse_categorical_accuracy: 0.7650
8/8 [==============================] - 0s 14ms/step - loss: 0.8537 - sparse_categorical_accuracy: 0.7325 - val_loss: 0.8374 - val_sparse_categorical_accuracy: 0.7000
8/8 [==============================] - 0s 13ms/step - loss: 0.8696 - sparse_categorical_accuracy: 0.7287 - val_loss: 0.7813 - val_sparse_categorical_accuracy: 0.7600
8/8 [==============================] - 0s 14ms/step - loss: 0.8586 - sparse_categorical_accuracy: 0.7250 - val_loss: 0.8754 - val_sparse_categorical_accuracy: 0.7100
Model: "sequential_2"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 flatten_2 (Flatten)         (None, 784)               0         
                                                                 
 dense_2 (Dense)             (None, 10)                7850      
                                                                 
=================================================================
Total params: 7,850
Trainable params: 7,850
Non-trainable params: 0
_________________________________________________________________
import matplotlib.pyplot as plt
plt.plot(history.history["loss"], label="Training Loss")
plt.plot(history.history["sparse_categorical_accuracy"], label="sparse_categorical_accuracy")
plt.plot(history.history["val_sparse_categorical_accuracy"], label="val_sparse_categorical_accuracy")
plt.legend()
plt.show()
d2l.predict_ch3(net, test_iter)
読み込み中...
文章は、創作者によって署名され、ブロックチェーンに安全に保存されています。