
Python in Practice | Implementing softmax in TensorFlow: Handwritten Digit Recognition

2018-07-11 20:30
import tensorflow as tf
import matplotlib.pyplot as plt
import random
from tensorflow.examples.tutorials.mnist import input_data

# Download (if needed) and load MNIST, with labels one-hot encoded
mnist = input_data.read_data_sets("data/", one_hot=True)
nb_classes = 10

# MNIST images are 28*28 = 784 pixels, flattened into a single vector
X = tf.placeholder(tf.float32, [None, 784])
# 0-9 digit recognition = 10 classes
Y = tf.placeholder(tf.float32, [None, nb_classes])

# Weights and bias, initialized from a normal distribution
W = tf.Variable(tf.random_normal([784, nb_classes]))
b = tf.Variable(tf.random_normal([nb_classes]))

# Softmax turns the logits X*W + b into class probabilities
hypothesis = tf.nn.softmax(tf.matmul(X, W) + b)

# Cross-entropy cost: batch mean of -sum_j Y_j * log(hypothesis_j)
cost = tf.reduce_mean(-tf.reduce_sum(Y * tf.log(hypothesis), axis=1))
optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.01).minimize(cost)

# Test model: a prediction is correct when the most likely class matches the label
is_correct = tf.equal(tf.argmax(hypothesis, 1), tf.argmax(Y, 1))

accuracy = tf.reduce_mean(tf.cast(is_correct, tf.float32))

training_epochs = 15
batch_size = 100

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for epoch in range(training_epochs):
        avg_cost = 0
        total_batch = int(mnist.train.num_examples / batch_size)
        for i in range(total_batch):
            batch_xs, batch_ys = mnist.train.next_batch(batch_size)
            c, _ = sess.run([cost, optimizer], feed_dict={X: batch_xs, Y: batch_ys})
            avg_cost += c / total_batch
        print("Epoch:", "%04d" % (epoch + 1), "cost=", "{:.9f}".format(avg_cost))

    print("Accuracy:", accuracy.eval(session=sess,
                                     feed_dict={X: mnist.test.images, Y: mnist.test.labels}))

    # Pick one random test image, print its true label and the model's prediction
    r = random.randint(0, mnist.test.num_examples - 1)
    print("Label:", sess.run(tf.argmax(mnist.test.labels[r:r + 1], 1)))
    print("Prediction:", sess.run(tf.argmax(hypothesis, 1),
                                  feed_dict={X: mnist.test.images[r:r + 1]}))

    plt.imshow(mnist.test.images[r:r + 1].reshape(28, 28), cmap="Greys", interpolation="nearest")
    plt.show()
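The script above relies on the TensorFlow 1.x Session API and the tensorflow.examples.tutorials.mnist helper, both of which are no longer available in TensorFlow 2.x. As a rough sketch (not from the original post, and assuming a TF 2.x install with the standard tf.keras API), the same single-layer softmax classifier could be written like this:

import tensorflow as tf

# Sketch of the same model under TF 2.x / tf.keras; names below are the standard
# Keras ones, not part of the original tutorial.
(x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data()
# Flatten 28*28 images to 784-vectors and scale pixels to [0, 1]
x_train = x_train.reshape(-1, 784).astype("float32") / 255.0
x_test = x_test.reshape(-1, 784).astype("float32") / 255.0

# One dense layer with softmax activation corresponds to softmax(X*W + b) above
model = tf.keras.Sequential([
    tf.keras.layers.Dense(10, activation="softmax", input_shape=(784,))
])
model.compile(optimizer=tf.keras.optimizers.SGD(learning_rate=0.01),
              loss="sparse_categorical_crossentropy",  # integer labels instead of one-hot
              metrics=["accuracy"])

model.fit(x_train, y_train, batch_size=100, epochs=15)
print("Accuracy:", model.evaluate(x_test, y_test)[1])

The choice of sparse_categorical_crossentropy is only because load_data() returns integer labels; with one-hot labels, categorical_crossentropy would play the same role as the hand-written cross-entropy cost above.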