Machine Learning

Statistical softmax TensorFlow application code: iris_alpha_01.py

coding art 2012. 2. 3. 00:20
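
This post presents iris_alpha_01.py, a TensorFlow 1.x script that trains a softmax classifier on the Iris data set. Its hidden layer blends three sets of weights through a mixing coefficient alpha.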


import tensorflow as tf
import numpy as np
import time

start_time = time.time()

def label_encode(label):
    val = []
    if label == "Iris-setosa":
        val = [1, 0, 0]
    elif label == "Iris-versicolor":
        val = [0, 1, 0]
    elif label == "Iris-virginica":
        val = [0, 0, 1]
    return val
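
# label_encode one-hot encodes the three species, e.g.
# label_encode("Iris-versicolor") returns [0, 1, 0].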

def data_encode(file):
    X = []
    Y = []
    with open(file, 'r') as train_file:
        for line in train_file.read().strip().split('\n'):
            line = line.split(',')
            X.append([float(line[0]), float(line[1]), float(line[2]), float(line[3])])
            Y.append(label_encode(line[4]))
    return X, Y
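
# Note: data_encode assumes the standard UCI Iris CSV layout, four
# comma-separated measurements followed by the species label, e.g.
#   5.1,3.5,1.4,0.2,Iris-setosa
#   6.4,3.2,4.5,1.5,Iris-versicolor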

#Defining a multilayer neural network model
def model(x, alpha, weights1, bias1, weights2, bias2, weights3, bias3):
    layer_1 = (tf.add(tf.matmul(x, weights1["hidden1"]), bias1["hidden1"])
               + alpha * tf.add(tf.matmul(x, weights2["hidden2"]), bias2["hidden2"])
                       * tf.add(tf.matmul(x, weights3["hidden3"]), bias3["hidden3"]))
    #layer_1 = tf.nn.relu(layer_1)
    #output_layer = tf.matmul(layer_1, weights1["output1"]) + bias1["output1"] + alpha*tf.add(tf.matmul(layer_1, weights2["output2"]), bias2["output2"]) * tf.add(tf.matmul(layer_1, weights3["output3"]), bias3["output3"])
    output_layer = tf.matmul(layer_1, weights1["output1"]) + bias1["output1"]
    return output_layer
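
# The hidden layer computes
#   layer_1 = (x·W1 + b1) + alpha * (x·W2 + b2) ⊙ (x·W3 + b3)
# where ⊙ is the element-wise product, so alpha controls how strongly the
# multiplicative second-order term contributes on top of the plain affine map.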

#Training and Testing Data
train_X , train_Y = data_encode('iris.train')
test_X , test_Y = data_encode('iris.test')

#Hyperparameters
learning_rate = 0.01
training_epochs = 10000
display_steps = 2000


#Network parameters
n_input = 4
n_hidden = 10
n_output = 3
#alpha = 0.0238
alpha = 0.2
#Graph Nodes
X = tf.placeholder("float", [None, n_input])
Y = tf.placeholder("float", [None, n_output])
  
#Weights and Biases
weights1 = {
    "hidden1" : tf.Variable(tf.random_normal([n_input, n_hidden], stddev=0.01), name="weight_hidden1"),
    "output1" : tf.Variable(tf.random_normal([n_hidden, n_output], stddev=0.01), name="weight_output1")
}

bias1 = {
    "hidden1" : tf.Variable(tf.random_normal([n_hidden], stddev=0.01), name="bias_hidden1"),
    "output1" : tf.Variable(tf.random_normal([n_output], stddev=0.01), name="bias_output1")
}

weights2 = {
    "hidden2" : tf.Variable(tf.random_normal([n_input, n_hidden], stddev=0.01), name="weight_hidden2"),
    "output2" : tf.Variable(tf.random_normal([n_hidden, n_output], stddev=0.01), name="weight_output2")
}

bias2 = {
    "hidden2" : tf.Variable(tf.random_normal([n_hidden], stddev=0.01), name="bias_hidden2"),
    "output2" : tf.Variable(tf.random_normal([n_output], stddev=0.01), name="bias_output2")
}

weights3 = {
    "hidden3" : tf.Variable(tf.random_normal([n_input, n_hidden], stddev=0.01), name="weight_hidden3"),
    "output3" : tf.Variable(tf.random_normal([n_hidden, n_output], stddev=0.01), name="weight_output3")
}

bias3 = {
    "hidden3" : tf.Variable(tf.random_normal([n_hidden], stddev=0.01), name="bias_hidden3"),
    "output3" : tf.Variable(tf.random_normal([n_output], stddev=0.01), name="bias_output3")
}

#Define model
pred = model(X,alpha, weights1, bias1,weights2, bias2,weights3, bias3)

#Define loss and optimizer
cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=pred, labels=Y))
optimizer = tf.train.AdamOptimizer(learning_rate).minimize(cost)

#Initializing global variables
init = tf.global_variables_initializer()

with tf.Session() as sess:
    sess.run(init)

    for epoch in range(training_epochs):
        _, c = sess.run([optimizer, cost], feed_dict={X: train_X, Y: train_Y})
        if (epoch + 1) % display_steps == 0:
            print("Epoch: ", (epoch + 1), "Cost: ", c)
    print("Optimization Finished!")

    # Measure accuracy on the held-out test set
    correct_pred = tf.equal(tf.argmax(pred, 1), tf.argmax(Y, 1))
    accuracy = tf.reduce_mean(tf.cast(correct_pred, "float"))
    print("Accuracy:", accuracy.eval({X: test_X, Y: test_Y}))


end_time = time.time()

print( "Completed in ", end_time - start_time , " seconds")

 
'''
Epoch:  2000 Cost:  0.046752203
Epoch:  4000 Cost:  0.043881003
Epoch:  6000 Cost:  0.034896933
Epoch:  8000 Cost:  0.00548237
Epoch:  10000 Cost:  0.00016375385
Optimization Finished!
Accuracy: 1.0
Completed in  7.1777873039245605  seconds
'''
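
The script above is written against the TensorFlow 1.x graph API (tf.placeholder, tf.Session), which TensorFlow 2 exposes only through tf.compat.v1. Below is a minimal sketch of the same alpha-blended model on the TF2 Keras API; this is a rough port under that assumption, not the original code, and it reuses the train_X and train_Y arrays loaded by data_encode above:

import tensorflow as tf
import numpy as np

alpha = 0.2  # same mixing coefficient as in the TF1 script

class AlphaModel(tf.keras.Model):
    # Hidden layer blends three affine maps: h1(x) + alpha * h2(x) * h3(x)
    def __init__(self, n_hidden=10, n_output=3):
        super().__init__()
        self.h1 = tf.keras.layers.Dense(n_hidden)
        self.h2 = tf.keras.layers.Dense(n_hidden)
        self.h3 = tf.keras.layers.Dense(n_hidden)
        self.out = tf.keras.layers.Dense(n_output)

    def call(self, x):
        layer_1 = self.h1(x) + alpha * self.h2(x) * self.h3(x)
        return self.out(layer_1)  # logits; softmax is folded into the loss

model = AlphaModel()
model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=0.01),
              loss=tf.keras.losses.CategoricalCrossentropy(from_logits=True),
              metrics=["accuracy"])
model.fit(np.asarray(train_X, dtype=np.float32),
          np.asarray(train_Y, dtype=np.float32),
          epochs=200, verbose=0)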