forked from GoodLuckDay/inflearn-machin_learning
MLlab09-1.py
33 lines (25 loc) · 1.29 KB
# Implement XOR with a neural network (two-layer sigmoid net), TensorFlow 1.x style
import numpy as np
import tensorflow as tf

# XOR truth table: inputs and labels
x_data = np.array([[0, 0], [0, 1], [1, 0], [1, 1]], dtype=np.float32)
y_data = np.array([[0], [1], [1], [0]], dtype=np.float32)

X = tf.placeholder(tf.float32, [None, 2])
Y = tf.placeholder(tf.float32, [None, 1])

# Hidden layer: 2 inputs -> 2 units, sigmoid activation
W1 = tf.Variable(tf.random_normal([2, 2]), name='weight1')
b1 = tf.Variable(tf.random_normal([2]), name='bias1')
layer1 = tf.sigmoid(tf.matmul(X, W1) + b1)

# Output layer: 2 hidden units -> 1 output probability
W2 = tf.Variable(tf.random_normal([2, 1]), name='weight2')
b2 = tf.Variable(tf.random_normal([1]), name='bias2')
hypothesis = tf.sigmoid(tf.matmul(layer1, W2) + b2)

# Binary cross-entropy cost and gradient-descent training step
cost = -tf.reduce_mean(Y * tf.log(hypothesis) + (1 - Y) * tf.log(1 - hypothesis))
train = tf.train.GradientDescentOptimizer(learning_rate=0.1).minimize(cost)

# Threshold at 0.5 to get class predictions and measure accuracy
predicted = tf.cast(hypothesis > 0.5, dtype=tf.float32)
accuracy = tf.reduce_mean(tf.cast(tf.equal(predicted, Y), dtype=tf.float32))

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for step in range(10001):
        sess.run(train, feed_dict={X: x_data, Y: y_data})
        if step % 100 == 0:
            print(step, sess.run(cost, feed_dict={X: x_data, Y: y_data}), sess.run([W1, W2]))

    h, c, a = sess.run([hypothesis, predicted, accuracy], feed_dict={X: x_data, Y: y_data})
    print("\nHypothesis: ", h, "\nCorrect: ", c, "\nAccuracy: ", a)