MLlab09-2.py
# Implement XOR using a network with multiple layers
import numpy as np
import tensorflow as tf
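# Note: this script uses the TensorFlow 1.x graph API (tf.placeholder, tf.Session),
# which is no longer in the default namespace in TensorFlow 2. A hedged sketch,
# assuming a TF 2.x install: swap the import above for the compat module instead.
#   import tensorflow.compat.v1 as tf
#   tf.disable_v2_behavior()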
# XOR truth table: four input pairs and their labels
x_data = np.array([[0, 0], [0, 1], [1, 0], [1, 1]], dtype=np.float32)
y_data = np.array([[0], [1], [1], [0]], dtype=np.float32)

# Placeholders for the input features (2 per example) and the target label
X = tf.placeholder(tf.float32, [None, 2])
Y = tf.placeholder(tf.float32, [None, 1])
# Layer 1: 2 inputs -> 10 hidden units
W1 = tf.Variable(tf.random_normal([2, 10]), name='weight1')
b1 = tf.Variable(tf.random_normal([10]), name='bias1')
layer1 = tf.sigmoid(tf.matmul(X, W1) + b1)

# Layer 2: 10 -> 10 hidden units
W2 = tf.Variable(tf.random_normal([10, 10]), name='weight2')
b2 = tf.Variable(tf.random_normal([10]), name='bias2')
layer2 = tf.sigmoid(tf.matmul(layer1, W2) + b2)

# Layer 3: 10 -> 10 hidden units
W3 = tf.Variable(tf.random_normal([10, 10]), name='weight3')
b3 = tf.Variable(tf.random_normal([10]), name='bias3')
layer3 = tf.sigmoid(tf.matmul(layer2, W3) + b3)

# Output layer: 10 -> 1 unit; sigmoid gives the probability of class 1
W4 = tf.Variable(tf.random_normal([10, 1]), name='weight4')
b4 = tf.Variable(tf.random_normal([1]), name='bias4')
hypothesis = tf.sigmoid(tf.matmul(layer3, W4) + b4)
# Binary cross-entropy loss for the sigmoid output
cost = -tf.reduce_mean(Y * tf.log(hypothesis) + (1 - Y) * tf.log(1 - hypothesis))
train = tf.train.GradientDescentOptimizer(learning_rate=0.1).minimize(cost)
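# A hedged variant, not in the original: clipping keeps tf.log away from 0 if the
# sigmoid saturates during training (the run below also works without it).
#   eps = 1e-7
#   clipped = tf.clip_by_value(hypothesis, eps, 1 - eps)
#   cost = -tf.reduce_mean(Y * tf.log(clipped) + (1 - Y) * tf.log(1 - clipped))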
# Threshold the network output at 0.5 and measure accuracy against the labels
predicted = tf.cast(hypothesis > 0.5, dtype=tf.float32)
accuracy = tf.reduce_mean(tf.cast(tf.equal(predicted, Y), dtype=tf.float32))
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())

    # Train for 10,000 steps, logging cost and weights every 100 steps
    for step in range(10001):
        sess.run(train, feed_dict={X: x_data, Y: y_data})
        if step % 100 == 0:
            print(step, sess.run(cost, feed_dict={X: x_data, Y: y_data}),
                  sess.run([W1, W2, W3, W4]))

    # Evaluate the trained network on the four XOR cases
    h, c, a = sess.run([hypothesis, predicted, accuracy],
                       feed_dict={X: x_data, Y: y_data})
    print("\nHypothesis: ", h, "\nCorrect: ", c, "\nAccuracy: ", a)