# Demonstrate constructing a seeded NumPy RandomState generator; printing it
# shows the object repr (e.g. "<mtrand.RandomState object at 0x...>").
from numpy.random import RandomState

print(RandomState(1))
<mtrand.RandomState object at 0x7f5bb4443f50>
# Build a small two-layer feed-forward network graph (TensorFlow 1.x API)
# for binary classification of 2-D points.
import tensorflow as tf
from numpy.random import RandomState

# Mini-batch size used by the training loop later in the file.
batch_size = 8

# Weights for a 2 -> 3 -> 1 network; fixed seed makes runs reproducible.
w1 = tf.Variable(tf.random_normal([2, 3], stddev=1, seed=1))
w2 = tf.Variable(tf.random_normal([3, 1], stddev=1, seed=1))

# Placeholders: each row of x is one sample with 2 features; y_ holds the
# matching 0/1 label. The leading None dimension allows any batch size.
x = tf.placeholder(tf.float32, shape=(None, 2), name="x-input")
y_ = tf.placeholder(tf.float32, shape=(None, 1), name='y-input')

# Forward pass: two matrix multiplies, then a sigmoid to map to (0, 1).
a = tf.matmul(x, w1)
y = tf.matmul(a, w2)
y = tf.sigmoid(y)

# Binary cross-entropy loss; clip_by_value keeps log() away from log(0).
cross_entropy = -tf.reduce_mean(
    y_ * tf.log(tf.clip_by_value(y, 1e-10, 1.0))
    + (1 - y_) * tf.log(tf.clip_by_value(1 - y, 1e-10, 1.0)))

learning_rate = 0.001
train_step = tf.train.AdamOptimizer(learning_rate).minimize(cross_entropy)

# Synthetic training set: 128 random points in [0, 1)^2; a point is labeled
# 1 exactly when x1 + x2 < 1. Seeded so the dataset is reproducible.
rdm = RandomState(1)
dataset_size = 128
X = rdm.rand(dataset_size, 2)
Y = [[int(x1 + x2 < 1)] for (x1, x2) in X]
# Train the network for 5000 mini-batch steps inside a TF1 session,
# printing the weights before and after training and the loss every
# 1000 steps, then dump the graph for TensorBoard.
with tf.Session() as sess:
    init_op = tf.global_variables_initializer()
    sess.run(init_op)

    # Weights before any training (determined only by the seeds above).
    print("输出目前(未经训练)的参数取值")
    print(sess.run(w1))
    print(sess.run(w2))
    print("\n")

    STEPS = 5000
    for i in range(STEPS):
        # Pick the next mini-batch; start wraps around the dataset so the
        # 128 samples are reused cyclically across the 5000 steps.
        start = (i * batch_size) % dataset_size
        end = min(start + batch_size, dataset_size)
        sess.run(train_step, feed_dict={x: X[start:end], y_: Y[start:end]})

        # Report the loss over the whole dataset every 1000 steps.
        if i % 1000 == 0:
            total_cross_entropy = sess.run(
                cross_entropy, feed_dict={x: X, y_: Y})
            print("After %d training step(s), cross entropy on all data is %g"
                  % (i, total_cross_entropy))

    print("\n")
    print("输出训练后的参数取值")
    print(sess.run(w1))
    print(sess.run(w2))

# Write the graph for TensorBoard. NOTE(review): in the garbled original the
# writer's indentation is ambiguous; it uses no session state, so running it
# after the `with` block is equivalent — confirm against the original layout.
writer = tf.summary.FileWriter(
    "/home/mikowoo/Desktop/log/networks/", tf.get_default_graph())
writer.close()
输出目前(未经训练)的参数取值
[[-0.8113182 1.4845988 0.06532937]
[-2.4427042 0.0992484 0.5912243 ]]
[[-0.8113182 ]
[ 1.4845988 ]
[ 0.06532937]]
After 0 training step(s), cross entropy on all data is 1.89805
After 1000 training step(s), cross entropy on all data is 0.655075
After 2000 training step(s), cross entropy on all data is 0.626172
After 3000 training step(s), cross entropy on all data is 0.615096
After 4000 training step(s), cross entropy on all data is 0.610309
输出训练后的参数取值
[[ 0.02476984 0.5694868 1.6921942 ]
[-2.1977348 -0.23668921 1.1143897 ]]
[[-0.45544702]
[ 0.4911093 ]
[-0.9811033 ]]
# Standalone demonstration of the batch-slicing arithmetic used by the
# training loop: print the [start, end) bounds chosen at each step.
# Relies on batch_size and dataset_size defined earlier in the file;
# with 128 samples and batches of 8, the sequence repeats every 16 steps.
STEPS = 5000
for i in range(STEPS):
    start = (i * batch_size) % dataset_size
    end = min(start + batch_size, dataset_size)
    print(str(start) + '\t' + str(end) + '\n')
0 8
8 16
16 24
24 32
32 40
40 48
48 56
56 64
64 72
72 80
80 88
88 96
96 104
104 112
112 120
120 128
0 8
8 16
16 24
24 32
32 40
40 48
48 56
56 64
64 72
72 80
80 88
88 96
96 104
104 112
112 120
120 128
0 8
8 16
...
...