TensorFlow Linear Regression with a Placeholder

As we studied before, a placeholder can serve as a source of input values, so I have rewritten the source code previously uploaded on this blog to use placeholders. You can see the differences by comparing it with the earlier version here.
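
To see the mechanism in isolation, here is a minimal sketch (the names 'a' and 'doubled' are mine, just for illustration): a placeholder holds no value of its own; the actual data is supplied only when the graph runs, through the feed_dict argument.

import tensorflow as tf

a = tf.placeholder(tf.float32, shape=[None])  # a 1-D input of any length
doubled = a * 2                               # an op that consumes the placeholder

sess = tf.Session()
# The value of 'a' is supplied only at run time:
print(sess.run(doubled, feed_dict={a: [1, 2, 3]}))  # [2. 4. 6.]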

import tensorflow as tf

# X and Y data: the hard-coded training lists below are replaced by placeholders
#x_train = [1,2,3]
#y_train = [1,2,3]

W = tf.Variable(tf.random_normal([1]), name = 'weight')
b = tf.Variable(tf.random_normal([1]), name = 'bias')
# shape=[None] accepts a 1-D tensor of any length, supplied at run time
X = tf.placeholder(tf.float32, shape=[None])
Y = tf.placeholder(tf.float32, shape=[None])

# Our hypothesis XW + b
hypothesis = X * W + b

# Cost/loss function: mean squared error,
# cost(W, b) = (1/m) * sum_i (W*x_i + b - y_i)^2
cost = tf.reduce_mean(tf.square(hypothesis - Y))

# Minimize the cost with gradient descent
optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.01)
train = optimizer.minimize(cost)

# Launch the graph in a session.
sess = tf.Session()

# Initializes global variables in the graph.
sess.run(tf.global_variables_initializer())

# Fit the line with new training data
for step in range(2001):
    cost_val, W_val, b_val, _ = sess.run([cost, W, b, train],
        feed_dict={X: [1, 2, 3, 4, 5], Y: [2.1, 3.1, 4.1, 5.1, 6.1]})
    
    if step % 20 == 0:
        print(step, cost_val, W_val, b_val)

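Running the script prints the step, the current cost, and the values of W and b every 20 steps:
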
0 50.1015 [ 0.26059604] [-2.03533888]
20 1.18652 [ 1.6977576] [-1.44534516]
40 1.03533 [ 1.65833342] [-1.27691102]
60 0.904159 [ 1.61524701] [-1.1212374]
80 0.789608 [ 1.57495403] [-0.97576672]
100 0.689571 [ 1.53729987] [-0.83982295]
120 0.602207 [ 1.50211155] [-0.71278226]
140 0.525912 [ 1.46922791] [-0.59406161]
160 0.459283 [ 1.43849778] [-0.48311609]
180 0.401095 [ 1.40978014] [-0.37943652]
200 0.350279 [ 1.38294327] [-0.282547]
220 0.305902 [ 1.35786402] [-0.19200291]
240 0.267146 [ 1.33442724] [-0.10738858]
260 0.233301 [ 1.31252527] [-0.0283157]
280 0.203743 [ 1.29205775] [ 0.04557868]
300 0.177931 [ 1.27293074] [ 0.11463354]
320 0.155388 [ 1.25505626] [ 0.17916597]
340 0.135701 [ 1.23835242] [ 0.23947211]
360 0.118509 [ 1.22274256] [ 0.29582879]
380 0.103495 [ 1.2081548] [ 0.34849465]
400 0.0903829 [ 1.19452274] [ 0.39771131]
420 0.078932 [ 1.18178332] [ 0.44370466]
440 0.0689319 [ 1.16987813] [ 0.48668599]
460 0.0601987 [ 1.15875268] [ 0.52685255]
480 0.052572 [ 1.14835572] [ 0.56438845]
500 0.0459115 [ 1.13863993] [ 0.59946603]
520 0.0400949 [ 1.12956023] [ 0.63224638]
540 0.0350152 [ 1.12107527] [ 0.66287982]
560 0.030579 [ 1.11314583] [ 0.69150722]
580 0.0267049 [ 1.1057359] [ 0.71825981]
600 0.0233216 [ 1.09881115] [ 0.74326026]
620 0.0203669 [ 1.09233987] [ 0.76662332]
640 0.0177866 [ 1.08629251] [ 0.7884565]
660 0.0155331 [ 1.08064115] [ 0.80885965]
680 0.0135652 [ 1.07535994] [ 0.82792664]
700 0.0118466 [ 1.07042456] [ 0.84574479]
720 0.0103457 [ 1.06581247] [ 0.86239606]
740 0.00903502 [ 1.06150246] [ 0.87795687]
760 0.00789035 [ 1.05747449] [ 0.89249873]
780 0.00689069 [ 1.05371046] [ 0.90608811]
800 0.00601769 [ 1.05019295] [ 0.9187876]
820 0.00525529 [ 1.04690576] [ 0.93065536]
840 0.00458949 [ 1.04383385] [ 0.94174582]
860 0.00400803 [ 1.04096305] [ 0.95210999]
880 0.00350024 [ 1.03828049] [ 0.96179545]
900 0.00305679 [ 1.0357734] [ 0.97084659]
920 0.00266952 [ 1.03343058] [ 0.97930491]
940 0.00233132 [ 1.0312413] [ 0.9872092]
960 0.00203595 [ 1.02919507] [ 0.994596]
980 0.00177801 [ 1.02728319] [ 1.00149906]
1000 0.00155276 [ 1.02549636] [ 1.00794983]
1020 0.00135604 [ 1.0238266] [ 1.01397836]
1040 0.00118423 [ 1.02226615] [ 1.01961207]
1060 0.0010342 [ 1.02080798] [ 1.02487683]
1080 0.000903161 [ 1.01944506] [ 1.02979696]
1100 0.000788738 [ 1.01817167] [ 1.0343945]
1120 0.000688808 [ 1.0169816] [ 1.0386914]
1140 0.000601537 [ 1.01586926] [ 1.04270661]
1160 0.000525326 [ 1.01483011] [ 1.04645872]
1180 0.000458776 [ 1.01385891] [ 1.04996502]
1200 0.000400654 [ 1.01295114] [ 1.05324197]
1220 0.000349893 [ 1.01210296] [ 1.0563041]
1240 0.000305565 [ 1.01131046] [ 1.05916572]
1260 0.000266852 [ 1.01056969] [ 1.06183994]
1280 0.00023304 [ 1.00987732] [ 1.06433916]
1300 0.00020352 [ 1.00923049] [ 1.06667459]
1320 0.000177737 [ 1.0086261] [ 1.06885695]
1340 0.000155217 [ 1.00806117] [ 1.07089651]
1360 0.000135551 [ 1.00753319] [ 1.07280266]
1380 0.000118377 [ 1.0070399] [ 1.07458389]
1400 0.000103381 [ 1.0065788] [ 1.07624829]
1420 9.02837e-05 [ 1.00614798] [ 1.07780385]
1440 7.88485e-05 [ 1.00574541] [ 1.07925713]
1460 6.88592e-05 [ 1.00536919] [ 1.08061552]
1480 6.01342e-05 [ 1.00501752] [ 1.08188498]
1500 5.25176e-05 [ 1.00468898] [ 1.08307135]
1520 4.58648e-05 [ 1.0043819] [ 1.08417988]
1540 4.00537e-05 [ 1.00409484] [ 1.08521605]
1560 3.49787e-05 [ 1.00382686] [ 1.08618414]
1580 3.05477e-05 [ 1.00357616] [ 1.08708882]
1600 2.66761e-05 [ 1.00334203] [ 1.08793449]
1620 2.32983e-05 [ 1.00312304] [ 1.08872461]
1640 2.03461e-05 [ 1.0029186] [ 1.089463]
1660 1.77679e-05 [ 1.00272739] [ 1.09015322]
1680 1.55168e-05 [ 1.00254869] [ 1.09079814]
1700 1.3551e-05 [ 1.0023818] [ 1.09140086]
1720 1.18338e-05 [ 1.00222588] [ 1.09196401]
1740 1.03342e-05 [ 1.00208008] [ 1.09249032]
1760 9.02474e-06 [ 1.00194383] [ 1.09298205]
1780 7.88156e-06 [ 1.00181651] [ 1.09344161]
1800 6.88377e-06 [ 1.00169754] [ 1.093871]
1820 6.0114e-06 [ 1.00158644] [ 1.09427249]
1840 5.24977e-06 [ 1.00148261] [ 1.09464753]
1860 4.58481e-06 [ 1.00138545] [ 1.094998]
1880 4.00421e-06 [ 1.00129473] [ 1.09532547]
1900 3.49681e-06 [ 1.00120997] [ 1.0956316]
1920 3.05392e-06 [ 1.00113082] [ 1.0959177]
1940 2.6671e-06 [ 1.00105679] [ 1.09618497]
1960 2.32921e-06 [ 1.00098753] [ 1.09643471]
1980 2.0343e-06 [ 1.00092292] [ 1.096668]
2000 1.77696e-06 [ 1.00086248] [ 1.09688616]
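
As expected, W converges to about 1.0 and b to about 1.1, matching the training data y = x + 1.1. Because X is a placeholder, the trained model can also be queried with unseen inputs in the same session. A short usage sketch (the test values below are my own examples):

# Testing our model on new inputs
print(sess.run(hypothesis, feed_dict={X: [5]}))         # roughly [ 6.1]
print(sess.run(hypothesis, feed_dict={X: [2.5]}))       # roughly [ 3.6]
print(sess.run(hypothesis, feed_dict={X: [1.5, 3.5]}))  # roughly [ 2.6  4.6]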

-Reference-

https://github.com/hunkim/DeepLearningZeroToAll/