mirror of
https://github.com/autistic-symposium/tensorflow-for-deep-learning-py.git
synced 2025-05-11 03:04:59 -04:00
add new deep dream stuff from aws repo
This commit is contained in:
parent
0d8220a392
commit
305e85e8b7
25 changed files with 30870 additions and 517 deletions
36
TensorFlow/intro_linear_model.py
Normal file
36
TensorFlow/intro_linear_model.py
Normal file
|
@ -0,0 +1,36 @@
|
|||
import numpy as np
import tensorflow as tf

# Model linear regression y = Wx + b.
# x holds a batch of scalar inputs, shape (batch, 1).
x = tf.placeholder(tf.float32, [None, 1])
W = tf.Variable(tf.zeros([1, 1]))  # weight, initialized to zero
b = tf.Variable(tf.zeros([1]))     # bias, initialized to zero
product = tf.matmul(x, W)
y = product + b
# y_ holds the target values, shape (batch, 1).
y_ = tf.placeholder(tf.float32, [None, 1])

# Cost function: mean of (y_ - y)**2 over the batch.
cost = tf.reduce_mean(tf.square(y_ - y))

# Training using Gradient Descent to minimize cost.
# NOTE(review): the tiny learning rate appears to compensate for the
# unnormalized inputs (x grows up to ~1000); larger rates would diverge.
train_step = tf.train.GradientDescentOptimizer(0.0000001).minimize(cost)

steps = 1000

# Use a context manager so the session's resources are released even on
# error — the original created tf.Session() and never called close().
with tf.Session() as sess:
    # tf.initialize_all_variables() is deprecated; global_variables_initializer
    # is the supported TF1 replacement with identical behavior.
    sess.run(tf.global_variables_initializer())

    for i in range(steps):

        # Create fake data for y = W.x + b where W = 2, b = 0
        xs = np.array([[i]])
        ys = np.array([[2 * i]])

        # Train on this single (x, y) pair.
        feed = {x: xs, y_: ys}
        sess.run(train_step, feed_dict=feed)

        print("After %d iteration:" % i)
        print("W: %f" % sess.run(W))
        print("b: %f" % sess.run(b))

        print("cost: %f" % sess.run(cost, feed_dict=feed))
|
Loading…
Add table
Add a link
Reference in a new issue