5. Declaring Operations
# Operations
#----------------------------------
#
# This script introduces various mathematical
# operations in TensorFlow
# Declaring Operations
import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf
from tensorflow.python.framework import ops
ops.reset_default_graph()
# Open graph session
sess = tf.Session()
# div() vs truediv() vs floordiv()
print(sess.run(tf.div(3, 4)))           # 0: integer division (Python 2 semantics)
print(sess.run(tf.truediv(3, 4)))       # 0.75: casts the integers to floats first
print(sess.run(tf.floordiv(3.0, 4.0)))  # 0.0: floor of the true division
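# A quick cross-check (a sketch; in recent TensorFlow versions tf.divide
# is the recommended spelling for true division):
print(sess.run(tf.divide(3, 4)))  # 0.75
print(3 // 4)                     # 0: plain Python floor division, for comparison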
# Mod function
print(sess.run(tf.mod(22.0,5.0)))
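# NumPy agrees on the modulus (np is already imported above):
print(np.mod(22.0, 5.0))  # 2.0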
# Cross Product
print(sess.run(tf.cross([1.,0.,0.],[0.,1.,0.])))
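# tf.cross is defined for 3-element vectors; NumPy's np.cross makes a
# handy sanity check and should print the same [0. 0. 1.]:
print(np.cross([1., 0., 0.], [0., 1., 0.]))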
# Trig functions (3.1416 approximates pi)
print(sess.run(tf.sin(3.1416)))
print(sess.run(tf.cos(3.1416)))
# Tangent via the identity tan(x) = sin(x)/cos(x)
print(sess.run(tf.div(tf.sin(3.1416/4.), tf.cos(3.1416/4.))))
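# TensorFlow also exposes tf.tan directly; it should agree with the
# sin/cos ratio above (tan(pi/4) = 1):
print(sess.run(tf.tan(3.1416/4.)))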
# Custom operation
test_nums = range(15)
def custom_polynomial(x_val):
    # Return 3x^2 - x + 10
    # (tf.sub was renamed tf.subtract in TensorFlow 1.0)
    return tf.subtract(3 * tf.square(x_val), x_val) + 10
print(sess.run(custom_polynomial(11)))
# What we expect, computed with a plain Python list comprehension
expected_output = [3*x*x-x+10 for x in test_nums]
print(expected_output)
# TensorFlow custom function output
for num in test_nums:
    print(sess.run(custom_polynomial(num)))
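# The loop above launches one graph evaluation per input; the polynomial
# can also be evaluated element-wise in a single run by passing the whole
# range in as a tensor (a sketch, assuming int32 inputs are acceptable):
print(sess.run(custom_polynomial(tf.constant(list(test_nums)))))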
Note: calling the old tf.sub raises AttributeError: module 'tensorflow' has no attribute 'sub' under TensorFlow 1.0+; it was renamed tf.subtract, which the listing above uses. The script's output:
0
0.75
0.0
2.0
[ 0. 0. 1.]
-7.23998e-06
-1.0
1.0
362
[10, 12, 20, 34, 54, 80, 112, 150, 194, 244, 300, 362, 430, 504, 584]
10
12
20
34
54
80
112
150
194
244
300
362
430
504
584
6. Activation Functions
This script plots several common activation functions in TensorFlow.
# 06_activation_functions.py
# Activation Functions
#----------------------------------
#
# This script introduces activation
# functions in TensorFlow
# Implementing Activation Functions
import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf
from tensorflow.python.framework import ops
ops.reset_default_graph()
# Open graph session
sess = tf.Session()
# X range
x_vals = np.linspace(start=-10., stop=10., num=100)
# ReLU activation
print(sess.run(tf.nn.relu([-3., 3., 10.])))
y_relu = sess.run(tf.nn.relu(x_vals))
# ReLU-6 activation
print(sess.run(tf.nn.relu6([-3., 3., 10.])))
y_relu6 = sess.run(tf.nn.relu6(x_vals))
# Sigmoid activation
print(sess.run(tf.nn.sigmoid([-1., 0., 1.])))
y_sigmoid = sess.run(tf.nn.sigmoid(x_vals))
# Hyperbolic tangent activation
print(sess.run(tf.nn.tanh([-1., 0., 1.])))
y_tanh = sess.run(tf.nn.tanh(x_vals))
# Softsign activation
print(sess.run(tf.nn.softsign([-1., 0., 1.])))
y_softsign = sess.run(tf.nn.softsign(x_vals))
# Softplus activation
print(sess.run(tf.nn.softplus([-1., 0., 1.])))
y_softplus = sess.run(tf.nn.softplus(x_vals))
# Exponential linear activation
print(sess.run(tf.nn.elu([-1., 0., 1.])))
y_elu = sess.run(tf.nn.elu(x_vals))
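# Sanity checks against the closed-form definitions (a sketch; the values
# should match the printed outputs above):
print(1. / (1. + np.exp(-1.)))  # sigmoid(1)  ~ 0.7310586
print(np.log(1. + np.exp(0.)))  # softplus(0) = ln(2) ~ 0.69314718
print(np.exp(-1.) - 1.)         # elu(-1) = exp(-1) - 1 ~ -0.63212055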
# Plot the different functions
plt.plot(x_vals, y_softplus, 'r--', label='Softplus', linewidth=2)
plt.plot(x_vals, y_relu, 'b:', label='ReLU', linewidth=2)
plt.plot(x_vals, y_relu6, 'g-.', label='ReLU6', linewidth=2)
plt.plot(x_vals, y_elu, 'k-', label='ELU', linewidth=0.5)
plt.ylim([-1.5,7])
plt.legend(loc='upper left')
plt.show()
plt.plot(x_vals, y_sigmoid, 'r--', label='Sigmoid', linewidth=2)
plt.plot(x_vals, y_tanh, 'b:', label='Tanh', linewidth=2)
plt.plot(x_vals, y_softsign, 'g-.', label='Softsign', linewidth=2)
plt.ylim([-2,2])
plt.legend(loc='upper left')
plt.show()
[ 0. 3. 10.]
[ 0. 3. 6.]
[ 0.26894143 0.5 0.7310586 ]
[-0.76159418 0. 0.76159418]
[-0.5 0. 0.5]
[ 0.31326166 0.69314718 1.31326163]
[-0.63212055 0. 1. ]
Note: the original listing called plt.legend(loc='top left'), which Matplotlib rejects with UserWarning: Unrecognized location "top left". Falling back on "best"; the valid location name, used above, is 'upper left'.