Activation Functions

This notebook evaluates the standard activation functions in tf.nn and plots each one over the same input range, using a TensorFlow 1.x-style graph and session via tensorflow.compat.v1.
In [1]:
import tensorflow.compat.v1 as tf
import matplotlib.pyplot as plt
import numpy as np
from tensorflow.python.framework import ops
ops.reset_default_graph()
tf.disable_eager_execution()
sess = tf.Session()
     
In [2]:
x_vals = np.linspace(start=-10, stop=10, num=100)
print(sess.run(tf.nn.relu([-3., 3., 10.])))
y_relu = sess.run(tf.nn.relu(x_vals))
     
[ 0.  3. 10.]
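
ReLU clips negatives to zero: relu(x) = max(0, x), which is why -3. maps to 0. above. As a quick cross-check (a sketch, not part of the original run; y_relu_np is an illustrative name), the same curve can be reproduced in plain NumPy:

y_relu_np = np.maximum(0., x_vals)     # elementwise max(0, x)
assert np.allclose(y_relu_np, y_relu)  # matches the TensorFlow result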
     
In [3]:
plt.plot(x_vals, y_relu, 'b:', label='ReLU', linewidth=2)
plt.ylim([-5, 11])
plt.legend(loc='upper left')
plt.show()
     
In [4]:
x_vals = np.linspace(start=-10, stop=10, num=100)
print(sess.run(tf.nn.relu6([-3., 3., 10.])))
y_relu6 = sess.run(tf.nn.relu6(x_vals))
     
[0. 3. 6.]
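
ReLU6 additionally caps the output at 6, i.e. relu6(x) = min(max(0, x), 6), which is why 10. maps to 6. above. A minimal NumPy equivalent (an illustrative sketch):

y_relu6_np = np.minimum(np.maximum(0., x_vals), 6.)  # clip to [0, 6]
assert np.allclose(y_relu6_np, y_relu6)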
     
In [6]:
plt.plot(x_vals, y_relu6, 'g-.', label='ReLU6', linewidth=2)
plt.ylim([-5, 11])
plt.legend(loc='upper left')
plt.show()
     
In [7]:
x_vals = np.linspace(start=-10, stop=10, num=100)
print(sess.run(tf.nn.sigmoid([-1., 0., 1.])))
y_sigmoid = sess.run(tf.nn.sigmoid(x_vals))
     
[0.26894143 0.5        0.7310586 ]
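
The sigmoid is sigmoid(x) = 1 / (1 + exp(-x)), hence sigmoid(0) = 0.5 as printed. A NumPy sketch for comparison (not part of the original run):

y_sigmoid_np = 1. / (1. + np.exp(-x_vals))  # logistic function
assert np.allclose(y_sigmoid_np, y_sigmoid)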
     
In [8]:
plt.plot(x_vals, y_sigmoid, 'r--', label='Sigmoid', linewidth=2)
plt.ylim([0, 1])
plt.legend(loc='upper left')
plt.show()
     
In [9]:
x_vals = np.linspace(start=-10, stop=10, num=100)
print(sess.run(tf.nn.tanh([-1., 0., 1.])))
y_tanh = sess.run(tf.nn.tanh(x_vals))
     
[-0.7615942  0.         0.7615942]
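
tanh squashes inputs into (-1, 1) and is zero-centered: tanh(x) = (e^x - e^-x) / (e^x + e^-x). NumPy ships the same function, so a cross-check (illustrative) is one line:

assert np.allclose(np.tanh(x_vals), y_tanh)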
     
In [10]:
plt.plot(x_vals, y_tanh, 'b:', label='Tanh', linewidth=2)
plt.ylim([-2, 2])
plt.legend(loc='upper left')
plt.show()
     
In [11]:
x_vals = np.linspace(start=-10, stop=10, num=100)
print(sess.run(tf.nn.softsign([-1., 0., 1.])))
y_softsign = sess.run(tf.nn.softsign(x_vals))
     
[-0.5  0.   0.5]
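
Softsign is a polynomial alternative to tanh: softsign(x) = x / (1 + |x|), hence the ±0.5 at x = ±1 above. A NumPy sketch (y_softsign_np is an illustrative name):

y_softsign_np = x_vals / (1. + np.abs(x_vals))
assert np.allclose(y_softsign_np, y_softsign)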
     
In [13]:
plt.plot(x_vals, y_softsign, 'g-.', label='Softsign', linewidth=2)
plt.ylim([-1, 1])
plt.legend(loc='upper left')
plt.show()
     
In [18]:
x_vals = np.linspace(start=-10, stop=10, num=100)  # same range as the other activations so the combined plot below lines up
print(sess.run(tf.nn.softplus([-1., 0., 1.])))
y_softplus = sess.run(tf.nn.softplus(x_vals))
     
[0.31326166 0.6931472  1.3132616 ]
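
Softplus is a smooth approximation of ReLU: softplus(x) = log(1 + e^x), so softplus(0) = log 2 ≈ 0.6931 as printed. A NumPy sketch (fine for this x range; a numerically stable version would use np.logaddexp):

y_softplus_np = np.log1p(np.exp(x_vals))
assert np.allclose(y_softplus_np, y_softplus)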
     
In [19]:
plt.plot(x_vals, y_softplus, 'r--', label='Softplus', linewidth=2)
plt.ylim([-2, 15])
plt.legend(loc='upper left')
plt.show()
     
In [20]:
x_vals = np.linspace(start=-10, stop=10, num=100)
print(sess.run(tf.nn.elu([-1., 0., 1.])))
y_elu = sess.run(tf.nn.elu(x_vals))
     
[-0.63212055  0.          1.        ]
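
ELU behaves like ReLU for positive inputs but decays smoothly toward -1 for negative ones: elu(x) = x if x > 0 else alpha * (exp(x) - 1), with alpha fixed at 1 in tf.nn.elu, so elu(-1) = e^-1 - 1 ≈ -0.6321 as printed. A NumPy sketch:

y_elu_np = np.where(x_vals > 0., x_vals, np.exp(x_vals) - 1.)
assert np.allclose(y_elu_np, y_elu)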
     
In [22]:
plt.plot(x_vals, y_elu, 'k-', label='ExpLU', linewidth=0.5)
plt.ylim([-2, 10])
plt.legend(loc='upper left')
plt.show()
     
In [12]:
plt.plot(x_vals, y_softplus, 'r--', label='Softplus', linewidth=2)
plt.plot(x_vals, y_relu, 'b:', label='ReLU', linewidth=2)
plt.plot(x_vals, y_relu6, 'g-.', label='ReLU6', linewidth=2)
plt.plot(x_vals, y_elu, 'k-', label='ExpLU', linewidth=0.5)
plt.ylim([-5, 11])
plt.legend(loc='upper left')
plt.show()
     
In [11]:
plt.plot(x_vals, y_sigmoid, 'r--', label='Sigmoid', linewidth=2)
plt.plot(x_vals, y_tanh, 'b:', label='Tanh', linewidth=2)
plt.plot(x_vals, y_softsign, 'g-.', label='Softsign', linewidth=2)
plt.ylim([-2, 2])
plt.legend(loc='upper left')
plt.show()
     