activationFunction.py
__author__ = 'shay-macbook'

import numpy as np


class ActivationFunction:
    """Wraps a named activation function and its derivative."""

    SUPPORTED = ('tanh', 'sigmoid', 'relu', 'linear')

    def __init__(self, activationFunction):
        self.activationFunction = activationFunction
        if activationFunction not in self.SUPPORTED:
            raise ValueError('Error! no activation function found: %r' % activationFunction)

    def activate(self, x):
        # Apply the activation element-wise; x may be a scalar or a numpy array.
        if self.activationFunction == 'tanh':
            output = np.tanh(x)
        elif self.activationFunction == 'sigmoid':
            output = 1 / (1 + np.exp(-x))
        elif self.activationFunction == 'linear':
            output = x
        elif self.activationFunction == 'relu':
            # np.maximum is element-wise; the builtin max() fails on arrays.
            output = np.maximum(0, x)
        return output

    def derivative(self, x):
        # Derivative of the activation, evaluated at the pre-activation value x.
        if self.activationFunction == 'tanh':
            t = np.tanh(x)
            output = 1 - t * t
        elif self.activationFunction == 'sigmoid':
            s = 1 / (1 + np.exp(-x))
            output = s * (1 - s)
        elif self.activationFunction == 'linear':
            # d/dx(x) = 1, not x.
            output = np.ones_like(x, dtype=float)
        elif self.activationFunction == 'relu':
            output = 1. * (x > 0)
        return output
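

# --- Usage sketch (illustrative, not part of the original file) ---
# Checks each analytic derivative against a central finite difference,
# (f(x + h) - f(x - h)) / (2h). The test points and tolerance below are
# assumed values chosen for the demo; points avoid 0, where relu's
# gradient is undefined and the finite difference would disagree.
if __name__ == '__main__':
    pts = np.array([-2.0, -0.5, 0.5, 2.0])
    h = 1e-5
    for name in ActivationFunction.SUPPORTED:
        f = ActivationFunction(name)
        numeric = (f.activate(pts + h) - f.activate(pts - h)) / (2 * h)
        print(name, np.allclose(f.derivative(pts), numeric, atol=1e-4))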