Documenti di Didattica
Documenti di Professioni
Documenti di Cultura
3
Molla Hafizur Rahman
2. Source code:
"""
Created on Tue Mar 5 09:39:38 2019
@author: mhrahman
"""
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
#activation function
def sigmoid(X):
    """Logistic sigmoid 1/(1+exp(-X)), applied element-wise to X."""
    neg_exp = np.exp(-X)
    return 1 / (1 + neg_exp)
# Gradient of sigma
def Grad_sigm(X):
    """Derivative of the sigmoid: sigma(X) * (1 - sigma(X)), element-wise."""
    s = sigmoid(X)
    return s * (1 - s)
# Backpropagation
def back_prop(Y_pred, Y_real, X, bias, W_1, W_2, H, Z_1, lamb):
    """Backpropagation for a one-hidden-layer network with L2 regularization.

    Parameters
    ----------
    Y_pred, Y_real : network output and targets, shape (n_samples, n_out).
    X : input matrix WITHOUT the bias column; the bias column is prepended here.
    bias : column of ones, shape (n_samples, 1).
    W_1, W_2 : first- and second-layer weight matrices (bias weight in column 0).
    H : hidden-layer activations including bias column.
    Z_1 : hidden-layer pre-activations.
    lamb : L2 regularization strength.

    Returns
    -------
    (Del_W1, Del_W2) : averaged gradients for W_1 and W_2.
    """
    X = np.concatenate((bias, X), axis=1)  # prepend bias column to inputs
    # BUG FIX: the original did `regu_W_1 = W_1` (an alias, not a copy) and then
    # zeroed column 0, silently clobbering the caller's weight matrices.
    # Copy first so only the regularization term excludes the bias weights.
    regu_W_1 = W_1.copy()
    regu_W_1[:, 0] = 0  # bias weights are not regularized
    regu_W_2 = W_2.copy()
    regu_W_2[:, 0] = 0
    b_2 = Y_pred - Y_real  # output-layer error
    # Hidden-layer error: drop W_2's bias column, scale by sigmoid gradient.
    b_1 = np.dot(b_2, W_2[:, 1:]) * Grad_sigm(Z_1)
    Del_W2 = 1 / len(Y_pred) * (np.dot(b_2.T, H) + lamb * regu_W_2)
    Del_W1 = 1 / len(Y_pred) * (np.dot(b_1.T, X) + lamb * regu_W_1)
    return Del_W1, Del_W2
## Back prop check
# =============================================================================
# W1_grad = pd.read_csv('./data/W1_grad.csv',index_col = False, header = None)
# W2_grad = pd.read_csv('./data/W2_grad.csv',index_col = False, header = None)
# W1_grad = W1_grad.values
# W2_grad = W2_grad.values
# =============================================================================
# Train with batch gradient descent (batch_grad, X, b, Y and the initial
# weights are defined elsewhere in this file) and plot the loss curve.
final = batch_grad(X, b, Y, initial_W1, initial_W2, 3, 0.2, 500)
plt.plot(final)
plt.title("Loss vs number of iterations")
plt.xlabel("Iterations")  # fixed typo: was "Iteraions"
plt.ylabel("Loss")