# Build the parity dataset: every 8-bit pattern 0..255 as an input row of
# bits, labeled with its parity (number of 1-bits mod 2).
_bit_strings = [format(i, '08b') for i in range(256)]
input_list = np.asarray([[int(c) for c in s] for s in _bit_strings])
label = np.asarray([[s.count('1') % 2] for s in _bit_strings])
# Fully-connected (affine) layer: out = x @ W + b.
class Linear:
    def __init__(self, m, n):
        # Weights (m x n) and bias (1 x n) drawn from a standard normal.
        self.W, self.b = np.random.randn(m, n), np.random.randn(1, n)
        # Gradients; populated by backward().
        self.dW, self.db = None, None

    def forward(self, x):
        """Affine transform of a (k x m) batch; caches x for backward."""
        self.x = x
        return np.dot(x, self.W) + self.b

    def backward(self, dout):
        """Given upstream gradient dout (k x n), store dW/db and return dL/dx."""
        self.dW = np.dot(self.x.T, dout)
        self.db = np.sum(dout, axis=0)
        return np.dot(dout, self.W.T)
# ReLU activation: elementwise max(0, x).
class ReLU:
    def __init__(self):
        pass

    def forward(self, x):
        """Return max(x, 0) elementwise; caches the <=0 mask for backward.

        Fix: the original aliased `out = x` and zeroed masked entries,
        mutating the caller's array in place. Work on a copy instead.
        """
        self.mask = (x <= 0)
        out = x.copy()
        out[self.mask] = 0
        return out

    def backward(self, dout):
        """Pass gradient through only where the forward input was > 0.

        Fix: copy dout rather than zeroing it in place.
        """
        dx = dout.copy()
        dx[self.mask] = 0
        return dx
# Logistic sigmoid activation.
class Sigmoid:
    def __init__(self):
        pass

    def forward(self, x):
        """sigma(x) = 1 / (1 + e^-x); the output is cached for backward."""
        self.o = 1.0 / (1.0 + np.exp(-x))
        return self.o

    def backward(self, dout):
        """Chain rule with d(sigma)/dx = sigma * (1 - sigma)."""
        return dout * self.o * (1.0 - self.o)
# Hyperbolic tangent activation.
class Tanh:
    def __init__(self):
        pass

    def forward(self, x):
        """Return tanh(x), caching the output for backward.

        Fix: the original evaluated (e^x - e^-x) / (e^x + e^-x) directly,
        which overflows to inf/nan for large |x|; np.tanh computes the
        same function with a numerically stable implementation.
        """
        self.out = np.tanh(x)
        return self.out

    def backward(self, dout):
        # d(tanh)/dx = 1 - tanh(x)^2
        return dout * (1.0 - self.out ** 2)
# Sum-of-squared-errors loss (note: summed over the batch, not averaged).
class Loss:
    def __init__(self):
        pass

    def forward(self, y, ybar):
        """Return sum((y - ybar)^2); caches y and ybar for backward."""
        self.ybar = ybar
        self.y = y
        return np.sum((y - ybar) ** 2)

    def backward(self, dout):
        """Gradient of the loss w.r.t. ybar.

        Fix: the original ignored `dout` and always returned -2*(y - ybar),
        breaking the chain rule for any upstream scaling. Multiply by dout;
        existing callers pass dout=1, so their behavior is unchanged.
        """
        return dout * (-2.0 * (self.y - self.ybar))
# Two-layer MLP: Linear -> Tanh -> Linear -> Sigmoid, trained on SSE loss.
class TwoLayer:
    def __init__(self, m, n, o):
        self.linear1, self.linear2 = Linear(m, n), Linear(n, o)
        self.act1, self.act2 = Tanh(), Sigmoid()
        self.loss = Loss()
        # Previous update steps, reused by the alpha term in update().
        self.last_dW1, self.last_dW2 = 0, 0
        self.last_db1, self.last_db2 = 0, 0

    def forward(self, x):
        """Run a batch through both layers; caches the prediction in self.ybar."""
        hidden = self.act1.forward(self.linear1.forward(x))
        self.ybar = self.act2.forward(self.linear2.forward(hidden))
        return self.ybar

    def backward(self, y):
        """Backpropagate from the SSE loss; stores the scalar loss in self.L."""
        self.L = self.loss.forward(y, self.ybar)
        g = self.loss.backward(1)
        g = self.linear2.backward(self.act2.backward(g))
        self.linear1.backward(self.act1.backward(g))

    def update(self, eta, alpha):
        """Gradient step with an alpha-weighted replay of the previous step.

        NOTE(review): the previous step is *added* back (+alpha*last_d*),
        the opposite sign of classical momentum — preserved as written.
        """
        self.linear1.W = self.linear1.W - eta * self.linear1.dW + alpha * self.last_dW1
        self.linear1.b = self.linear1.b - eta * self.linear1.db + alpha * self.last_db1
        self.last_dW1, self.last_db1 = eta * self.linear1.dW, eta * self.linear1.db
        self.linear2.W = self.linear2.W - eta * self.linear2.dW + alpha * self.last_dW2
        self.linear2.b = self.linear2.b - eta * self.linear2.db + alpha * self.last_db2
        self.last_dW2, self.last_db2 = eta * self.linear2.dW, eta * self.linear2.db
# Three-layer MLP: Linear -> ReLU -> Linear -> Tanh -> Linear -> Sigmoid.
class ThreeLayer:
    def __init__(self, m, n, o, p):
        self.linear1, self.linear2, self.linear3 = Linear(m, n), Linear(n, o), Linear(o, p)
        self.act1, self.act2, self.act3 = ReLU(), Tanh(), Sigmoid()
        self.loss = Loss()
        # Previous update steps, reused by the alpha term in update().
        self.last_dW1, self.last_dW2, self.last_dW3 = 0.0, 0.0, 0.0
        self.last_db1, self.last_db2, self.last_db3 = 0.0, 0.0, 0.0

    def forward(self, x):
        """Run a batch through all three layers; caches the prediction."""
        h1 = self.act1.forward(self.linear1.forward(x))
        h2 = self.act2.forward(self.linear2.forward(h1))
        self.ybar = self.act3.forward(self.linear3.forward(h2))
        return self.ybar

    def backward(self, y):
        """Backpropagate from the SSE loss; stores the scalar loss in self.L."""
        self.L = self.loss.forward(y, self.ybar)
        g = self.loss.backward(1)
        g = self.linear3.backward(self.act3.backward(g))
        g = self.linear2.backward(self.act2.backward(g))
        self.linear1.backward(self.act1.backward(g))

    def update(self, eta, alpha):
        """Gradient step with an alpha-weighted replay of the previous step.

        NOTE(review): the previous step is *added* back (+alpha*last_d*),
        the opposite sign of classical momentum — preserved as written.
        """
        self.linear1.W = self.linear1.W - eta * self.linear1.dW + alpha * self.last_dW1
        self.linear1.b = self.linear1.b - eta * self.linear1.db + alpha * self.last_db1
        self.last_dW1, self.last_db1 = eta * self.linear1.dW, eta * self.linear1.db
        self.linear2.W = self.linear2.W - eta * self.linear2.dW + alpha * self.last_dW2
        self.linear2.b = self.linear2.b - eta * self.linear2.db + alpha * self.last_db2
        self.last_dW2, self.last_db2 = eta * self.linear2.dW, eta * self.linear2.db
        self.linear3.W = self.linear3.W - eta * self.linear3.dW + alpha * self.last_dW3
        self.linear3.b = self.linear3.b - eta * self.linear3.db + alpha * self.last_db3
        self.last_dW3, self.last_db3 = eta * self.linear3.dW, eta * self.linear3.db
#initialize the model
# NOTE(review): indentation was lost in this paste; the nesting below is
# reconstructed from the apparent intent — verify against the original file.
# Hyperparameters: epochs per training run and progress-print interval.
max_epochs, chk_epochs = 15000, 1000
# NOTE(review): last_dW/last_db are never read below — apparently dead.
last_dW, last_db = 0.0, 0.0
# Learning rate and the alpha coefficient consumed by TwoLayer.update().
eta, alpha = 0.003, 0.001
loss_min = []   # loss curve of the best run so far
epoch_min = []  # epoch indices matching loss_min
# NOTE(review): keepRetraining and minLoss are not defined in this chunk;
# they must be assigned earlier in the file or this raises NameError.
# The loop apparently retrains fresh models until the loss target is met.
while(keepRetraining >= minLoss):
    loss = []
    epoch = []
    model = TwoLayer(8, 10, 1)  # 8 input bits -> 10 hidden -> 1 output
    for e in range(max_epochs):
        model.forward(input_list)
        model.backward(label)
        model.update(eta, alpha)
        loss.append(model.L)
        epoch.append(e)
        # Periodic progress report: current predictions and loss.
        if (e+1)%chk_epochs==0:
            print(model.ybar.T)
            print('Epoch %3d: loss=%.6f'%(e+1, model.L))
    # Record the best run so far and dump its results to disk.
    if(model.L < minLoss):
        minLoss = model.L
        loss_min = loss
        epoch_min = epoch
        minOutput = model.ybar.T
        fp = open("D:\\Justin\'s_University\\大四上\\深度學習\\HW1\\output\\"+str(minLoss)+".txt", "a")
        fp.write(str(minLoss))
        fp.close()
        fp = open("D:\\Justin\'s_University\\大四上\\深度學習\\HW1\\output\\"+str(minLoss)+"_output.txt", "a")
        fp.write(str(minOutput))
        fp.close()
        # Plot and save the best run's training curve.
        plt.style.use("ggplot")
        plt.figure()
        plt.plot(epoch_min, loss_min, label = "Training_Error")
        plt.xlabel("Epochs")
        plt.ylabel("Training Error")
        plt.legend()
        picname = str(minLoss)
        plt.savefig("D:\\Justin\'s_University\\大四上\\深度學習\\HW1\\output\\"+picname+".jpg")
        # NOTE(review): the message says .png but the figure is saved as .jpg.
        print(str(minLoss)+".png is saved")
        plt.show()
2 layers
Loss = 1.3197356837354326
3 layers
Loss = 1.0003163010187124
4 layers
Loss = 0.03333781335491803
Loss = 0.018032712342026344
5 layers
ReLU(128)->Tanh(64)->Sigmoid(32)->ReLU(16)->Sigmoid(1)
Loss = 0.0025766262860609504
$0 < \epsilon_1 < 1$, $0 < \epsilon_2 < 1$. $LCSS_{\delta,\epsilon_1,\epsilon_2,t,l}(S_p,S_q)$, denoted as $M(S_p,S_q)$: $M(S_p,S_q) = \begin{cases} 0, & \text{if } S_p=\emptyset \text{ or } S_q=\emptyset \\ local + M(S_p - S_{pn},\ S_q - S_{qn}), & \text{if } a \le \epsilon_1 \end{cases}$
Dec 15, 2021 — Kafka_QuickStart (includes download). Step 1: download: $ tar -xzf kafka_2.13-2.6.0.tgz ; $ cd kafka_2.13-2.6.0. Step 2: (1.) Start the "ZooKeeper" server: $ bin/zookeeper-server-start.sh config/zookeeper.properties
Jan 6, 2021Ubuntu20.04 Hadoop3.2.1 Spark3.0.0 https://towardsdatascience.com/assembling-a-personal-data-science-big-data-laboratory-in-a-raspberry-pi-4-or-vms-cluster-e4c5a0473025 https://www.linode.com/docs/databases/hadoop/how-to-install-and-set-up-hadoop-cluster/ https://medium.com/@jootorres_11979/how-to-set-up-a-hadoop-3-2-1-multi-node-cluster-on-ubuntu-18-04-2-nodes-567ca44a3b12
Dec 17, 2020Settings [TOC] Master Node sudo gedit /etc/network/interfaces or sudo gedit /etc/networks (testing) #interfaces(5) file used by ifup(8) abd ifdown(8)
Dec 17, 2020or
By clicking below, you agree to our terms of service.
New to HackMD? Sign up