-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathNNLayer.m
More file actions
69 lines (62 loc) · 2.53 KB
/
NNLayer.m
File metadata and controls
69 lines (62 loc) · 2.53 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
classdef NNLayer < handle
    % NNLayer  Single fully-connected neural-network layer (handle class).
    %
    %   layer = NNLayer(inputSize, outputSize)               % default 'tanh'
    %   layer = NNLayer(inputSize, outputSize, actFcnMethod) % 'tanh' | 'sigmoid' | 'none'
    %
    % Weights are initialised uniformly in [0,1] via rand; the bias of a
    % single-output (final) layer is initialised to zero and its activation
    % is forced to identity (see constructor TODO).
    properties (Access=public)
        X {mustBeNumeric}         % Last input column vector seen by forward()
        neuronNum {mustBeNumeric} % Expected input size (rows of x in forward)
        W {mustBeNumeric}         % Weight matrix, size [outputSize x inputSize]
        b {mustBeNumeric}         % Bias column vector, size [outputSize x 1]
        h {mustBeNumeric}         % Pre-activation (W*X + b) cached by forward()
        actFcnMethod {mustBeMember(actFcnMethod, {'tanh', 'sigmoid', 'none'})}= 'tanh' % Activation function method
        actFcn = @tanh            % Activation function handle
    end
    methods
        function self = NNLayer(inputSize, outputSize, actFcnMethod)
            % Construct a layer mapping inputSize inputs to outputSize outputs.
            %
            % inputSize     - number of inputs (rows expected by forward)
            % outputSize    - number of neurons in this layer
            % actFcnMethod  - optional: 'tanh' (default), 'sigmoid', or 'none'
            self.neuronNum = inputSize;
            % Currently, only allow a single neuron in the output layer.
            % NOTE: when outputSize == 1 any supplied actFcnMethod is
            % ignored and the activation is forced to identity. %TODO
            if outputSize == 1
                self.W = rand(1, inputSize);
                self.b = zeros(outputSize, 1);
                self.actFcnMethod = 'none';
                self.actFcn = @(x)x;
            elseif nargin > 2
                self.actFcnMethod = actFcnMethod;
                switch actFcnMethod
                    case 'tanh'
                        self.actFcn = @tanh;
                    case 'sigmoid'
                        % Logistic sigmoid 1/(1+exp(-x)).
                        % (Previous code used the undefined name 'e' and the
                        % wrong sign: 1./(1+e.^x) errors and would be decreasing.)
                        self.actFcn = @(x) 1./(1+exp(-x));
                    case 'none'
                        self.actFcn = @(x)x;
                end
                self.W = rand(outputSize, inputSize);
                self.b = rand(outputSize, 1);
            else
                % No method supplied: keep the default 'tanh' activation.
                self.W = rand(outputSize, inputSize);
                self.b = rand(outputSize, 1);
            end
        end
        % Forward pass: y = actFcn(W*x + b)
        function y = forward(self, x, varargin)
            % forward  Compute the layer output for input column vector x.
            %
            % With a third argument, behaviour is selected by the CALLER'S
            % VARIABLE NAME via inputname(2) (fragile — the second positional
            % argument must literally be named X_temp, W_temp, or b_temp at
            % the call site):
            %   X_temp - treat x as a temporary input (state X/h not updated)
            %   W_temp - treat x as a temporary weight matrix applied to stored X
            %   b_temp - treat x as a temporary bias added to W*X
            if size(x,1) ~= self.neuronNum
                error('Input size must equal to neurons')
            end
            if isempty(varargin)
                self.X = x;
                y = self.actFcn(self.W * self.X + self.b);
                self.h = self.W * self.X + self.b;  % cache pre-activation
            else
                switch inputname(2)
                    case 'X_temp' % Manual input X data
                        y = self.actFcn(self.W * x + self.b);
                    case 'W_temp' % Manual input weight data
                        y = self.actFcn(x * self.X + self.b);
                        % NOTE(review): h is refreshed from the STORED W, not
                        % the temporary weights x — confirm this is intended.
                        self.h = self.W * self.X + self.b;
                    case 'b_temp' % Manual input bias data
                        y = self.actFcn(self.W * self.X + x);
                        self.h = self.W * self.X + self.b;
                end
            end
        end
    end
end