From: Chris Troutner
Just for posterity, here is an example of building a custom neural network that uses tapped delay lines on its inputs.
-Chris Troutner



%=========================================================================>
% CREATE A CUSTOM NEURAL NETWORK

%Create a new network
global net;
net = network;

%2 Inputs
net.numInputs = 2;

%2 Layers
net.numLayers = 2;

%All Layers Biased
net.biasConnect = ones(net.numLayers,1);

%Connect the different inputs to the first set of layers
net.inputConnect(1,1) = 1; %Input 1 to layer 1
net.inputConnect(1,2) = 1; %Input 2 to layer 1

%Interconnect the hidden layers
net.layerConnect(2,1) = 1; %Connect Layer 1 to Layer 2
% net.layerConnect(3,2) = 1; %Connect Layer 2 to Layer 3


%Assign Output Nodes
net.outputConnect = [0 1]; %Set Layer 2 as the output.

%Assign Target Nodes for Training
net.targetConnect = [0 1]; %Set Layer 2 as the target output.

%These ranges are the input min and max values.
net.inputs{1}.range = minmax(SCORE(:,1:20)'); %Range for Input 1
net.inputs{2}.range = minmax(BuySellSig'); %Range for Input 2


%Initialize the network with the initlay function
net.initFcn = 'initlay';
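%(Note: 'initlay' delegates initialization to each layer's own initFcn,
% and 'initwb' below in turn delegates to the initFcn assigned to each
% individual weight and bias, so the 'rands' settings take effect when
% init(net) is called.)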


%Set the layer properties for layer 1
net.layers{1}.size = HiddenLayerSize;
net.layers{1}.transferFcn = 'tansig';
net.layers{1}.initFcn = 'initwb';

% %Set the layer properties for layer 2
% net.layers{2}.size = 2;
% net.layers{2}.transferFcn = 'tansig';
% net.layers{2}.initFcn = 'initwb';

%Set the layer properties for layer 2
net.layers{2}.size = 1;
net.layers{2}.transferFcn = 'purelin';
net.layers{2}.initFcn = 'initwb';

%Tapped Delay Lines
%[0 1 2] = u(t), u(t-1), u(t-2) as inputs
net.inputWeights{1,1}.delays = [0 1 2]; %Delays from Input 1 to Layer 1
net.inputWeights{1,2}.delays = [0 1 2]; %Delays from Input 2 to Layer 1
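%(Note: with delays [0 1 2] each input weight matrix holds one block of
% columns per delay, e.g. net.IW{1,1} ends up HiddenLayerSize-by-(3 x
% size of Input 1). To actually use the delays, the data passed to
% train/sim should be a time sequence, i.e. a cell array with one time
% step per cell.)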


%Set each weight matrix to be random
net.inputWeights{1,1}.initFcn = 'rands'; %Input Weights from Input 1 to Layer 1
net.inputWeights{1,2}.initFcn = 'rands'; %Input Weights from Input 2 to Layer 1


%Initialize each layer weight as random.
net.layerWeights{2, 1}.initFcn = 'rands'; %Layer 1 to Layer 2
% net.layerWeights{3, 2}.initFcn = 'rands'; %Layer 2 to Layer 3

%Set each Bias weight to be random.
net.biases{1}.initFcn = 'rands'; %Bias into Layer 1
net.biases{2}.initFcn = 'rands'; %Bias into Layer 2


%Network Performance Function
net.performFcn = 'mse'; %mse = mean squared error

%Set the network training parameters
net.trainFcn = 'traingdx';
net.trainParam.goal = 0.00001;
net.trainParam.epochs = 500;
net.trainParam.show = NaN; %NaN means don't display training progress
net.trainParam.min_grad = 0;
%net.trainParam.mu_max = inf;
net.trainParam.max_fail = 3; %Max validation failures before stopping
net.trainParam.time = 2*60; %Max training time of 2 minutes (in seconds)


% clear('TrainingInput', 'TrainingOutput', 'ValidationInput', ...
% 'ValidationOutput', 'TestingInput', 'TestingOutput', ...
% 'SCORE', 'BuySellSig');
% pack;
%<=========================================================================
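
For anyone wiring this up end to end, here is a minimal usage sketch. The variable names and shapes (SCORE with 20 features per time step, BuySellSig as a single signal, TrainingOutput as the target series) are assumptions carried over from the snippet above rather than tested code; the main point is that, because of the tapped delays, the inputs are converted to cell-array time sequences with con2seq before calling train and sim.

%Convert concurrent data to sequences (one time step per cell)
P1 = con2seq(SCORE(:,1:20)');   %Input 1: 20-by-N matrix -> 1-by-N cell array
P2 = con2seq(BuySellSig');      %Input 2: 1-by-N matrix  -> 1-by-N cell array
P  = [P1; P2];                  %Stack both inputs: 2-by-N cell array
T  = con2seq(TrainingOutput');  %Assumed target sequence for the layer 2 output

net = init(net);                %Apply the 'rands' initializations
[net, tr] = train(net, P, T);   %Train using the tapped-delay inputs
Y = cell2mat(sim(net, P));      %Simulate and convert back to a row vector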