
houseNeuralSimulation

PURPOSE

houseNeuralSimulation : calculates the output of a neural network.

SYNOPSIS

function [Y,Xf,Af] = houseNeuralSimulation(X,net)

DESCRIPTION

 houseNeuralSimulation : calculates the output of a neural network.
 Provides a speed improvement over calling net(X) directly.

 Generated by Neural Network Toolbox function genFunction, 14-Mar-2016 14:57:51.

 [Y] = houseNeuralSimulation(X,net) takes these arguments:

   X = 1xTS cell, 1 input over TS timesteps
   Each X{1,ts} = 21xQ matrix, input #1 at timestep ts.

 and returns:
   Y = 1xTS cell of 1 output over TS timesteps.
   Each Y{1,ts} = 2xQ matrix, output #1 at timestep ts.

 where Q is the number of samples (or series) and TS is the number of timesteps.
 This file is part of SuperSeggerOpti.
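
 A minimal usage sketch, passing X as a plain matrix rather than a cell
 (the variable names features and class below are hypothetical; net is the
 trained network supplied by the caller):

   % features : 21xQ matrix, one column of 21 inputs per sample
   Y = houseNeuralSimulation( features, net );  % 2xQ matrix of class scores
   [~,class] = max( Y, [], 1 );                 % most likely class per sample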

CROSS-REFERENCE INFORMATION

This function calls:
This function is called by:

SUBFUNCTIONS

function y = mapminmax_apply(x,settings_gain,settings_xoffset,settings_ymin)
function a = softmax_apply(n)
function a = tansig_apply(n)

SOURCE CODE

function [Y,Xf,Af] = houseNeuralSimulation(X,net)
% houseNeuralSimulation : calculates the output of a neural network.
% Provides a speed improvement over calling net(X) directly.
%
% Generated by Neural Network Toolbox function genFunction, 14-Mar-2016 14:57:51.
%
% [Y] = houseNeuralSimulation(X,net) takes these arguments:
%
%   X = 1xTS cell, 1 input over TS timesteps
%   Each X{1,ts} = 21xQ matrix, input #1 at timestep ts.
%
% and returns:
%   Y = 1xTS cell of 1 output over TS timesteps.
%   Each Y{1,ts} = 2xQ matrix, output #1 at timestep ts.
%
% where Q is the number of samples (or series) and TS is the number of timesteps.
% This file is part of SuperSeggerOpti.

% ===== NEURAL NETWORK CONSTANTS =====

% Input 1
if isfield(net.input.processSettings{1},'xoffset')
    x1_step1_xoffset = net.input.processSettings{1}.xoffset;
else
    disp('no net offset');
    x1_step1_xoffset = 0;
end

if isfield(net.input.processSettings{1},'gain')
    x1_step1_gain = net.input.processSettings{1}.gain;
else
    disp('no net gain');
    x1_step1_gain = 1; % a unit gain leaves the input unscaled
end

if isfield(net.input.processSettings{1},'ymin')
    x1_step1_ymin = net.input.processSettings{1}.ymin;
else
    disp('no net ymin');
    x1_step1_ymin = 0;
end

% Layer 1
b1 = net.b{1};
IW1_1 = net.IW{1};

% Layer 2
b2 = net.b{2};
LW2_1 = net.LW{2};

% ===== SIMULATION ========

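% Optional feature selection: if the net provides a 'keep' index, restrict
% the input to those feature rows before scaling.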
if isfield(net.input.processSettings{1},'keep')
    keep = net.input.processSettings{1}.keep;
    X = X(keep,:);
end

% Format Input Arguments
isCellX = iscell(X);
if ~isCellX, X = {X}; end

% Dimensions
TS = size(X,2); % timesteps
if ~isempty(X)
    Q = size(X{1},2); % samples/series
else
    Q = 0;
end

% Allocate Outputs
Y = cell(1,TS);

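% The network is a two-layer feed-forward classifier: a tansig hidden layer
% (weights IW1_1, bias b1) followed by a softmax output layer (weights LW2_1,
% bias b2) that produces the 2xQ class scores.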
% Time loop
for ts=1:TS

    % Input 1
    Xp1 = mapminmax_apply(X{1,ts},x1_step1_gain,x1_step1_xoffset,x1_step1_ymin);

    % Layer 1
    a1 = tansig_apply(repmat(b1,1,Q) + IW1_1*Xp1);

    % Layer 2
    a2 = softmax_apply(repmat(b2,1,Q) + LW2_1*a1);

    % Output 1
    Y{1,ts} = a2;
end

% Final Delay States
Xf = cell(1,0);
Af = cell(2,0);

% Format Output Arguments
if ~isCellX, Y = cell2mat(Y); end
end

% ===== MODULE FUNCTIONS ========

% Map Minimum and Maximum Input Processing Function
function y = mapminmax_apply(x,settings_gain,settings_xoffset,settings_ymin)
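% Rescales each feature row: y = (x - xoffset) .* gain + ymin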
y = bsxfun(@minus,x,settings_xoffset);
y = bsxfun(@times,y,settings_gain);
y = bsxfun(@plus,y,settings_ymin);
end

% Competitive Soft Transfer Function
function a = softmax_apply(n)
nmax = max(n,[],1);
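% Shifting by the column-wise max before exponentiating prevents overflow
% and leaves the softmax result unchanged.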
n = bsxfun(@minus,n,nmax);
numer = exp(n);
denom = sum(numer,1);
denom(denom == 0) = 1;
a = bsxfun(@rdivide,numer,denom);
end

% Sigmoid Symmetric Transfer Function
function a = tansig_apply(n)
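% Numerically equivalent to tanh(n).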
a = 2 ./ (1 + exp(-2*n)) - 1;
end
