From: <sch...@us...> - 2009-08-04 09:45:07
Revision: 6074
          http://octave.svn.sourceforge.net/octave/?rev=6074&view=rev
Author:   schloegl
Date:     2009-08-04 09:44:49 +0000 (Tue, 04 Aug 2009)

Log Message:
-----------
bug fix

Modified Paths:
--------------
    trunk/octave-forge/extra/NaN/inst/train_sc.m

Modified: trunk/octave-forge/extra/NaN/inst/train_sc.m
===================================================================
--- trunk/octave-forge/extra/NaN/inst/train_sc.m	2009-08-04 05:01:52 UTC (rev 6073)
+++ trunk/octave-forge/extra/NaN/inst/train_sc.m	2009-08-04 09:44:49 UTC (rev 6074)
@@ -624,7 +624,7 @@
 	if strncmpi(MODE.TYPE,'LD',2) || strncmpi(MODE.TYPE,'FDA',3),
 		%if NC(1)==2, NC(1)=1; end;	% linear two class problem needs only one discriminant
-		CC.weights = repmat(NaN,NC(2),NC(1));	% memory allocation
+		CC.weights = repmat(NaN,NC(2),NC(3));	% memory allocation
 		type = MODE.TYPE(3)-'0';
 		ECM0 = squeeze(sum(ECM,3));	%decompose ECM
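A minimal sketch (not part of the commit, toy sizes made up) of why the fix indexes NC(3): the extended covariance matrices ECM are stored as a (features+1) x (features+1) x classes array, so the number of discriminant columns to allocate is the third dimension of size(ECM), not the first:

	ECM = rand(4,4,3);                       % 3 features + bias, 3 classes (toy data)
	NC  = size(ECM);                         % NC = [4 4 3]
	CC.weights = repmat(NaN, NC(2), NC(3));  % 4x3: one weight vector per class, as in the fixed line
	% the old NC(1) only equals the number of classes by accident: it is features+1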
From: <sch...@us...> - 2009-08-05 19:47:37
Revision: 6078
          http://octave.svn.sourceforge.net/octave/?rev=6078&view=rev
Author:   schloegl
Date:     2009-08-05 19:47:24 +0000 (Wed, 05 Aug 2009)

Log Message:
-----------
add PLA and Winnow algorithm

Modified Paths:
--------------
    trunk/octave-forge/extra/NaN/inst/train_sc.m

Modified: trunk/octave-forge/extra/NaN/inst/train_sc.m
===================================================================
--- trunk/octave-forge/extra/NaN/inst/train_sc.m	2009-08-05 09:11:41 UTC (rev 6077)
+++ trunk/octave-forge/extra/NaN/inst/train_sc.m	2009-08-05 19:47:24 UTC (rev 6078)
@@ -44,6 +44,12 @@
 %  'NBC'      Naive Bayesian Classifier [6]
 %  'aNBC'     Augmented Naive Bayesian Classifier [6]
 %  'NBPW'     Naive Bayesian Parzen Window [9]
+%
+%  'PLA'      Perceptron Learning Algorithm [11]
+%             MODE.hyperparameter.alpha =
+%             w = w + alpha * e'*x
+%  'Winnow2'  Winnow2 algorithm [12]
+%
 %  'PSVM'     Proximal SVM [8]
 %             MODE.hyperparameter.nu  (default: 1.0)
 %  'LPM'      Linear Programming Machine
@@ -115,7 +121,11 @@
 % [10] R.-E. Fan, K.-W. Chang, C.-J. Hsieh, X.-R. Wang, and C.-J. Lin.
 %       LIBLINEAR: A Library for Large Linear Classification, Journal of Machine Learning Research 9(2008), 1871-1874.
 %       Software available at http://www.csie.ntu.edu.tw/~cjlin/liblinear
-
+% [11] http://en.wikipedia.org/wiki/Perceptron#Learning_algorithm
+% [12] Littlestone, N. (1988)
+%       "Learning Quickly When Irrelevant Attributes Abound: A New Linear-threshold Algorithm"
+%       Machine Learning 285-318(2)
+%       http://en.wikipedia.org/wiki/Winnow_(algorithm)
 % $Id: train_sc.m 2140 2009-07-02 12:03:55Z schloegl $
 % Copyright (C) 2005,2006,2007,2008,2009 by Alois Schloegl <a.s...@ie...>
@@ -244,7 +254,54 @@
 	CC.hyperparameter.c_value = MODE.hyperparameter.c_value;
 	CC.datatype = ['classifier:',lower(MODE.TYPE)];
+
+elseif ~isempty(strfind(lower(MODE.TYPE),'pla'))
+	% Perceptron Learning Algorithm
+
+	M = length(CC.Labels);
+	CC.weights = zeros(size(D,2)+1,M);
+	if ~isfield(MODE.hyperparameter,'alpha') && isempty(W)
+		for k = 1:size(D,1),
+			e = [1, D(k,:)] * CC.weights - (classlabel(k)==CC.Labels(k));
+			CC.weights = CC.weights + [1,D(k,:)]' * e ;
+		end;
+
+	elseif isfield(MODE.hyperparameter,'alpha') && isempty(W)
+		a = MODE.hyperparameter.alpha;
+		for k = 1:size(D,1),
+			e = [1, D(k,:)] * CC.weights - (classlabel(k)==CC.Labels(k));
+			CC.weights = CC.weights + a * [1,D(k,:)]' * e ;
+		end;
+
+	elseif ~isempty(W)
+		if isfield(MODE.hyperparameter,'alpha')
+			W = W*MODE.hyperparameter.alpha;
+		end;
+		for k = 1:size(D,1),
+			e = [1, D(k,:)] * CC.weights - (classlabel(k)==CC.Labels(k));
+			CC.weights = CC.weights + W(k) * [1,D(k,:)]' * e ;
+		end;
+	end
+	CC.datatype = ['classifier:',lower(MODE.TYPE)];
+
+
+elseif ~isempty(strfind(lower(MODE.TYPE),'winnow'))
+	% winnow algorithm
+
+	M = length(CC.Labels);
+	CC.weights = ones(size(D,2),M);
+	theta = size(D,2)/2;
+
+	for k = 1:size(D,1),
+		e = (1 + sign(D(k,:) * CC.weights - theta))/2 - (classlabel(k)==CC.Labels(k));
+		CC.weights = CC.weights.* 2^(D(k,:)' * e);
+	end;
+
+	CC.weights = [zeros(1,M), CC.weights];
+	CC.datatype = ['classifier:',lower(MODE.TYPE)];
+
+
 elseif ~isempty(strfind(lower(MODE.TYPE),'pls')) || ~isempty(strfind(lower(MODE.TYPE),'reg'))
 	% 4th version: support for weighted samples - work well with unequally distributed data:
 	% regression analysis, can handle sparse data, too.
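The new help text documents the perceptron update rule w = w + alpha * e'*x. A minimal stand-alone sketch of that rule on made-up two-class data (the data, the number of passes, and the variable names are illustrative only, not taken from the commit):

	X = [0 0; 0 1; 1 0; 1 1];          % toy samples in rows
	t = [-1; -1; -1; 1];               % AND-like targets coded as -1/+1
	alpha = 1;                         % learning rate (MODE.hyperparameter.alpha)
	w = zeros(3,1);                    % bias weight + 2 feature weights
	for iter = 1:10                    % a few passes over the data
		for k = 1:size(X,1)
			x = [1, X(k,:)];           % prepend bias term
			e = t(k) - sign(x * w);    % prediction error
			w = w + alpha * e * x';    % perceptron update  w = w + alpha*e'*x
		end
	end
	sign([ones(4,1), X] * w)           % reproduces t once the data are separated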
From: <sch...@us...> - 2009-08-29 23:17:45
Revision: 6172
          http://octave.svn.sourceforge.net/octave/?rev=6172&view=rev
Author:   schloegl
Date:     2009-08-29 23:17:25 +0000 (Sat, 29 Aug 2009)

Log Message:
-----------
PSVM: support weighting samples

Modified Paths:
--------------
    trunk/octave-forge/extra/NaN/inst/train_sc.m

Modified: trunk/octave-forge/extra/NaN/inst/train_sc.m
===================================================================
--- trunk/octave-forge/extra/NaN/inst/train_sc.m	2009-08-29 21:44:44 UTC (rev 6171)
+++ trunk/octave-forge/extra/NaN/inst/train_sc.m	2009-08-29 23:17:25 UTC (rev 6172)
@@ -491,9 +491,12 @@
 elseif ~isempty(strfind(lower(MODE.TYPE),'psvm'))
 	if ~isempty(W)
-		error(sprintf('Error TRAIN_SC: Classifier (%s) does not support weighted samples.',MODE.TYPE));
+		%%% error(sprintf('Error TRAIN_SC: Classifier (%s) does not support weighted samples.',MODE.TYPE));
+		warning(sprintf('Warning TRAIN_SC: Classifier (%s) in combination with weighted samples is not tested.',MODE.TYPE));
 	end;
-	if isfield(MODE.hyperparameters,'nu')
+	if ~isfield(MODE,'hyperparameters')
+		nu = 1;
+	elseif isfield(MODE.hyperparameters,'nu')
 		nu = MODE.hyperparameter.nu;
 	else
 		nu = 1;
@@ -503,10 +506,14 @@
 	for k = 1:length(CC.Labels),
 		d = sparse(1:m,1:m,(classlabel==CC.Labels(k))*2-1);
 		H = d * [-ones(m,1),D];
-		r = sum(H,1)';
-		r = (speye(n+1)/nu + H' * H)\r;	%solve (I/nu+H’*H)r=H’*e
+		%%% r = sum(H,1)';
+		r = sumskipnan(H,1,W)';
+		%%% r = (speye(n+1)/nu + H' * H)\r;	%solve (I/nu+H’*H)r=H’*e
+		[HTH, nn] = covm(H,H,'M',W);
+		r = (speye(n+1)/nu + HTH)\r;	%solve (I/nu+H’*H)r=H’*e
 		u = nu*(1-(H*r));
-		CC.weights(:,k) = u'*H;
+		%%% CC.weights(:,k) = u'*H;
+		[CC.weights(:,k),nn] = covm(u,H,'M',W);
 	end;
 	CC.hyperparameter.nu = nu;
 	CC.datatype = ['classifier:',lower(MODE.TYPE)];
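A hedged usage sketch, assuming the NaN toolbox with this revision of train_sc.m is installed; the data and weights are made up. It shows the weighted call that previously raised an error and now only warns, with the sample weights entering through sumskipnan and covm:

	D  = [randn(20,3)+1; randn(20,3)-1];   % 40 samples, 3 features (toy data)
	cl = [ones(20,1); 2*ones(20,1)];       % class labels 1 and 2
	W  = [2*ones(20,1); ones(20,1)];       % give class-1 samples twice the weight
	MODE.TYPE = 'PSVM';
	MODE.hyperparameter.nu = 1;
	CC = train_sc(D, cl, MODE, W);         % weighted proximal SVM training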
From: <sch...@us...> - 2010-01-27 19:26:52
Revision: 6803
          http://octave.svn.sourceforge.net/octave/?rev=6803&view=rev
Author:   schloegl
Date:     2010-01-27 19:26:32 +0000 (Wed, 27 Jan 2010)

Log Message:
-----------
several LDA improvements: ill-defined covariances better supported, a little speed-up

Modified Paths:
--------------
    trunk/octave-forge/extra/NaN/inst/train_sc.m

Modified: trunk/octave-forge/extra/NaN/inst/train_sc.m
===================================================================
--- trunk/octave-forge/extra/NaN/inst/train_sc.m	2010-01-27 06:38:42 UTC (rev 6802)
+++ trunk/octave-forge/extra/NaN/inst/train_sc.m	2010-01-27 19:26:32 UTC (rev 6803)
@@ -705,7 +705,7 @@
 			else
 				t = 0;
 				if length(MODE.TYPE)>7, t=MODE.TYPE(8)-'0'; end;
-				if (t<0 || t>4) t=0; end;
+				if (t<0 || t>6) t=0; end;
 				CC.options = sprintf('-s %i -B 1 -c %f -q',t, MODE.hyperparameter.c_value);	% C-SVC, C=1, linear kernel, degree = 1,
 			end;
 			model = train(W, cl, sparse(D), CC.options);	% C-SVC, C=1, linear kernel, degree = 1,
@@ -801,7 +801,7 @@
 	end;
 
 	ECM = CC.MD./CC.NN;
-	NC = size(ECM);
+	NC = size(CC.MD);
 	if strncmpi(MODE.TYPE,'LD',2) || strncmpi(MODE.TYPE,'FDA',3) || strncmpi(MODE.TYPE,'FLDA',3),
 		%if NC(1)==2, NC(1)=1; end;	% linear two class problem needs only one discriminant
@@ -809,32 +809,28 @@
 		type = MODE.TYPE(3)-'0';
 		ECM0 = squeeze(sum(ECM,3));	%decompose ECM
-		[M0,sd,COV0] = decovm(ECM0);
 		for k = 1:NC(3);
-			ecm = squeeze(ECM(:,:,k));
-			[M1,sd,COV1] = decovm(ECM0-ecm);
-			N1 = ECM0(1,1)-ecm(1,1);
-			[M2,sd,COV2] = decovm(ecm);
-			N2 = ecm(1,1);
+			ix = [1:k-1,k+1:NC(3)];
+			dM = CC.MD(:,1,k)./CC.NN(:,1,k) - sum(CC.MD(:,1,ix),3)./sum(CC.NN(:,1,ix),3);
 			switch (type)
 			case 2		% LD2
-				cov = (COV1+COV2)/2;
+				ecm0 = (sum(ECM(:,:,ix),3)/(NC(3)-1) + ECM(:,:,k));
 			case 4		% LD4
-				cov = (COV1*N1+COV2*N2)/(N1+N2);
+				ecm0 = 2*(sum(ECM(:,:,ix),3) + ECM(:,:,k))/NC(3);
+				% ecm0 = sum(CC.MD,3)./sum(CC.NN,3);
 			case 5		% LD5
-				cov = COV2;
+				ecm0 = ECM(:,:,k);
 			case 6		% LD6
-				cov = COV1;
+				ecm0 = sum(CC.MD(:,:,ix),3)./sum(CC.NN(:,:,ix),3);
 			otherwise	% LD3, LDA, FDA
-				cov = COV0;
+				ecm0 = ECM0;
 			end
 			if isfield(MODE.hyperparameter,'gamma')
-				cov = cov + mean(diag(cov))*eye(size(cov))*MODE.hyperparameter.gamma;
-			end
+				ecm0 = ecm0 + mean(diag(ecm0))*eye(size(ecm0))*MODE.hyperparameter.gamma;
+			end
-			w     = cov\(M2-M1)';
-			w0    = -M0*w;
-			CC.weights(:,k) = [w0; w];
+			CC.weights(:,k) = ecm0\dM;
+
 		end;
 		%CC.weights = sparse(CC.weights);
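A toy illustration (not from the commit) of the gamma regularization retained in the rewritten LD branch: the usual shrinkage trick that makes an ill-conditioned, here rank-deficient, matrix invertible before the weights are solved for.

	C  = [1 1; 1 1];                        % rank-deficient 2x2 matrix (toy data)
	dM = [1; 0];                            % mean difference between classes
	gamma = 0.1;                            % MODE.hyperparameter.gamma
	Creg = C + mean(diag(C))*eye(size(C))*gamma;
	w = Creg \ dM                           % now well defined; C\dM would not be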
From: <sch...@us...> - 2010-08-12 21:02:30
Revision: 7518
          http://octave.svn.sourceforge.net/octave/?rev=7518&view=rev
Author:   schloegl
Date:     2010-08-12 21:02:22 +0000 (Thu, 12 Aug 2010)

Log Message:
-----------
fix for single column classlabel with elements {-1,1}

Modified Paths:
--------------
    trunk/octave-forge/extra/NaN/inst/train_sc.m

Modified: trunk/octave-forge/extra/NaN/inst/train_sc.m
===================================================================
--- trunk/octave-forge/extra/NaN/inst/train_sc.m	2010-08-12 19:24:49 UTC (rev 7517)
+++ trunk/octave-forge/extra/NaN/inst/train_sc.m	2010-08-12 21:02:22 UTC (rev 7518)
@@ -784,7 +784,7 @@
 else          % Linear and Quadratic statistical classifiers
 	CC.datatype = ['classifier:statistical:',lower(MODE.TYPE)];
-	[classlabel,CC.Labels] = CL1M(classlabel);
+	[classlabel,CC.Labels] = CL1M(classlabel);
 	CC.MD = repmat(NaN,[sz(2)+[1,1],length(CC.Labels)]);
 	CC.NN = CC.MD;
 	for k = 1:length(CC.Labels),
@@ -924,21 +924,22 @@
 %% convert classlabels to 1..M encoding
 if (all(classlabel>=0) && all(classlabel==fix(classlabel)) && (size(classlabel,2)==1))
 	cl1m = classlabel;
-
+
 elseif all((classlabel==1) | (classlabel==-1) | (classlabel==0) )
 	CL101 = classlabel;
 	M = size(classlabel,2);
 	if any(sum(classlabel==1,2)>1)
 		warning('invalid format of classlabel - at most one category may have +1');
 	end;
-	[tmp, cl1m] = max(classlabel,[],2);
 	if (M==1),
 		cl1m = (classlabel==-1) + 2*(classlabel==+1);
+	else
+		[tmp, cl1m] = max(classlabel,[],2);
+		if any(tmp ~= 1)
+			warning('some class might not be properly represented - you might what to add another column to classlabel = [max(classlabel,[],2)<1,classlabel]');
+		end;
+		cl1m(tmp<1)= 0;	%% or NaN ???
 	end;
-	if any(tmp ~= 1)
-		warning('some class might not be properly represented - you might what to add another column to classlabel = [max(classlabel,[],2)<1,classlabel]');
-	end;
-	cl1m(tmp<1)= 0;	%% or NaN ???
 else
 	classlabel
 	error('format of classlabel unsupported');
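A small worked example of the single-column case this fix handles, reusing the conversion line from the commit on made-up labels:

	classlabel = [-1; 1; 1; -1; 0];                  % single column of -1/+1 (0 = unlabeled)
	cl1m = (classlabel==-1) + 2*(classlabel==+1)     % -> [1; 2; 2; 1; 0]
	% -1 maps to class 1, +1 to class 2, 0 stays 0; with the old ordering,
	% cl1m(tmp<1)=0 was applied afterwards and zeroed the -1 rows again,
	% besides triggering a spurious warning.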
From: <sch...@us...> - 2010-09-19 21:33:37
Revision: 7756
          http://octave.svn.sourceforge.net/octave/?rev=7756&view=rev
Author:   schloegl
Date:     2010-09-19 21:33:31 +0000 (Sun, 19 Sep 2010)

Log Message:
-----------
remove obsolete test for weight vector in LibLinear

Modified Paths:
--------------
    trunk/octave-forge/extra/NaN/inst/train_sc.m

Modified: trunk/octave-forge/extra/NaN/inst/train_sc.m
===================================================================
--- trunk/octave-forge/extra/NaN/inst/train_sc.m	2010-09-19 20:15:24 UTC (rev 7755)
+++ trunk/octave-forge/extra/NaN/inst/train_sc.m	2010-09-19 21:33:31 UTC (rev 7756)
@@ -619,10 +619,6 @@
 elseif ~isempty(strfind(lower(MODE.TYPE),'svm:lin4'))
-	if ~isempty(W)
-%		error('Classifier (%s) does not support weighted samples.',MODE.TYPE);
-	end;
-
 	if ~isfield(MODE.hyperparameter,'c_value')
 		MODE.hyperparameter.c_value = 1;
 	end
@@ -677,10 +673,6 @@
 		error('No SVM training algorithm available. Install OSV-SVM, or LOO-SVM, or libSVM for Matlab.\n');
 	end;
-	if ~strncmp(MODE.TYPE, 'SVM:LIN',7) && ~isempty(W)
-		error('Classifier (%s) does not support weighted samples.',MODE.TYPE);
-	end;
-
 	%%CC = train_svm(D,classlabel,MODE);
 	[CL101,CC.Labels] = cl101(classlabel);
 	M = size(CL101,2);
From: <sch...@us...> - 2010-09-27 07:55:18
Revision: 7776
          http://octave.svn.sourceforge.net/octave/?rev=7776&view=rev
Author:   schloegl
Date:     2010-09-27 07:55:11 +0000 (Mon, 27 Sep 2010)

Log Message:
-----------
fix SVM for data with missing values (deletion mode)

Modified Paths:
--------------
    trunk/octave-forge/extra/NaN/inst/train_sc.m

Modified: trunk/octave-forge/extra/NaN/inst/train_sc.m
===================================================================
--- trunk/octave-forge/extra/NaN/inst/train_sc.m	2010-09-26 13:01:00 UTC (rev 7775)
+++ trunk/octave-forge/extra/NaN/inst/train_sc.m	2010-09-27 07:55:11 UTC (rev 7776)
@@ -686,7 +686,7 @@
 	s(1,:) = -m.*r;
 	for k = 1:M,
-		cl = CL101(:,k);
+		cl = CL101(rix,k);
 		if strncmp(MODE.TYPE, 'SVM:LIN',7);
 			if isfield(MODE,'options')
 				CC.options = MODE.options;
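A simplified sketch (not from the commit) of why the labels must be indexed with rix: row_col_deletion returns the rows and columns kept after removing missing values; here only the row part is mimicked with find(), which is an approximation of that helper.

	D     = [1 2; NaN 0; 3 4; 5 NaN; 6 7];   % toy data with missing values
	CL101 = [1; -1; 1; -1; 1];               % one target column
	rix   = find(~any(isnan(D),2));          % surviving rows: [1; 3; 5]
	cl    = CL101(rix,1);                    % labels aligned with D(rix,:)
	% CL101(:,1) would keep 5 labels for only 3 retained samples, so the labels
	% no longer line up with the rows actually passed to the SVM trainer,
	% which is what revision 7776 corrects.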
From: <sch...@us...> - 2011-06-06 14:20:27
Revision: 8314
          http://octave.svn.sourceforge.net/octave/?rev=8314&view=rev
Author:   schloegl
Date:     2011-06-06 14:20:21 +0000 (Mon, 06 Jun 2011)

Log Message:
-----------
change default settings for usage of bioinfo-tb based on user feedback

Modified Paths:
--------------
    trunk/octave-forge/extra/NaN/inst/train_sc.m

Modified: trunk/octave-forge/extra/NaN/inst/train_sc.m
===================================================================
--- trunk/octave-forge/extra/NaN/inst/train_sc.m	2011-06-06 10:31:56 UTC (rev 8313)
+++ trunk/octave-forge/extra/NaN/inst/train_sc.m	2011-06-06 14:20:21 UTC (rev 8314)
@@ -714,7 +714,10 @@
 			Bias = model.rho * cl(1);
 		elseif strcmp(MODE.TYPE, 'SVM:bioinfo');
-			CC.SVMstruct = svmtrain(D, cl,'AUTOSCALE', 0);	%
+			% SVM classifier from bioinformatics toolbox.
+			% Settings suggested by Ian Daly, 2011-06-06
+			options = optimset('Display','iter','maxiter',20000, 'largescale','off');
+			CC.SVMstruct = svmtrain(D, cl, 'AUTOSCALE', 0, 'quadprog_opts', options, 'Method', 'LS', 'kernel_function', 'polynomial');
 			Bias = -CC.SVMstruct.Bias;
 			w = -CC.SVMstruct.Alpha'*CC.SVMstruct.SupportVectors;
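A hedged usage sketch, assuming the NaN toolbox and the Bioinformatics Toolbox svmtrain are installed, with made-up data; it only shows how this code path is selected, the option set itself being the one added in the commit:

	D  = [randn(20,2)+1; randn(20,2)-1];   % toy two-class data
	cl = [ones(20,1); 2*ones(20,1)];
	MODE.TYPE = 'SVM:bioinfo';             % selects the Bioinformatics Toolbox backend
	CC = train_sc(D, cl, MODE);            % now trained with 'Method','LS', a polynomial
	                                       % kernel, and a 20000-iteration quadprog limit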
From: <car...@us...> - 2011-11-15 16:07:21
Revision: 9106 http://octave.svn.sourceforge.net/octave/?rev=9106&view=rev Author: carandraug Date: 2011-11-15 16:07:10 +0000 (Tue, 15 Nov 2011) Log Message: ----------- train_sc: replaced nested subfunction by subfunctions to avoid warning during installation, replaced spaces per tabs and removed trailing whitespace Modified Paths: -------------- trunk/octave-forge/extra/NaN/inst/train_sc.m Modified: trunk/octave-forge/extra/NaN/inst/train_sc.m =================================================================== --- trunk/octave-forge/extra/NaN/inst/train_sc.m 2011-11-15 15:55:00 UTC (rev 9105) +++ trunk/octave-forge/extra/NaN/inst/train_sc.m 2011-11-15 16:07:10 UTC (rev 9106) @@ -160,787 +160,782 @@ % along with this program; if not, write to the Free Software % Foundation, Inc., 51 Franklin Street - Fifth Floor, Boston, MA 02110-1301, USA. -if nargin<2, - error('insufficient input arguments\n\tusage: train_sc(D,C,...)\n'); -end; -if nargin<3, MODE = 'LDA'; end; -if nargin<4, W = []; end; -if ischar(MODE) - tmp = MODE; - clear MODE; - MODE.TYPE = tmp; -elseif ~isfield(MODE,'TYPE') - MODE.TYPE=''; -end; + if nargin<2, + error('insufficient input arguments\n\tusage: train_sc(D,C,...)\n'); + end + if nargin<3, MODE = 'LDA'; end + if nargin<4, W = []; end + if ischar(MODE) + tmp = MODE; + clear MODE; + MODE.TYPE = tmp; + elseif ~isfield(MODE,'TYPE') + MODE.TYPE=''; + end -if isfield(MODE,'hyperparameters') && ~isfield(MODE,'hyperparameter'), - %% for backwards compatibility, this might become obsolete - warning('MODE.hyperparameters are used, You should use MODE.hyperparameter instead!!!'); - MODE.hyperparameter = MODE.hyperparameters; -end; + if isfield(MODE,'hyperparameters') && ~isfield(MODE,'hyperparameter'), + %% for backwards compatibility, this might become obsolete + warning('MODE.hyperparameters are used, You should use MODE.hyperparameter instead!!!'); + MODE.hyperparameter = MODE.hyperparameters; + end -sz = size(D); -if sz(1)~=size(classlabel,1), - error('length of data and classlabel does not fit'); -end; + sz = size(D); + if sz(1)~=size(classlabel,1), + error('length of data and classlabel does not fit'); + end -% remove all NaN's -if 1, - % several classifier can deal with NaN's, there is no need to remove them. -elseif isempty(W) - %% TODO: some classifiers can deal with NaN's in D. Test whether this can be relaxed. - %ix = any(isnan([classlabel]),2); - ix = any(isnan([D,classlabel]),2); - D(ix,:) = []; - classlabel(ix,:)=[]; - W = []; -else - %ix = any(isnan([classlabel]),2); - ix = any(isnan([D,classlabel]),2); - D(ix,:)=[]; - classlabel(ix,:)=[]; - W(ix,:)=[]; - warning('support for weighting of samples is still experimental'); -end; + % remove all NaN's + if 1, + % several classifier can deal with NaN's, there is no need to remove them. + elseif isempty(W) + %% TODO: some classifiers can deal with NaN's in D. Test whether this can be relaxed. 
+ %ix = any(isnan([classlabel]),2); + ix = any(isnan([D,classlabel]),2); + D(ix,:) = []; + classlabel(ix,:)=[]; + W = []; + else + %ix = any(isnan([classlabel]),2); + ix = any(isnan([D,classlabel]),2); + D(ix,:)=[]; + classlabel(ix,:)=[]; + W(ix,:)=[]; + warning('support for weighting of samples is still experimental'); + end -sz = size(D); -if sz(1)~=length(classlabel), - error('length of data and classlabel does not fit'); -end; -if ~isfield(MODE,'hyperparameter') - MODE.hyperparameter = []; -end + sz = size(D); + if sz(1)~=length(classlabel), + error('length of data and classlabel does not fit'); + end + if ~isfield(MODE,'hyperparameter') + MODE.hyperparameter = []; + end -if 0, - ; -elseif ~isempty(strfind(lower(MODE.TYPE),'/delet')) - POS1 = find(MODE.TYPE=='/'); - [rix,cix] = row_col_deletion(D); - if ~isempty(W), W=W(rix); end; - CC = train_sc(D(rix,cix),classlabel(rix,:),MODE.TYPE(1:POS1(1)-1),W); - CC.G = sparse(cix, 1:length(cix), 1, size(D,2), length(cix)); - if isfield(CC,'weights') - W = [CC.weights(1,:); CC.weights(2:end,:)]; - CC.weights = sparse(size(D,2)+1, size(W,2)); - CC.weights([1,cix+1],:) = W; - CC.datatype = ['classifier:statistical:',lower(MODE.TYPE)]; - else - CC.datatype = [CC.datatype,'/delet']; - end; - -elseif ~isempty(strfind(lower(MODE.TYPE),'nbpw')) - error('NBPW not implemented yet') - %%%% Naive Bayesian Parzen Window Classifier. - [classlabel,CC.Labels] = CL1M(classlabel); - for k = 1:length(CC.Labels), - [d,CC.MEAN(k,:)] = center(D(classlabel==CC.Labels(k),:),1); - [CC.VAR(k,:),CC.N(k,:)] = sumskipnan(d.^2,1); - h2_opt = (4./(3*CC.N(k,:))).^(2/5).*CC.VAR(k,:); - %%% TODO - end; + if 0, + ; + elseif ~isempty(strfind(lower(MODE.TYPE),'/delet')) + POS1 = find(MODE.TYPE=='/'); + [rix,cix] = row_col_deletion(D); + if ~isempty(W), W=W(rix); end + CC = train_sc(D(rix,cix),classlabel(rix,:),MODE.TYPE(1:POS1(1)-1),W); + CC.G = sparse(cix, 1:length(cix), 1, size(D,2), length(cix)); + if isfield(CC,'weights') + W = [CC.weights(1,:); CC.weights(2:end,:)]; + CC.weights = sparse(size(D,2)+1, size(W,2)); + CC.weights([1,cix+1],:) = W; + CC.datatype = ['classifier:statistical:',lower(MODE.TYPE)]; + else + CC.datatype = [CC.datatype,'/delet']; + end + elseif ~isempty(strfind(lower(MODE.TYPE),'nbpw')) + error('NBPW not implemented yet') + %%%% Naive Bayesian Parzen Window Classifier. + [classlabel,CC.Labels] = CL1M(classlabel); + for k = 1:length(CC.Labels), + [d,CC.MEAN(k,:)] = center(D(classlabel==CC.Labels(k),:),1); + [CC.VAR(k,:),CC.N(k,:)] = sumskipnan(d.^2,1); + h2_opt = (4./(3*CC.N(k,:))).^(2/5).*CC.VAR(k,:); + %%% TODO + end -elseif ~isempty(strfind(lower(MODE.TYPE),'nbc')) - %%%% Naive Bayesian Classifier. - if ~isempty(strfind(lower(MODE.TYPE),'anbc')) - %%%% Augmented Naive Bayesian classifier. 
- [CC.V,L] = eig(covm(D,'M',W)); - D = D*CC.V; - else - CC.V = eye(size(D,2)); - end; - [classlabel,CC.Labels] = CL1M(classlabel); - for k = 1:length(CC.Labels), - ix = classlabel==CC.Labels(k); - %% [d,CC.MEAN(k,:)] = center(D(ix,:),1); - if ~isempty(W) - [s,n] = sumskipnan(D(ix,:),1,W(ix)); - CC.MEAN(k,:) = s./n; - d = D(ix,:) - CC.MEAN(repmat(k,sum(ix),1),:); - [CC.VAR(k,:),CC.N(k,:)] = sumskipnan(d.^2,1,W(ix)); - else - [s,n] = sumskipnan(D(ix,:),1); - CC.MEAN(k,:) = s./n; - d = D(ix,:) - CC.MEAN(repmat(k,sum(ix),1),:); - [CC.VAR(k,:),CC.N(k,:)] = sumskipnan(d.^2,1); - end - end; - CC.VAR = CC.VAR./max(CC.N-1,0); - CC.datatype = ['classifier:',lower(MODE.TYPE)]; + elseif ~isempty(strfind(lower(MODE.TYPE),'nbc')) + %%%% Naive Bayesian Classifier + if ~isempty(strfind(lower(MODE.TYPE),'anbc')) + %%%% Augmented Naive Bayesian classifier. + [CC.V,L] = eig(covm(D,'M',W)); + D = D*CC.V; + else + CC.V = eye(size(D,2)); + end + [classlabel,CC.Labels] = CL1M(classlabel); + for k = 1:length(CC.Labels), + ix = classlabel==CC.Labels(k); + %% [d,CC.MEAN(k,:)] = center(D(ix,:),1); + if ~isempty(W) + [s,n] = sumskipnan(D(ix,:),1,W(ix)); + CC.MEAN(k,:) = s./n; + d = D(ix,:) - CC.MEAN(repmat(k,sum(ix),1),:); + [CC.VAR(k,:),CC.N(k,:)] = sumskipnan(d.^2,1,W(ix)); + else + [s,n] = sumskipnan(D(ix,:),1); + CC.MEAN(k,:) = s./n; + d = D(ix,:) - CC.MEAN(repmat(k,sum(ix),1),:); + [CC.VAR(k,:),CC.N(k,:)] = sumskipnan(d.^2,1); + end + end + CC.VAR = CC.VAR./max(CC.N-1,0); + CC.datatype = ['classifier:',lower(MODE.TYPE)]; -elseif ~isempty(strfind(lower(MODE.TYPE),'lpm')) - if ~isempty(W) - error('Error TRAIN_SC: Classifier (%s) does not support weighted samples.',MODE.TYPE); - end; - % linear programming machine - % CPLEX optimizer: ILOG solver, ilog cplex 6.5 reference manual http://www.ilog.com - MODE.TYPE = 'LPM'; - if ~isfield(MODE.hyperparameter,'c_value') - MODE.hyperparameter.c_value = 1; - end - [classlabel,CC.Labels] = CL1M(classlabel); - M = length(CC.Labels); - if M==2, M=1; end; % For a 2-class problem, only 1 Discriminant is needed - for k = 1:M, - %LPM = train_LPM(D,(classlabel==CC.Labels(k)),'C',MODE.hyperparameter.c_value); - LPM = train_LPM(D',(classlabel'==CC.Labels(k))); - CC.weights(:,k) = [-LPM.b; LPM.w(:)]; - end; - CC.hyperparameter.c_value = MODE.hyperparameter.c_value; - CC.datatype = ['classifier:',lower(MODE.TYPE)]; + elseif ~isempty(strfind(lower(MODE.TYPE),'lpm')) + if ~isempty(W) + error('Error TRAIN_SC: Classifier (%s) does not support weighted samples.',MODE.TYPE); + end + % linear programming machine + % CPLEX optimizer: ILOG solver, ilog cplex 6.5 reference manual http://www.ilog.com + MODE.TYPE = 'LPM'; + if ~isfield(MODE.hyperparameter,'c_value') + MODE.hyperparameter.c_value = 1; + end + [classlabel,CC.Labels] = CL1M(classlabel); + M = length(CC.Labels); + if M==2, M=1; end % For a 2-class problem, only 1 Discriminant is needed + for k = 1:M, + %LPM = train_LPM(D,(classlabel==CC.Labels(k)),'C',MODE.hyperparameter.c_value); + LPM = train_LPM(D',(classlabel'==CC.Labels(k))); + CC.weights(:,k) = [-LPM.b; LPM.w(:)]; + end + CC.hyperparameter.c_value = MODE.hyperparameter.c_value; + CC.datatype = ['classifier:',lower(MODE.TYPE)]; -elseif ~isempty(strfind(lower(MODE.TYPE),'pla')), - % Perceptron Learning Algorithm - [rix,cix] = row_col_deletion(D); - [CL101,CC.Labels] = cl101(classlabel); - M = size(CL101,2); - weights = sparse(length(cix)+1,M); + elseif ~isempty(strfind(lower(MODE.TYPE),'pla')), + % Perceptron Learning Algorithm - %ix = randperm(size(D,1)); %% randomize samples ??? 
- if ~isfield(MODE.hyperparameter,'alpha') - if isfield(MODE.hyperparameter,'alpha') - alpha = MODE.hyperparameter.alpha; - else - alpha = 1; - end; - for k = rix(:)', - %e = ((classlabel(k)==(1:M))-.5) - sign([1, D(k,cix)] * weights)/2; - e = CL101(k,:) - sign([1, D(k,cix)] * weights); - weights = weights + alpha * [1,D(k,cix)]' * e ; - end; - - else %if ~isempty(W) - if isfield(MODE.hyperparameter,'alpha') - W = W*MODE.hyperparameter.alpha; - end; - for k = rix(:)', - %e = ((classlabel(k)==(1:M))-.5) - sign([1, D(k,cix)] * weights)/2; - e = CL101(k,:) - sign([1, D(k,cix)] * weights); - weights = weights + W(k) * [1,D(k,cix)]' * e ; - end; - end - CC.weights = sparse(size(D,2)+1,M); - CC.weights([1,cix+1],:) = weights; - CC.datatype = ['classifier:',lower(MODE.TYPE)]; + [rix,cix] = row_col_deletion(D); + [CL101,CC.Labels] = cl101(classlabel); + M = size(CL101,2); + weights = sparse(length(cix)+1,M); + %ix = randperm(size(D,1)); %% randomize samples ??? + if ~isfield(MODE.hyperparameter,'alpha') + if isfield(MODE.hyperparameter,'alpha') + alpha = MODE.hyperparameter.alpha; + else + alpha = 1; + end + for k = rix(:)', + %e = ((classlabel(k)==(1:M))-.5) - sign([1, D(k,cix)] * weights)/2; + e = CL101(k,:) - sign([1, D(k,cix)] * weights); + weights = weights + alpha * [1,D(k,cix)]' * e ; + end -elseif ~isempty(strfind(lower(MODE.TYPE),'adaline')) || ~isempty(strfind(lower(MODE.TYPE),'lms')), - % adaptive linear elemente, least mean squares, delta rule, Widrow-Hoff, + else %if ~isempty(W) + if isfield(MODE.hyperparameter,'alpha') + W = W*MODE.hyperparameter.alpha; + end + for k = rix(:)', + %e = ((classlabel(k)==(1:M))-.5) - sign([1, D(k,cix)] * weights)/2; + e = CL101(k,:) - sign([1, D(k,cix)] * weights); + weights = weights + W(k) * [1,D(k,cix)]' * e ; + end + end + CC.weights = sparse(size(D,2)+1,M); + CC.weights([1,cix+1],:) = weights; + CC.datatype = ['classifier:',lower(MODE.TYPE)]; - [rix,cix] = row_col_deletion(D); - [CL101,CC.Labels] = cl101(classlabel); - M = size(CL101,2); - weights = sparse(length(cix)+1,M); - %ix = randperm(size(D,1)); %% randomize samples ??? - if isempty(W) - if isfield(MODE.hyperparameter,'alpha') - alpha = MODE.hyperparameter.alpha; - else - alpha = 1; - end; - for k = rix(:)', - %e = (classlabel(k)==(1:M)) - [1, D(k,cix)] * weights; - e = CL101(k,:) - sign([1, D(k,cix)] * weights); - weights = weights + alpha * [1,D(k,cix)]' * e ; - end; + elseif ~isempty(strfind(lower(MODE.TYPE),'adaline')) || ~isempty(strfind(lower(MODE.TYPE),'lms')), + % adaptive linear elemente, least mean squares, delta rule, Widrow-Hoff, - else %if ~isempty(W) - if isfield(MODE.hyperparameter,'alpha') - W = W*MODE.hyperparameter.alpha; - end; - for k = rix(:)', - %e = (classlabel(k)==(1:M)) - [1, D(k,cix)] * weights; - e = CL101(k,:) - sign([1, D(k,cix)] * weights); - weights = weights + W(k) * [1,D(k,cix)]' * e ; - end; - end - CC.weights = sparse(size(D,2)+1,M); - CC.weights([1,cix+1],:) = weights; - CC.datatype = ['classifier:',lower(MODE.TYPE)]; + [rix,cix] = row_col_deletion(D); + [CL101,CC.Labels] = cl101(classlabel); + M = size(CL101,2); + weights = sparse(length(cix)+1,M); + %ix = randperm(size(D,1)); %% randomize samples ??? 
+ if isempty(W) + if isfield(MODE.hyperparameter,'alpha') + alpha = MODE.hyperparameter.alpha; + else + alpha = 1; + end + for k = rix(:)', + %e = (classlabel(k)==(1:M)) - [1, D(k,cix)] * weights; + e = CL101(k,:) - sign([1, D(k,cix)] * weights); + weights = weights + alpha * [1,D(k,cix)]' * e ; + end -elseif ~isempty(strfind(lower(MODE.TYPE),'winnow')) - % winnow algorithm - if ~isempty(W) - error('Classifier (%s) does not support weighted samples.',MODE.TYPE); - end; + else %if ~isempty(W) + if isfield(MODE.hyperparameter,'alpha') + W = W*MODE.hyperparameter.alpha; + end + for k = rix(:)', + %e = (classlabel(k)==(1:M)) - [1, D(k,cix)] * weights; + e = CL101(k,:) - sign([1, D(k,cix)] * weights); + weights = weights + W(k) * [1,D(k,cix)]' * e ; + end + end + CC.weights = sparse(size(D,2)+1,M); + CC.weights([1,cix+1],:) = weights; + CC.datatype = ['classifier:',lower(MODE.TYPE)]; - [rix,cix] = row_col_deletion(D); - [CL101,CC.Labels] = cl101(classlabel); - M = size(CL101,2); - weights = ones(length(cix),M); - theta = size(D,2)/2; - for k = rix(:)', - e = CL101(k,:) - sign(D(k,cix) * weights - theta); - weights = weights.* 2.^(D(k,cix)' * e); - end; - - CC.weights = sparse(size(D,2)+1,M); - CC.weights(cix+1,:) = weights; - CC.datatype = ['classifier:',lower(MODE.TYPE)]; - + elseif ~isempty(strfind(lower(MODE.TYPE),'winnow')) + % winnow algorithm + if ~isempty(W) + error('Classifier (%s) does not support weighted samples.',MODE.TYPE); + end -elseif ~isempty(strfind(lower(MODE.TYPE),'pls')) || ~isempty(strfind(lower(MODE.TYPE),'reg')) - % 4th version: support for weighted samples - work well with unequally distributed data: - % regression analysis, can handle sparse data, too. + [rix,cix] = row_col_deletion(D); + [CL101,CC.Labels] = cl101(classlabel); + M = size(CL101,2); + weights = ones(length(cix),M); + theta = size(D,2)/2; - if nargin<4, - W = []; - end; - [rix, cix] = row_col_deletion(D); - wD = [ones(length(rix),1),D(rix,cix)]; + for k = rix(:)', + e = CL101(k,:) - sign(D(k,cix) * weights - theta); + weights = weights.* 2.^(D(k,cix)' * e); + end - if ~isempty(W) - %% wD = diag(W)*wD - W = W(:); - for k=1:size(wD,2) - wD(:,k) = W(rix).*wD(:,k); - end; - end; - [CL101, CC.Labels] = cl101(classlabel(rix,:)); - M = size(CL101,2); - CC.weights = sparse(sz(2)+1,M); + CC.weights = sparse(size(D,2)+1,M); + CC.weights(cix+1,:) = weights; + CC.datatype = ['classifier:',lower(MODE.TYPE)]; - %[rix, cix] = row_col_deletion(wD); - [q,r] = qr(wD,0); + elseif ~isempty(strfind(lower(MODE.TYPE),'pls')) || ~isempty(strfind(lower(MODE.TYPE),'reg')) + % 4th version: support for weighted samples - work well with unequally distributed data: + % regression analysis, can handle sparse data, too. 
- if isempty(W) - CC.weights([1,cix+1],:) = r\(q'*CL101); - else - CC.weights([1,cix+1],:) = r\(q'*(W(rix,ones(1,M)).*CL101)); - end; - %for k = 1:M, - % CC.weights(cix,k) = r\(q'*(W.*CL101(rix,k))); - %end; - CC.datatype = ['classifier:statistical:',lower(MODE.TYPE)]; + if nargin<4, + W = []; + end + [rix, cix] = row_col_deletion(D); + wD = [ones(length(rix),1),D(rix,cix)]; + if ~isempty(W) + %% wD = diag(W)*wD + W = W(:); + for k=1:size(wD,2) + wD(:,k) = W(rix).*wD(:,k); + end + end + [CL101, CC.Labels] = cl101(classlabel(rix,:)); + M = size(CL101,2); + CC.weights = sparse(sz(2)+1,M); -elseif ~isempty(strfind(MODE.TYPE,'WienerHopf')) - % Q: equivalent to LDA - % equivalent to Regression, except regression can not deal with NaN's - [CL101,CC.Labels] = cl101(classlabel); - M = size(CL101,2); - CC.weights = sparse(size(D,2)+1,M); - cc = covm(D,'E',W); - %c1 = classlabel(~isnan(classlabel)); - %c2 = ones(sum(~isnan(classlabel)),M); - %for k = 1:M, - % c2(:,k) = c1==CC.Labels(k); - %end; - %CC.weights = cc\covm([ones(size(c2,1),1),D(~isnan(classlabel),:)],2*real(c2)-1,'M',W); - CC.weights = cc\covm([ones(size(D,1),1),D],CL101,'M',W); - CC.datatype = ['classifier:statistical:',lower(MODE.TYPE)]; + %[rix, cix] = row_col_deletion(wD); + [q,r] = qr(wD,0); + if isempty(W) + CC.weights([1,cix+1],:) = r\(q'*CL101); + else + CC.weights([1,cix+1],:) = r\(q'*(W(rix,ones(1,M)).*CL101)); + end + %for k = 1:M, + % CC.weights(cix,k) = r\(q'*(W.*CL101(rix,k))); + %end + CC.datatype = ['classifier:statistical:',lower(MODE.TYPE)]; -elseif ~isempty(strfind(lower(MODE.TYPE),'/gsvd')) - if ~isempty(W) - error('Classifier (%s) does not support weighted samples.',MODE.TYPE); - end; - % [2] Peg Howland and Haesun Park, 2004. - % Generalizing Discriminant Analysis Using the Generalized Singular Value Decomposition - % IEEE Transactions on Pattern Analysis and Machine Intelligence, 26(8), 2004. 
- % dx.doi.org/10.1109/TPAMI.2004.46 - % [3] http://www-static.cc.gatech.edu/~kihwan23/face_recog_gsvd.htm - [classlabel,CC.Labels] = CL1M(classlabel); - [rix,cix] = row_col_deletion(D); + elseif ~isempty(strfind(MODE.TYPE,'WienerHopf')) + % Q: equivalent to LDA + % equivalent to Regression, except regression can not deal with NaN's + [CL101,CC.Labels] = cl101(classlabel); + M = size(CL101,2); + CC.weights = sparse(size(D,2)+1,M); + cc = covm(D,'E',W); + %c1 = classlabel(~isnan(classlabel)); + %c2 = ones(sum(~isnan(classlabel)),M); + %for k = 1:M, + % c2(:,k) = c1==CC.Labels(k); + %end + %CC.weights = cc\covm([ones(size(c2,1),1),D(~isnan(classlabel),:)],2*real(c2)-1,'M',W); + CC.weights = cc\covm([ones(size(D,1),1),D],CL101,'M',W); + CC.datatype = ['classifier:statistical:',lower(MODE.TYPE)]; - Hw = zeros(length(rix)+length(CC.Labels), length(cix)); - Hb = []; - m0 = mean(D(rix,cix)); - K = length(CC.Labels); - N = zeros(1,K); - for k = 1:K, - ix = find(classlabel(rix)==CC.Labels(k)); - N(k) = length(ix); - [Hw(ix,:), mu] = center(D(rix(ix),cix)); - %Hb(k,:) = sqrt(N(k))*(mu(k,:)-m0); - Hw(length(rix)+k,:) = sqrt(N(k))*(mu-m0); % Hb(k,:) - end; - try - [P,R,Q] = svd(Hw,'econ'); - catch % needed because SVD(..,'econ') not supported in Matlab 6.x - [P,R,Q] = svd(Hw,0); - end; - t = rank(R); - clear Hw Hb mu; - %[size(D);size(P);size(Q);size(R)] - R = R(1:t,1:t); - %P = P(1:size(D,1),1:t); - %Q = Q(1:t,:); - [U,E,W] = svd(P(1:length(rix),1:t),0); - %[size(U);size(E);size(W)] - clear U E P; - %[size(Q);size(R);size(W)] + elseif ~isempty(strfind(lower(MODE.TYPE),'/gsvd')) + if ~isempty(W) + error('Classifier (%s) does not support weighted samples.',MODE.TYPE); + end + % [2] Peg Howland and Haesun Park, 2004 + % Generalizing Discriminant Analysis Using the Generalized Singular Value Decomposition + % IEEE Transactions on Pattern Analysis and Machine Intelligence, 26(8), 2004. + % dx.doi.org/10.1109/TPAMI.2004.46 + % [3] http://www-static.cc.gatech.edu/~kihwan23/face_recog_gsvd.htm - %G = Q(1:t,:)'*[R\W']; - G = Q(:,1:t)*(R\W'); % this works as well and needs only 'econ'-SVD - %G = G(:,1:t); % not needed - - % do not use this, gives very bad results for Medline database - %G = G(:,1:K); this seems to be a typo in [2] and [3]. - CC = train_sc(D(:,cix)*G,classlabel,MODE.TYPE(1:find(MODE.TYPE=='/')-1)); - CC.G = sparse(size(D,2),size(G,2)); - CC.G(cix,:) = G; - if isfield(CC,'weights') - CC.weights = sparse([CC.weights(1,:); CC.G*CC.weights(2:end,:)]); - CC.datatype = ['classifier:statistical:', lower(MODE.TYPE)]; - else - CC.datatype = [CC.datatype,'/gsvd']; - end; + [classlabel,CC.Labels] = CL1M(classlabel); + [rix,cix] = row_col_deletion(D); + Hw = zeros(length(rix)+length(CC.Labels), length(cix)); + Hb = []; + m0 = mean(D(rix,cix)); + K = length(CC.Labels); + N = zeros(1,K); + for k = 1:K, + ix = find(classlabel(rix)==CC.Labels(k)); + N(k) = length(ix); + [Hw(ix,:), mu] = center(D(rix(ix),cix)); + %Hb(k,:) = sqrt(N(k))*(mu(k,:)-m0); + Hw(length(rix)+k,:) = sqrt(N(k))*(mu-m0); % Hb(k,:) + end + try + [P,R,Q] = svd(Hw,'econ'); + catch % needed because SVD(..,'econ') not supported in Matlab 6.x + [P,R,Q] = svd(Hw,0); + end + t = rank(R); -elseif ~isempty(strfind(lower(MODE.TYPE),'sparse')) - if ~isempty(W) - error('Classifier (%s) does not support weighted samples.',MODE.TYPE); - end; - % [5] J.D. 
Tebbens and P.Schlesinger (2006), - % Improving Implementation of Linear Discriminant Analysis for the Small Sample Size Problem - % http://www.cs.cas.cz/mweb/download/publi/JdtSchl2006.pdf + clear Hw Hb mu; + %[size(D);size(P);size(Q);size(R)] + R = R(1:t,1:t); + %P = P(1:size(D,1),1:t); + %Q = Q(1:t,:); + [U,E,W] = svd(P(1:length(rix),1:t),0); + %[size(U);size(E);size(W)] + clear U E P; + %[size(Q);size(R);size(W)] - [classlabel,CC.Labels] = CL1M(classlabel); - [rix,cix] = row_col_deletion(D); + %G = Q(1:t,:)'*[R\W']; + G = Q(:,1:t)*(R\W'); % this works as well and needs only 'econ'-SVD + %G = G(:,1:t); % not needed - warning('sparse LDA is sensitive to linear transformations') - M = length(CC.Labels); - G = sparse([],[],[],length(rix),M,length(rix)); - for k = 1:M, - G(classlabel(rix)==CC.Labels(k),k) = 1; - end; - tol = 1e-10; + % do not use this, gives very bad results for Medline database + %G = G(:,1:K); this seems to be a typo in [2] and [3]. + CC = train_sc(D(:,cix)*G,classlabel,MODE.TYPE(1:find(MODE.TYPE=='/')-1)); + CC.G = sparse(size(D,2),size(G,2)); + CC.G(cix,:) = G; + if isfield(CC,'weights') + CC.weights = sparse([CC.weights(1,:); CC.G*CC.weights(2:end,:)]); + CC.datatype = ['classifier:statistical:', lower(MODE.TYPE)]; + else + CC.datatype = [CC.datatype,'/gsvd']; + end - G = train_lda_sparse(D(rix,cix),G,1,tol); - CC.datatype = 'classifier:slda'; - POS1 = find(MODE.TYPE=='/'); - %G = v(:,1:size(G.trafo,2)).*G.trafo; - %CC.weights = s * CC.weights(2:end,:) + sparse(1,1:M,CC.weights(1,:),sz(2)+1,M); - CC = train_sc(D(rix,cix)*G.trafo,classlabel(rix),MODE.TYPE(1:POS1(1)-1)); - CC.G = sparse(size(D,2),size(G.trafo,2)); - CC.G(cix,:) = G.trafo; - if isfield(CC,'weights') - CC.weights = sparse([CC.weights(1,:); CC.G*CC.weights(2:end,:)]); - CC.datatype = ['classifier:statistical:',lower(MODE.TYPE)]; - else - CC.datatype = [CC.datatype,'/sparse']; - end; + elseif ~isempty(strfind(lower(MODE.TYPE),'sparse')) + if ~isempty(W) + error('Classifier (%s) does not support weighted samples.',MODE.TYPE); + end + % [5] J.D. Tebbens and P.Schlesinger (2006), + % Improving Implementation of Linear Discriminant Analysis for the Small Sample Size Problem + % http://www.cs.cas.cz/mweb/download/publi/JdtSchl2006.pdf -elseif ~isempty(strfind(lower(MODE.TYPE),'rbf')) - if ~isempty(W) - error('Classifier (%s) does not support weighted samples.',MODE.TYPE); - end; + [classlabel,CC.Labels] = CL1M(classlabel); + [rix,cix] = row_col_deletion(D); - % Martin Hieden's RBF-SVM - if exist('svmpredict_mex','file')==3, - MODE.TYPE = 'SVM:LIB:RBF'; - else - error('No SVM training algorithm available. 
Install LibSVM for Matlab.\n'); - end; - CC.options = '-t 2 -q'; %use RBF kernel, set C, set gamma - if isfield(MODE.hyperparameter,'gamma') - CC.options = sprintf('%s -c %g', CC.options, MODE.hyperparameter.c_value); % set C - end - if isfield(MODE.hyperparameter,'c_value') - CC.options = sprintf('%s -g %g', CC.options, MODE.hyperparameter.gamma); % set C - end + warning('sparse LDA is sensitive to linear transformations') + M = length(CC.Labels); + G = sparse([],[],[],length(rix),M,length(rix)); + for k = 1:M, + G(classlabel(rix)==CC.Labels(k),k) = 1; + end + tol = 1e-10; - % pre-whitening - [D,r,m]=zscore(D,1); - CC.prewhite = sparse(2:sz(2)+1,1:sz(2),r,sz(2)+1,sz(2),2*sz(2)); - CC.prewhite(1,:) = -m.*r; + G = train_lda_sparse(D(rix,cix),G,1,tol); + CC.datatype = 'classifier:slda'; + POS1 = find(MODE.TYPE=='/'); + %G = v(:,1:size(G.trafo,2)).*G.trafo; + %CC.weights = s * CC.weights(2:end,:) + sparse(1,1:M,CC.weights(1,:),sz(2)+1,M); - [classlabel,CC.Labels] = CL1M(classlabel); - CC.model = svmtrain_mex(classlabel, D, CC.options); % Call the training mex File - CC.datatype = ['classifier:',lower(MODE.TYPE)]; + CC = train_sc(D(rix,cix)*G.trafo,classlabel(rix),MODE.TYPE(1:POS1(1)-1)); + CC.G = sparse(size(D,2),size(G.trafo,2)); + CC.G(cix,:) = G.trafo; + if isfield(CC,'weights') + CC.weights = sparse([CC.weights(1,:); CC.G*CC.weights(2:end,:)]); + CC.datatype = ['classifier:statistical:',lower(MODE.TYPE)]; + else + CC.datatype = [CC.datatype,'/sparse']; + end + elseif ~isempty(strfind(lower(MODE.TYPE),'rbf')) + if ~isempty(W) + error('Classifier (%s) does not support weighted samples.',MODE.TYPE); + end -elseif ~isempty(strfind(lower(MODE.TYPE),'svm11')) - if ~isempty(W) - error('Classifier (%s) does not support weighted samples.',MODE.TYPE); - end; - % 1-versus-1 scheme - if ~isfield(MODE.hyperparameter,'c_value') - MODE.hyperparameter.c_value = 1; - end + % Martin Hieden's RBF-SVM + if exist('svmpredict_mex','file')==3, + MODE.TYPE = 'SVM:LIB:RBF'; + else + error('No SVM training algorithm available. 
Install LibSVM for Matlab.\n'); + end + CC.options = '-t 2 -q'; %use RBF kernel, set C, set gamma + if isfield(MODE.hyperparameter,'gamma') + CC.options = sprintf('%s -c %g', CC.options, MODE.hyperparameter.c_value); % set C + end + if isfield(MODE.hyperparameter,'c_value') + CC.options = sprintf('%s -g %g', CC.options, MODE.hyperparameter.gamma); % set C + end - CC.options=sprintf('-c %g -t 0 -q',MODE.hyperparameter.c_value); %use linear kernel, set C - CC.hyperparameter.c_value = MODE.hyperparameter.c_value; + % pre-whitening + [D,r,m]=zscore(D,1); + CC.prewhite = sparse(2:sz(2)+1,1:sz(2),r,sz(2)+1,sz(2),2*sz(2)); + CC.prewhite(1,:) = -m.*r; - % pre-whitening - [D,r,m]=zscore(D,1); - CC.prewhite = sparse(2:sz(2)+1,1:sz(2),r,sz(2)+1,sz(2),2*sz(2)); - CC.prewhite(1,:) = -m.*r; + [classlabel,CC.Labels] = CL1M(classlabel); + CC.model = svmtrain_mex(classlabel, D, CC.options); % Call the training mex File + CC.datatype = ['classifier:',lower(MODE.TYPE)]; - [classlabel,CC.Labels] = CL1M(classlabel); - CC.model = svmtrain_mex(classlabel, D, CC.options); % Call the training mex File - - FUN = 'SVM:LIB:1vs1'; - CC.datatype = ['classifier:',lower(FUN)]; + elseif ~isempty(strfind(lower(MODE.TYPE),'svm11')) + if ~isempty(W) + error('Classifier (%s) does not support weighted samples.',MODE.TYPE); + end + % 1-versus-1 scheme + if ~isfield(MODE.hyperparameter,'c_value') + MODE.hyperparameter.c_value = 1; + end -elseif ~isempty(strfind(lower(MODE.TYPE),'psvm')) - if ~isempty(W) - %%% error('Classifier (%s) does not support weighted samples.',MODE.TYPE); - warning('Classifier (%s) in combination with weighted samples is not tested.',MODE.TYPE); - end; - if ~isfield(MODE,'hyperparameter') - nu = 1; - elseif isfield(MODE.hyperparameter,'nu') - nu = MODE.hyperparameter.nu; - else - nu = 1; - end; - [m,n] = size(D); - [CL101,CC.Labels] = cl101(classlabel); - CC.weights = sparse(n+1,size(CL101,2)); - M = size(CL101,2); - for k = 1:M, - d = sparse(1:m,1:m,CL101(:,k)); - H = d * [ones(m,1),D]; - %%% r = sum(H,1)'; - r = sumskipnan(H,1,W)'; - %%% r = (speye(n+1)/nu + H' * H)\r; %solve (I/nu+H’*H)r=H’*e - [HTH, nn] = covm(H,H,'M',W); - r = (speye(n+1)/nu + HTH)\r; %solve (I/nu+H’*H)r=H’*e - u = nu*(1-(H*r)); - %%% CC.weights(:,k) = u'*H; - [c,nn] = covm(u,H,'M',W); - CC.weights(:,k) = c'; - end; - CC.hyperparameter.nu = nu; - CC.datatype = ['classifier:',lower(MODE.TYPE)]; - + CC.options=sprintf('-c %g -t 0 -q',MODE.hyperparameter.c_value); %use linear kernel, set C + CC.hyperparameter.c_value = MODE.hyperparameter.c_value; -elseif ~isempty(strfind(lower(MODE.TYPE),'svm:lin4')) - if ~isfield(MODE.hyperparameter,'c_value') - MODE.hyperparameter.c_value = 1; - end + % pre-whitening + [D,r,m]=zscore(D,1); + CC.prewhite = sparse(2:sz(2)+1,1:sz(2),r,sz(2)+1,sz(2),2*sz(2)); + CC.prewhite(1,:) = -m.*r; - [classlabel,CC.Labels] = CL1M(classlabel); - M = length(CC.Labels); - CC.weights = sparse(size(D,2)+1,M); + [classlabel,CC.Labels] = CL1M(classlabel); + CC.model = svmtrain_mex(classlabel, D, CC.options); % Call the training mex File - [rix,cix] = row_col_deletion(D); + FUN = 'SVM:LIB:1vs1'; + CC.datatype = ['classifier:',lower(FUN)]; - % pre-whitening - [D,r,m]=zscore(D(rix,cix),1); - sz2 = length(cix); - s = sparse(2:sz2+1,1:sz2,r,sz2+1,sz2,2*sz2); - s(1,:) = -m.*r; - CC.options = sprintf('-s 4 -B 1 -c %f -q', MODE.hyperparameter.c_value); % C-SVC, C=1, linear kernel, degree = 1, - model = train(W, classlabel, sparse(D), CC.options); % C-SVC, C=1, linear kernel, degree = 1, - weights = model.w([end,1:end-1],:)'; + 
elseif ~isempty(strfind(lower(MODE.TYPE),'psvm')) + if ~isempty(W) + %%% error('Classifier (%s) does not support weighted samples.',MODE.TYPE); + warning('Classifier (%s) in combination with weighted samples is not tested.',MODE.TYPE); + end + if ~isfield(MODE,'hyperparameter') + nu = 1; + elseif isfield(MODE.hyperparameter,'nu') + nu = MODE.hyperparameter.nu; + else + nu = 1; + end + [m,n] = size(D); + [CL101,CC.Labels] = cl101(classlabel); + CC.weights = sparse(n+1,size(CL101,2)); + M = size(CL101,2); + for k = 1:M, + d = sparse(1:m,1:m,CL101(:,k)); + H = d * [ones(m,1),D]; + %%% r = sum(H,1)'; + r = sumskipnan(H,1,W)'; + %%% r = (speye(n+1)/nu + H' * H)\r; %solve (I/nu+H’*H)r=H’*e + [HTH, nn] = covm(H,H,'M',W); + r = (speye(n+1)/nu + HTH)\r; %solve (I/nu+H’*H)r=H’*e + u = nu*(1-(H*r)); + %%% CC.weights(:,k) = u'*H; + [c,nn] = covm(u,H,'M',W); + CC.weights(:,k) = c'; + end + CC.hyperparameter.nu = nu; + CC.datatype = ['classifier:',lower(MODE.TYPE)]; - CC.weights([1,cix+1],:) = s * weights(2:end,:) + sparse(1,1:M,weights(1,:),sz2+1,M); % include pre-whitening transformation - CC.weights([1,cix+1],:) = s * CC.weights(cix+1,:) + sparse(1,1:M,CC.weights(1,:),sz2+1,M); % include pre-whitening transformation - CC.hyperparameter.c_value = MODE.hyperparameter.c_value; - CC.datatype = ['classifier:',lower(MODE.TYPE)]; + elseif ~isempty(strfind(lower(MODE.TYPE),'svm:lin4')) + if ~isfield(MODE.hyperparameter,'c_value') + MODE.hyperparameter.c_value = 1; + end + [classlabel,CC.Labels] = CL1M(classlabel); + M = length(CC.Labels); + CC.weights = sparse(size(D,2)+1,M); -elseif ~isempty(strfind(lower(MODE.TYPE),'svm')) + [rix,cix] = row_col_deletion(D); - if ~isfield(MODE.hyperparameter,'c_value') - MODE.hyperparameter.c_value = 1; - end - if any(MODE.TYPE==':'), - % nothing to be done - elseif exist('train','file')==3, - MODE.TYPE = 'SVM:LIN'; %% liblinear - elseif exist('svmtrain_mex','file')==3, - MODE.TYPE = 'SVM:LIB'; - elseif (exist('svmtrain','file')==3), - MODE.TYPE = 'SVM:LIB'; - fprintf(1,'You need to rename %s to svmtrain_mex.mex !! \n Press any key to continue !!!\n',which('svmtrain.mex')); - elseif exist('svmtrain','file')==2, - MODE.TYPE = 'SVM:bioinfo'; - elseif exist('mexSVMTrain','file')==3, - MODE.TYPE = 'SVM:OSU'; - elseif exist('svcm_train','file')==2, - MODE.TYPE = 'SVM:LOO'; - elseif exist('svmclass','file')==2, - MODE.TYPE = 'SVM:KM'; - elseif exist('svc','file')==2, - MODE.TYPE = 'SVM:Gunn'; - else - error('No SVM training algorithm available. 
Install OSV-SVM, or LOO-SVM, or libSVM for Matlab.\n'); - end; + % pre-whitening + [D,r,m]=zscore(D(rix,cix),1); + sz2 = length(cix); + s = sparse(2:sz2+1,1:sz2,r,sz2+1,sz2,2*sz2); + s(1,:) = -m.*r; - %%CC = train_svm(D,classlabel,MODE); - [CL101,CC.Labels] = cl101(classlabel); - M = size(CL101,2); - [rix,cix] = row_col_deletion(D); - CC.weights = sparse(sz(2)+1, M); + CC.options = sprintf('-s 4 -B 1 -c %f -q', MODE.hyperparameter.c_value); % C-SVC, C=1, linear kernel, degree = 1, + model = train(W, classlabel, sparse(D), CC.options); % C-SVC, C=1, linear kernel, degree = 1, + weights = model.w([end,1:end-1],:)'; - % pre-whitening - [D,r,m]=zscore(D(rix,cix),1); - sz2 = length(cix); - s = sparse(2:sz2+1,1:sz2,r,sz2+1,sz2,2*sz2); - s(1,:) = -m.*r; + CC.weights([1,cix+1],:) = s * weights(2:end,:) + sparse(1,1:M,weights(1,:),sz2+1,M); % include pre-whitening transformation + CC.weights([1,cix+1],:) = s * CC.weights(cix+1,:) + sparse(1,1:M,CC.weights(1,:),sz2+1,M); % include pre-whitening transformation + CC.hyperparameter.c_value = MODE.hyperparameter.c_value; + CC.datatype = ['classifier:',lower(MODE.TYPE)]; - for k = 1:M, - cl = CL101(rix,k); - if strncmp(MODE.TYPE, 'SVM:LIN',7); - if isfield(MODE,'options') - CC.options = MODE.options; - else - t = 0; - if length(MODE.TYPE)>7, t=MODE.TYPE(8)-'0'; end; - if (t<0 || t>6) t=0; end; - CC.options = sprintf('-s %i -B 1 -c %f -q',t, MODE.hyperparameter.c_value); % C-SVC, C=1, linear kernel, degree = 1, - end; - model = train(W, cl, sparse(D), CC.options); % C-SVC, C=1, linear kernel, degree = 1, - w = -model.w'; - Bias = -model.bias; - w = -model.w(:,1:end-1)'; - Bias = -model.w(:,end)'; - elseif strcmp(MODE.TYPE, 'SVM:LIB'); %% tested with libsvm-mat-2.9-1 - if isfield(MODE,'options') - CC.options = MODE.options; - else - CC.options = sprintf('-s 0 -c %f -t 0 -d 1 -q', MODE.hyperparameter.c_value); % C-SVC, C=1, linear kernel, degree = 1, - end; - model = svmtrain_mex(cl, D, CC.options); % C-SVC, C=1, linear kernel, degree = 1, - w = cl(1) * model.SVs' * model.sv_coef; %Calculate decision hyperplane weight vector - % ensure correct sign of weight vector and Bias according to class label - Bias = model.rho * cl(1); + elseif ~isempty(strfind(lower(MODE.TYPE),'svm')) - elseif strcmp(MODE.TYPE, 'SVM:bioinfo'); - % SVM classifier from bioinformatics toolbox. - % Settings suggested by Ian Daly, 2011-06-06 - options = optimset('Display','iter','maxiter',20000, 'largescale','off'); - CC.SVMstruct = svmtrain(D, cl, 'AUTOSCALE', 0, 'quadprog_opts', options, 'Method', 'LS', 'kernel_function', 'polynomial'); - Bias = -CC.SVMstruct.Bias; - w = -CC.SVMstruct.Alpha'*CC.SVMstruct.SupportVectors; + if ~isfield(MODE.hyperparameter,'c_value') + MODE.hyperparameter.c_value = 1; + end + if any(MODE.TYPE==':'), + % nothing to be done + elseif exist('train','file')==3, + MODE.TYPE = 'SVM:LIN'; %% liblinear + elseif exist('svmtrain_mex','file')==3, + MODE.TYPE = 'SVM:LIB'; + elseif (exist('svmtrain','file')==3), + MODE.TYPE = 'SVM:LIB'; + fprintf(1,'You need to rename %s to svmtrain_mex.mex !! \n Press any key to continue !!!\n',which('svmtrain.mex')); + elseif exist('svmtrain','file')==2, + MODE.TYPE = 'SVM:bioinfo'; + elseif exist('mexSVMTrain','file')==3, + MODE.TYPE = 'SVM:OSU'; + elseif exist('svcm_train','file')==2, + MODE.TYPE = 'SVM:LOO'; + elseif exist('svmclass','file')==2, + MODE.TYPE = 'SVM:KM'; + elseif exist('svc','file')==2, + MODE.TYPE = 'SVM:Gunn'; + else + error('No SVM training algorithm available. 
Install OSV-SVM, or LOO-SVM, or libSVM for Matlab.\n'); + end - elseif strcmp(MODE.TYPE, 'SVM:OSU'); - [AlphaY, SVs, Bias] = mexSVMTrain(D', cl', [0 1 1 1 MODE.hyperparameter.c_value]); % Linear Kernel, C=1; degree=1, c-SVM - w = -SVs * AlphaY'*cl(1); %Calculate decision hyperplane weight vector - % ensure correct sign of weight vector and Bias according to class label - Bias = -Bias * cl(1); + %%CC = train_svm(D,classlabel,MODE); + [CL101,CC.Labels] = cl101(classlabel); + M = size(CL101,2); + [rix,cix] = row_col_deletion(D); + CC.weights = sparse(sz(2)+1, M); - elseif strcmp(MODE.TYPE, 'SVM:LOO'); - [a, Bias, g, inds] = svcm_train(D, cl, MODE.hyperparameter.c_value); % C = 1; - w = D(inds,:)' * (a(inds).*cl(inds)) ; + % pre-whitening + [D,r,m]=zscore(D(rix,cix),1); + sz2 = length(cix); + s = sparse(2:sz2+1,1:sz2,r,sz2+1,sz2,2*sz2); + s(1,:) = -m.*r; - elseif strcmp(MODE.TYPE, 'SVM:Gunn'); - [nsv, alpha, Bias,svi] = svc(D, cl, 1, MODE.hyperparameter.c_value); % linear kernel, C = 1; - w = D(svi,:)' * alpha(svi) * cl(1); - Bias = mean(D*w); + for k = 1:M, + cl = CL101(rix,k); + if strncmp(MODE.TYPE, 'SVM:LIN',7); + if isfield(MODE,'options') + CC.options = MODE.options; + else + t = 0; + if length(MODE.TYPE)>7, t=MODE.TYPE(8)-'0'; end + if (t<0 || t>6) t=0; end + CC.options = sprintf('-s %i -B 1 -c %f -q',t, MODE.hyperparameter.c_value); % C-SVC, C=1, linear kernel, degree = 1, + end + model = train(W, cl, sparse(D), CC.options); % C-SVC, C=1, linear kernel, degree = 1, + w = -model.w'; + Bias = -model.bias; + w = -model.w(:,1:end-1)'; + Bias = -model.w(:,end)'; - elseif strcmp(MODE.TYPE, 'SVM:KM'); - [xsup,w1,Bias,inds] = svmclass(D, cl, MODE.hyperparameter.c_value, 1, 'poly', 1); % C = 1; - w = -D(inds,:)' * w1; + elseif strcmp(MODE.TYPE, 'SVM:LIB'); %% tested with libsvm-mat-2.9-1 + if isfield(MODE,'options') + CC.options = MODE.options; + else + CC.options = sprintf('-s 0 -c %f -t 0 -d 1 -q', MODE.hyperparameter.c_value); % C-SVC, C=1, linear kernel, degree = 1, + end + model = svmtrain_mex(cl, D, CC.options); % C-SVC, C=1, linear kernel, degree = 1, + w = cl(1) * model.SVs' * model.sv_coef; %Calculate decision hyperplane weight vector + % ensure correct sign of weight vector and Bias according to class label + Bias = model.rho * cl(1); - else - fprintf(2,'Error TRAIN_SVM: no SVM training algorithm available\n'); - return; - end + elseif strcmp(MODE.TYPE, 'SVM:bioinfo'); + % SVM classifier from bioinformatics toolbox. 
+ % Settings suggested by Ian Daly, 2011-06-06 + options = optimset('Display','iter','maxiter',20000, 'largescale','off'); + CC.SVMstruct = svmtrain(D, cl, 'AUTOSCALE', 0, 'quadprog_opts', options, 'Method', 'LS', 'kernel_function', 'polynomial'); + Bias = -CC.SVMstruct.Bias; + w = -CC.SVMstruct.Alpha'*CC.SVMstruct.SupportVectors; - CC.weights(1,k) = -Bias; - CC.weights(cix+1,k) = w; - end; - CC.weights([1,cix+1],:) = s * CC.weights(cix+1,:) + sparse(1,1:M,CC.weights(1,:),sz2+1,M); % include pre-whitening transformation - CC.hyperparameter.c_value = MODE.hyperparameter.c_value; - CC.datatype = ['classifier:',lower(MODE.TYPE)]; + elseif strcmp(MODE.TYPE, 'SVM:OSU'); + [AlphaY, SVs, Bias] = mexSVMTrain(D', cl', [0 1 1 1 MODE.hyperparameter.c_value]); % Linear Kernel, C=1; degree=1, c-SVM + w = -SVs * AlphaY'*cl(1); %Calculate decision hyperplane weight vector + % ensure correct sign of weight vector and Bias according to class label + Bias = -Bias * cl(1); + elseif strcmp(MODE.TYPE, 'SVM:LOO'); + [a, Bias, g, inds] = svcm_train(D, cl, MODE.hyperparameter.c_value); % C = 1; + w = D(inds,:)' * (a(inds).*cl(inds)) ; -elseif ~isempty(strfind(lower(MODE.TYPE),'csp')) - CC.datatype = ['classifier:',lower(MODE.TYPE)]; - [classlabel,CC.Labels] = CL1M(classlabel); - CC.MD = repmat(NaN,[sz(2)+[1,1],length(CC.Labels)]); - CC.NN = CC.MD; - for k = 1:length(CC.Labels), - %% [CC.MD(k,:,:),CC.NN(k,:,:)] = covm(D(classlabel==CC.Labels(k),:),'E'); - ix = classlabel==CC.Labels(k); - if isempty(W) - [CC.MD(:,:,k),CC.NN(:,:,k)] = covm(D(ix,:), 'E'); - else - [CC.MD(:,:,k),CC.NN(:,:,k)] = covm(D(ix,:), 'E', W(ix)); - end; - end; - ECM = CC.MD./CC.NN; - W = csp(ECM,'CSP3'); - %%% ### This is a hack ### - CC.FiltA = 50; - CC.FiltB = ones(CC.FiltA,1); - d = filtfilt(CC.FiltB,CC.FiltA,(D*W).^2); - CC.csp_w = W; - CC.CSP = train_sc(log(d),classlabel); + elseif strcmp(MODE.TYPE, 'SVM:Gunn'); + [nsv, alpha, Bias,svi] = svc(D, cl, 1, MODE.hyperparameter.c_value); % linear kernel, C = 1; + w = D(svi,:)' * alpha(svi) * cl(1); + Bias = mean(D*w); + elseif strcmp(MODE.TYPE, 'SVM:KM'); + [xsup,w1,Bias,inds] = svmclass(D, cl, MODE.hyperparameter.c_value, 1, 'poly', 1); % C = 1; + w = -D(inds,:)' * w1; -else % Linear and Quadratic statistical classifiers - CC.datatype = ['classifier:statistical:',lower(MODE.TYPE)]; - [classlabel,CC.Labels] = CL1M(classlabel); - CC.MD = repmat(NaN,[sz(2)+[1,1],length(CC.Labels)]); - CC.NN = CC.MD; - for k = 1:length(CC.Labels), - ix = classlabel==CC.Labels(k); - if isempty(W) - [CC.MD(:,:,k),CC.NN(:,:,k)] = covm(D(ix,:), 'E'); - else - [CC.MD(:,:,k),CC.NN(:,:,k)] = covm(D(ix,:), 'E', W(ix)); - end; - end; + else + fprintf(2,'Error TRAIN_SVM: no SVM training algorithm available\n'); + return; + end - ECM = CC.MD./CC.NN; - NC = size(CC.MD); - if strncmpi(MODE.TYPE,'LD',2) || strncmpi(MODE.TYPE,'FDA',3) || strncmpi(MODE.TYPE,'FLDA',3), + CC.weights(1,k) = -Bias; + CC.weights(cix+1,k) = w; + end + CC.weights([1,cix+1],:) = s * CC.weights(cix+1,:) + sparse(1,1:M,CC.weights(1,:),sz2+1,M); % include pre-whitening transformation + CC.hyperparameter.c_value = MODE.hyperparameter.c_value; + CC.datatype = ['classifier:',lower(MODE.TYPE)]; - %if NC(1)==2, NC(1)=1; end; % linear two class problem needs only one discriminant - CC.weights = repmat(NaN,NC(2),NC(3)); % memory allocation - type = MODE.TYPE(3)-'0'; - ECM0 = squeeze(sum(ECM,3)); %decompose ECM - for k = 1:NC(3); - ix = [1:k-1,k+1:NC(3)]; - dM = CC.MD(:,1,k)./CC.NN(:,1,k) - sum(CC.MD(:,1,ix),3)./sum(CC.NN(:,1,ix),3); - switch (type) - case 2 % LD2 
- ecm0 = (sum(ECM(:,:,ix),3)/(NC(3)-1) + ECM(:,:,k)); - case 4 % LD4 - ecm0 = 2*(sum(ECM(:,:,ix),3) + ECM(:,:,k))/NC(3); - % ecm0 = sum(CC.MD,3)./sum(CC.NN,3); - case 5 % LD5 - ecm0 = ECM(:,:,k); - case 6 % LD6 - ecm0 = sum(CC.MD(:,:,ix),3)./sum(CC.NN(:,:,ix),3); - otherwise % LD3, LDA, FDA - ecm0 = ECM0; + elseif ~isempty(strfind(lower(MODE.TYPE),'csp')) + CC.datatype = ['classifier:',lower(MODE.TYPE)]; + [classlabel,CC.Labels] = CL1M(classlabel); + CC.MD = repmat(NaN,[sz(2)+[1,1],length(CC.Labels)]); + CC.NN = CC.MD; + for k = 1:length(CC.Labels), + %% [CC.MD(k,:,:),CC.NN(k,:,:)] = covm(D(classlabel==CC.Labels(k),:),'E'); + ix = classlabel==CC.Labels(k); + if isempty(W) + [CC.MD(:,:,k),CC.NN(:,:,k)] = covm(D(ix,:), 'E'); + else + [CC.MD(:,:,k),CC.NN(:,:,k)] = covm(D(ix,:), 'E', W(ix)); end - if isfield(MODE.hyperparameter,'gamma') - ecm0 = ecm0 + mean(diag(ecm0))*eye(size(ecm0))*MODE.hyperparameter.gamma; - end + end + ECM = CC.MD./CC.NN; + W = csp(ECM,'CSP3'); + %%% ### This is a hack ### + CC.FiltA = 50; + CC.FiltB = ones(CC.FiltA,1); + d = filtfilt(CC.FiltB,CC.FiltA,(D*W).^2); + CC.csp_w = W; + CC.CSP = train_sc(log(d),classlabel); - CC.weights(:,k) = ecm0\dM; - end; - %CC.weights = sparse(CC.weights); - - - elseif strcmpi(MODE.TYPE,'RDA'); - if isfield(MODE,'hyperparameter') - CC.hyperparameter = MODE.hyperparameter; - end; - % default values - if ~isfield(CC.hyperparameter,'gamma') - CC.hyperparameter.gamma = 0; - end; - if ~isfield(CC.hyperparameter,'lambda') - CC.hyperparameter.lambda = 1; - end; - else - ECM0 = sum(ECM,3); - nn = ECM0(1,1,1); % number of samples in training set for class k - XC = squeeze(ECM0(:,:,1))/nn; % normalize correlation matrix - M = XC(1,2:NC(2)); % mean - S = XC(2:NC(2),2:NC(2)) - M'*M;% covariance matrix + else % Linear and Quadratic statistical classifiers + CC.datatype = ['classifier:statistical:',lower(MODE.TYPE)]; + [classlabel,CC.Labels] = CL1M(classlabel); + CC.MD = repmat(NaN,[sz(2)+[1,1],length(CC.Labels)]); + CC.NN = CC.MD; + for k = 1:length(CC.Labels), + ix = classlabel==CC.Labels(k); + if isempty(W) + [CC.MD(:,:,k),CC.NN(:,:,k)] = covm(D(ix,:), 'E'); + else + [CC.MD(:,:,k),CC.NN(:,:,k)] = covm(D(ix,:), 'E', W(ix)); + end + end - try - [v,d]=eig(S); - U0 = v(diag(d)==0,:); - CC.iS2 = U0*U0'; - end; + ECM = CC.MD./CC.NN; + NC = size(CC.MD); + if strncmpi(MODE.TYPE,'LD',2) || strncmpi(MODE.TYPE,'FDA',3) || strncmpi(MODE.TYPE,'FLDA',3), - %M = M/nn; S=S/(nn-1); - ICOV0 = inv(S); - CC.iS0 = ICOV0; - % ICOV1 = zeros(size(S)); - for k = 1:NC(3), - %[M,sd,S,xc,N] = decovm(ECM{k}); %decompose ECM - %c = size(ECM,2); - nn = ECM(1,1,k);% number of samples in training set for class k - XC = squeeze(ECM(:,:,k))/nn;% normalize correlation matrix - M = XC(1,2:NC(2));% mean + %if NC(1)==2, NC(1)=1; end % linear two class problem needs only one discriminant + CC.weights = repmat(NaN,NC(2),NC(3)); % memory allocation + type = MODE.TYPE(3)-'0'; + + ECM0 = squeeze(sum(ECM,3)); %decompose ECM + for k = 1:NC(3); + ix = [1:k-1,k+1:NC(3)]; + dM = CC.MD(:,1,k)./CC.NN(:,1,k) - sum(CC.MD(:,1,ix),3)./sum(CC.NN(:,1,ix),3); + switch (type) + case 2 % LD2 + ecm0 = (sum(ECM(:,:,ix),3)/(NC(3)-1) + ECM(:,:,k)); + case 4 % LD4 + ecm0 = 2*(sum(ECM(:,:,ix),3) + ECM(:,:,k))/NC(3); + % ecm0 = sum(CC.MD,3)./sum(CC.NN,3); + case 5 % LD5 + ecm0 = ECM(:,:,k); + case 6 % LD6 + ecm0 = sum(CC.MD(:,:,ix),3)./sum(CC.NN(:,:,ix),3); + otherwise % LD3, LDA, FDA + ecm0 = ECM0; + end + if isfield(MODE.hyperparameter,'gamma') + ecm0 = ecm0 + 
mean(diag(ecm0))*eye(size(ecm0))*MODE.hyperparameter.gamma; + end + + CC.weights(:,k) = ecm0\dM; + + end + %CC.weights = sparse(CC.weights); + + elseif strc... [truncated message content] |