
%% Specify Custom Classification Loss
%%
% Load the NLP data set. Transpose the predictor data so that the
% observations correspond to columns.
load nlpdata
X = X';
%%
% For simplicity, use the label 'others' for all observations in |Y| that
% are not |'simulink'|, |'dsp'|, or |'comm'|.
Y(~(ismember(Y,{'simulink','dsp','comm'}))) = 'others';
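%%
% (Optional) To see the resulting class distribution, tabulate the
% relabeled response. This is a quick check and assumes |Y| is a
% categorical array, as in |nlpdata|.
tabulate(Y)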
%%
% Create a linear classification model template that specifies to solve the
% objective function using SpaRSA.
t = templateLinear('Solver','sparsa');
%%
% Cross-validate an ECOC model of linear classification models using 5-fold
% cross-validation.  Solve the objective function using SpaRSA. Specify
% that the predictor observations correspond to columns.
rng(1); % For reproducibility 
CVMdl = fitcecoc(X,Y,'Learners',t,'KFold',5,'ObservationsIn','columns');
CMdl1 = CVMdl.Trained{1}
%%
% |CVMdl| is a |ClassificationPartitionedLinearECOC| model. It contains
% the property |Trained|, which is a 5-by-1 cell array holding the
% |CompactClassificationECOC| models that the software trained using the
% training set of each fold.
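%%
% (Optional) As a quick check, inspect the coding design and class order
% of the first trained compact model; |CodingMatrix| and |ClassNames| are
% standard properties of |CompactClassificationECOC| objects.
CMdl1.CodingMatrix
CMdl1.ClassNames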
%%
% Create a function that takes the minimal loss for each observation, and
% then averages the minimal losses across all observations. Because the
% function does not use the class-identifier matrix (|C|), the observation
% weights (|W|), or the classification cost (|Cost|), use |~| to have
% |kfoldLoss| ignore their positions.
lossfun = @(~,S,~,~)mean(min(-S,[],2));
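%%
% For reference, a custom loss function for |kfoldLoss| follows the
% signature sketched below (the argument names are illustrative):
%
%   lossvalue = lossfun(C,S,W,Cost)
%
% where |C| is an n-by-K logical class-identifier matrix, |S| is the
% n-by-K matrix of negated-loss classification scores, |W| is the n-by-1
% vector of observation weights, and |Cost| is the K-by-K cost matrix.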
%%
% Estimate the average cross-validated classification loss using the
% minimal-loss-per-observation function. Also, obtain the loss for each
% fold.
ce = kfoldLoss(CVMdl,'LossFun',lossfun)
ceFold = kfoldLoss(CVMdl,'LossFun',lossfun,'Mode','individual')
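%%
% (Optional check, a sketch) The same quantity can be computed directly
% from the cross-validated scores, assuming the |NegLoss| output of
% |kfoldPredict| matches the score matrix |S| that |kfoldLoss| passes to
% the custom loss function.
[~,negLoss] = kfoldPredict(CVMdl);
ceManual = mean(min(-negLoss,[],2))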