
%% Extract Features from Trained Convolutional Neural Network

%% 
% *NOTE:* Training a convolutional neural network requires Parallel
% Computing Toolbox(TM) and a CUDA(R)-enabled NVIDIA(R) GPU with compute capability
% 3.0 or higher.
%%
% Load the sample data. 
[XTrain,TTrain] = digitTrain4DArrayData; 

%%
% |digitTrain4DArrayData| loads the digit training set as 4-D array data.
% |XTrain| is a 28-by-28-by-1-by-4940 array, where 28 is the height and 28
% is the width of the images, 1 is the number of channels, and 4940 is the
% number of synthetic images of handwritten digits. |TTrain| is a categorical
% vector containing the labels for each observation.  
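
%%
% As a quick, optional check (not part of the original example), confirm
% the array dimensions and preview the first synthetic digit image using
% base MATLAB graphics.
size(XTrain)
figure
imagesc(XTrain(:,:,1,1))
colormap gray
axis image off
title(['Label: ' char(TTrain(1))])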

%% 
% Construct the convolutional neural network architecture. 
layers = [imageInputLayer([28 28 1]);
          convolution2dLayer(5,20);
          reluLayer();
          maxPooling2dLayer(2,'Stride',2);
          fullyConnectedLayer(10);
          softmaxLayer();
          classificationLayer()];  

%% 
% Set the options to default settings for the stochastic gradient descent
% with momentum. 
options = trainingOptions('sgdm');  
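
%%
% To see what those defaults are, display the options object. If you want
% different behavior, |trainingOptions| accepts name-value pairs such as
% |'MaxEpochs'| and |'InitialLearnRate'| (the values below are illustrative
% only; this alternative options object is not used later in the example).
options
optionsCustom = trainingOptions('sgdm','MaxEpochs',10,'InitialLearnRate',0.01);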

%% 
% Train the network. 
rng('default')
net = trainNetwork(XTrain,TTrain,layers,options);  
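
%%
% Before extracting activations, you can inspect the trained network's
% |Layers| property to see which index corresponds to which layer (an
% optional check, not part of the original example).
net.Layers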

%% 
% Make predictions, but rather than taking the output from the last layer,
% specify the softmax layer (the sixth layer in this network) as the output
% layer.
trainFeatures = activations(net,XTrain,6); 
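
%%
% Optionally, confirm the shape of the extracted feature matrix before
% passing it to a classifier; the exact dimensions depend on the layer you
% chose.
size(trainFeatures)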

%%
% These outputs from an inner layer are known as _activations_ or
% _features_. By default, the |activations| method uses a CUDA-enabled GPU
% with compute capability 3.0 or higher, when one is available. You can also
% choose to run |activations| on a CPU by using the
% |'ExecutionEnvironment','cpu'| name-value pair argument.
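
%%
% For example, the following call recomputes the same features on the CPU
% by passing that name-value pair (illustrative; useful when no supported
% GPU is available).
trainFeaturesCPU = activations(net,XTrain,6,'ExecutionEnvironment','cpu');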

%% 
% You can use the returned features to train a support vector machine using
% the Statistics and Machine Learning Toolbox(TM) function |<docid:stats_ug.bue3oc9
% fitcecoc>|. 
svm = fitcecoc(trainFeatures,TTrain);  
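
%%
% As an optional sanity check (not part of the original example), you can
% cross-validate the feature-based classifier on the training features;
% |crossval| uses 10-fold cross-validation by default and |kfoldLoss|
% returns the misclassification rate.
cvsvm = crossval(svm);
cvError = kfoldLoss(cvsvm)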

%% 
% Load the test data. 
[XTest,TTest] = digitTest4DArrayData;

%% 
% Extract the features from the same layer (the sixth layer) for the test
% data, and use the trained support vector machine to predict the labels of
% the test images.
testFeatures = activations(net,XTest,6);
testPredictions = predict(svm,testFeatures);  

%% 
% Plot the confusion matrix. 
% Convert the data into the format plotconfusion accepts
ttest = dummyvar(double(TTest))'; % dummyvar requires Statistics and Machine Learning Toolbox
tpredictions = dummyvar(double(testPredictions))';
plotconfusion(ttest,tpredictions);    
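
%%
% As a numeric alternative to the plot, |confusionmat| (also from
% Statistics and Machine Learning Toolbox) tabulates the same information;
% rows correspond to true classes and columns to predicted classes.
confMat = confusionmat(TTest,testPredictions)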

%%
% The overall accuracy on the test data, using the support vector machine
% trained on features extracted from |net|, is 99.4%.

%% 
% Manually compute the overall accuracy. 
accuracy = sum(TTest == testPredictions)/numel(TTest)
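
%%
% For comparison (an optional extra step, not part of the original
% example), classify the test images directly with the trained network and
% compute its accuracy the same way.
netPredictions = classify(net,XTest);
netAccuracy = sum(TTest == netPredictions)/numel(TTest)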