%% Include Ties in Nearest Neighbors Search
%%
% Load Fisher's iris data set.

% Copyright 2015 The MathWorks, Inc.

load fisheriris
%%
% Randomly remove five irises from the predictor data to use as a query set.
rng(4);                     % For reproducibility
n = size(meas,1);           % Sample size
qIdx = randsample(n,5);     % Indices of query data
X = meas(~ismember(1:n,qIdx),:);
Y = meas(qIdx,:);
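%%
% As an optional check (not part of the original example), confirm the sizes of
% the training set |X| and the query set |Y|.
size(X)
size(Y)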
%%
% Grow a four-dimensional _K_ d-tree using the training data |X|.
Mdl = KDTreeSearcher(X);
%%
% |Mdl| is a |KDTreeSearcher| model object.  By default, the distance metric for
% finding nearest neighbors is the Euclidean metric.
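%%
% As an optional check (an addition to this example), you can confirm the
% distance metric by inspecting the |Distance| property of |Mdl|.
Mdl.Distance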
%%
% Find the indices of the training data (|X|) that are the seven nearest
% neighbors of each point in the query data (|Y|).  Specify |'IncludeTies',true|
% so that |knnsearch| also returns any neighbors whose distance equals the
% _k_ th smallest distance.
[Idx,D] = knnsearch(Mdl,Y,'K',7,'IncludeTies',true);
%%
% Because |'IncludeTies'| is |true|, |knnsearch| returns cell arrays rather
% than matrices.  |Idx| and |D| are five-element cell arrays of vectors, and
% each vector has at least seven elements.
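%%
% As a quick sanity check (not part of the original example), confirm that the
% outputs are cell arrays and that every distance vector in |D| has at least
% _k_ = 7 elements.
class(Idx)
min(cellfun('length',D))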
%%
% Display the lengths of the vectors in |Idx|.
cellfun('length',Idx)
%%
% Because cell |1| contains a vector with length greater than _k_ = 7, at least
% two observations in |X| are equidistant from query observation 1 (|Y(1,:)|)
% and tie as its seventh nearest neighbor.
%%
% Display the indices of the nearest neighbors to |Y(1,:)| and their
% distances.
nn5 = Idx{1}    % Indices of the nearest neighbors of Y(1,:)
nn5d = D{1}     % Corresponding distances
%%
% Training observations |88| and |95| are both 0.3873 cm away from query
% observation 1, so they tie as its seventh nearest neighbor.
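%%
% As a final check (an addition to this example; the variable name |tiedNN| is
% illustrative), programmatically identify the training observations that tie
% at the largest distance returned for |Y(1,:)|.
tiedNN = nn5(nn5d == max(nn5d))    % Neighbors tied at the kth smallest distance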