function [objetos,Xcal,Xtest,ycal,ytest]=caltestda(X,y,ncal,alg,rep,method)
% Algorithm for selecting calibration and test samples for classification
% problems.
% X      : matrix of spectral data;
% y      : vector of class labels (sorted by class);
% ncal   : percentage of samples assigned to training; default: 70
% alg    : algorithm used to split the data: Kennard-Stone ('k'),
%          Duplex ('d') or segmented ('s'); default: 'k'
% rep    : vector indicating the spectral replicates; default: [] (no replicates);
% method : preprocessing method applied to the data before the split into
%          calibration and test sets; default: 'none'.
%
% Examples:
% [objetos,Xcal,Xtest,ycal,ytest]=caltestda(X,y,70,'k',rep,{'none'});
% [objetos,Xcal,Xtest,ycal,ytest]=caltestda(X,y,70,'k',rep);
% [objetos,Xcal,Xtest,ycal,ytest]=caltestda(X,y,70,'d',rep);
% [objetos,Xcal,Xtest,ycal,ytest]=caltestda(X,y,70,'s',rep,{'none'});
% [objetos,Xcal,Xtest,ycal,ytest]=caltestda(X,y,70,'k');
%
% Paulo R. Filgueiras - 13/08/2014
%
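% Note on the rep input (a sketch of the assumed layout, inferred from the
% replicate check below, which compares consecutive rows of rep): replicates
% of the same sample are expected to occupy consecutive rows with identical
% rep values, e.g.
%   rep = [1;1;1;2;2;2;3;3;3];   % three samples measured in triplicate
% Consecutive rows with equal rep entries are averaged before the split.
%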
if nargin<3, ncal=70; end
if nargin<4; alg='k'; end
if nargin<5; rep=[]; end
if nargin<6; method={'none'}; end
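% pretrat is assumed to be an external preprocessing routine available on
% the MATLAB path; it is not defined in this file.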
% Preprocess the spectra.
X2=pretrat(X,[],method);
% Identify the replicate blocks from the rep vector.
if isvector(rep)
y2.sample=1; % row indices where each replicate block starts
y2.numero=size(X2,1); % the last replicate block always ends at the last row of X2
for ki=2:size(X2,1)
if sum(rep(ki,:)==rep(ki-1,:))~=size(rep,2)
y2.sample=[y2.sample;ki]; % a new sample starts at row ki
end
end
aa2=y2.sample(2:end)-1;aa2=[aa2;y2.numero];
y2.sample=[y2.sample,aa2,aa2-y2.sample+1]; % start position, end position and number of replicates of each block
clear aa2 ki
% Compute the mean spectrum of each replicate block.
y2.medioX=[];y2.medioy=[];y2.class=[];
for ki=1:size(y2.sample,1)
posicao_medio = y2.sample(ki,1):y2.sample(ki,2);
matriz_medio = mean(X2(posicao_medio,:),1); % mean along rows, even for a single-replicate block
y2.medioX=[y2.medioX;matriz_medio]; % matrix of mean spectra
y2.medioy=[y2.medioy;rep(y2.sample(ki,1))];
y2.class=[y2.class;y(y2.sample(ki,1),:)]; % vector of classes (one entry per averaged sample)
end
else
y2.medioX=X2; % no replicates: use the spectra as they are
y2.medioy=ones(size(X2,1),1);
y2.class=y; % vector of classes
end
% Determine how many samples of each class will be selected for training.
classes.classes=unique(y,'rows');
classes.amostras{1,1}='Classes';
classes.amostras{1,2}='Amostras';
classes.amostras{1,3}='Treinamento';
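% Rows 2:end of classes.amostras hold, for each class: the class label, the
% indices of its samples, and the number of those samples assigned to training.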
for ki=1:length(classes.classes)
amostras = find(y2.class==classes.classes(ki));
classes.amostras{ki+1,1}=classes.classes(ki);
classes.amostras{ki+1,2}=amostras;
classes.amostras{ki+1,3}=round(ncal*length(amostras)/100);
end
% Select the training samples of each class with the chosen algorithm (alg).
classes.train=[];
for ki=1:length(classes.classes)
X3=y2.medioX(classes.amostras{ki+1,2},:);
y3=y2.medioy(classes.amostras{ki+1,2},:);
if strcmp(alg,'k')
classes.train{ki,1}=classes.amostras{ki+1,1};
classes.train{ki,2}=kenston(X3,classes.amostras{ki+1,3},1,0,y3); % pll=0: no output plot
classes.train{ki,3}=setxor(1:length(y3),classes.train{ki,2});
classes.train{ki,2}=classes.amostras{ki+1,2}(classes.train{ki,2});
classes.train{ki,2}=sort(classes.train{ki,2}); %classes.train{ki,2}=classes.train{ki,2}';
classes.train{ki,3}=classes.amostras{ki+1,2}(classes.train{ki,3});
classes.train{ki,3}=sort(classes.train{ki,3});
elseif strcmp(alg,'d')
classes.train{ki,1}=classes.amostras{ki+1,1};
classes.train{ki,2}=duplex(X3,length(y3)-classes.amostras{ki+1,3});
classes.train{ki,3}=setxor(1:length(y3),classes.train{ki,2});
classes.train{ki,2}=classes.amostras{ki+1,2}(classes.train{ki,2});
classes.train{ki,2}=sort(classes.train{ki,2}); %classes.train{ki,2}=classes.train{ki,2}';
classes.train{ki,3}=classes.amostras{ki+1,2}(classes.train{ki,3});
classes.train{ki,3}=sort(classes.train{ki,3});
elseif strcmp(alg,'s')
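% Segmented split: within each class, every ncal-th sample (the 1st, the
% (ncal+1)-th, ...) goes to the test set and the remaining samples go to
% training.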
classes.train{ki,1}=classes.amostras{ki+1,1};
aa2=repmat((1:ncal)',length(y3),1); aa2=aa2(1:length(y3));
classes.train{ki,2}=find(aa2~=1);
classes.train{ki,3}=find(aa2==1);
classes.train{ki,2}=classes.amostras{ki+1,2}(classes.train{ki,2});
classes.train{ki,2}=sort(classes.train{ki,2}); %classes.train{ki,2}=classes.train{ki,2}';
classes.train{ki,3}=classes.amostras{ki+1,2}(classes.train{ki,3});
classes.train{ki,3}=sort(classes.train{ki,3});
end
end
% Collect the training and test samples of all classes.
classes.train2=[];
classes.teste2=[];
for ki=1:length(classes.classes)
classes.train2=[classes.train2;classes.train{ki,2}];
classes.teste2=[classes.teste2;classes.train{ki,3}];
end
classes.train2=sort(classes.train2);
classes.teste2=sort(classes.teste2);
% Split X and y into training and test matrices.
if isvector(rep)
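% With replicates, expand each selected (averaged) sample back to all of its
% replicate rows in the original matrix X.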
objetos.treinamento=[];
for ki=1:length(classes.train2)
aa1=classes.train2(ki);
aa2=y2.sample(aa1,1):y2.sample(aa1,2);
objetos.treinamento=[objetos.treinamento,aa2];
end
objetos.treinamento=objetos.treinamento';
objetos.teste=setxor(1:size(X,1),objetos.treinamento);
else
objetos.treinamento=classes.train2;
objetos.teste=classes.teste2;
end
objetos.classes=classes;
objetos.classes2=y2;
Xcal=X(objetos.treinamento,:);
Xtest=X(objetos.teste,:);
ycal=y(objetos.treinamento,:);
ytest=y(objetos.teste,:);
function [objeto,xm,ym,xt,yt]=kenston(X,numero_pontos,mm,pll,Y)
% Kennard-Stone algorithm for sample selection.
% Reference: R. W. Kennard and L. A. Stone,
% Technometrics Vol. 11, No. 1, 1969.
%
% INPUT:
% X             = matrix of predictor (spectral) data
% numero_pontos = number of points to be selected
% mm            = starting point: 1 starts from the point closest to the
%                 mean, 0 from the point furthest from the mean
% pll           = verbose: 1 plot, 0 no plot
% Y             = matrix of responses (class labels)
%
% Example:
% [object,xcal,ycal,xval,yval]=kenston(X,30,1,1,yy);
[n,m]=size(X);
t=X;
% Kennard and Stone method to select objects
% starting (1st) point is the closest (or furthest) from the centroid
meant=mean(t);
t1=t-ones(n,1)*meant;
for i=1:n
a(i)=t1(i,:)*t1(i,:)';
end %i
if mm==1,
[b,c]=min(a);
else
[b,c]=max(a);
end
objeto(1)=c;
clear a b c t1
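% Second point: the object furthest from the first selected point.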
t1=t-ones(n,1)*t(objeto(1),:);
for i=1:n
a(i)=t1(i,:)*t1(i,:)';
end %i
[b,c]=max(a);
objeto(2)=c;
clear a b c t1
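% Remaining points: at each iteration select, among the objects not yet
% chosen, the one whose minimum distance to the already selected set is
% largest (max-min criterion).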
for pi=3:numero_pontos
list=1:n;
tt=t;
k=length(objeto);
list(objeto)=[];
tt(objeto,:)=[];
nl=length(list);
for j=1:nl
for i=1:k
t1=tt(j,:)-t(objeto(i),:);
a(i)=t1*t1';
end % i
[b,c]=min(a);
dmin(j)=b;
end %j
[b,c]=max(dmin');
objeto(pi)=list(c);
clear dmin a b c list
end %pi
objeto=objeto(1:numero_pontos);
% plot
if pll==1
[a,b]=size(t);
if b>1
plot(t(:,1),t(:,2),'.')
for i=1:n
text(t(i,1),t(i,2),int2str(i))
end
hold on
plot(t(objeto,1),t(objeto,2),'r*')
hold off
xlabel('Variable 1')
ylabel('Variable 2')
end
if b==1
plot(t(:,1),t(:,1),'.')
for i=1:n
text(t(i,1),t(i,1),int2str(i))
end
hold on
plot(t(objeto,1),t(objeto,1),'r*')
hold off
xlabel('Variable 1')
ylabel('Variable 1')
end
end
xm=X(objeto,:);
ym=Y(objeto,:);
ind=[1:n]';
ind(objeto)=[];
xt=X(ind,:);
yt=Y(ind,:);
function [model,test]=duplex(X,k)
% -------------------------------------------------------------------------
% Function: [model,test]=duplex(X,k)
% -------------------------------------------------------------------------
% Aim:
% Subset selection with Duplex algorithm; uniform design of model and test
% sets
% -------------------------------------------------------------------------
% Input:
% X, matrix (n,p), predictor variables in columns
% k, number of objects to be selected for the test set (the test set can
% contain at most 0.5n objects; if fewer objects are selected for the test
% set, then the k first objects of the model and test sets are designed
% uniformly and the remaining objects not selected by the Duplex algorithm
% are included in the model set)
% -------------------------------------------------------------------------
% Output:
% model, vector (n-k,1), list of objects selected to the model set
% test, vector (k,1), list of objects selected to the test set (optional)
% -----------------------------------------------------------------------
% Example:
% [model,test]=duplex(X,10)
% -----------------------------------------------------------------------
% Reference:
% R.D. Snee, Technometrics 19 (1977) 415-428
% Written by Michal Daszykowski
% Department of Chemometrics, Institute of Chemistry,
% The University of Silesia
% December 2004
[m,n]=size(X);
ma=floor(0.5*m);
if k>ma
k=ma;
end
x=[[1:size(X,1)]' X];
n=size(x,2);
% First, the two most distant objects are assigned to the model set
p=tril(fastdist(x(:,2:n),x(:,2:n)));
[i1 i2]=find(p==max(max(p)));
model=x([i1 i2],1);
x([i1 i2],:)=[];
% Then the two most distant remaining objects are assigned to the test set
p=tril(fastdist(x(:,2:n),x(:,2:n)));
[i1 i2]=find(p==max(max(p)));
test=x([i1 i2],1);
x([i1 i2],:)=[];
h=waitbar(0,'Please wait ...');
h=waitbar(0/k,h);
iter=2;
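% Alternately grow both sets: at each step the remaining object furthest
% (in the min-distance sense) from the current model set joins the model
% set, and likewise for the test set.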
while length(model)<k
[ii,ww]=max(min(fastdist(x(:,2:n),X(model,:))));
model=[model;x(ww,1)];
x(ww,:)=[];
[ii,ww]=max(min(fastdist(x(:,2:n),X(test,:))));
test=[test;x(ww,1)];
x(ww,:)=[];
iter=iter+1;
h=waitbar(iter/k,h);
end
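% Objects not assigned during the alternating phase are added to the model set.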
if ~isempty(x);
model=[model;x(:,1)];
end
close(h);
function D=fastdist(x,y)
% Calculates squared Euclidean distances between two sets of objects, using
% the expansion ||a-b||^2 = ||a||^2 + ||b||^2 - 2*a*b'. D(i,j) is the
% squared distance between y(i,:) and x(j,:).
D=((sum(y'.^2))'*ones(1,size(x,1)))+(ones(size(y,1),1)*(sum(x'.^2)))-2*(y*x');