%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%% REGRESION plots polynomial regression curves of several orders fitted
%%% to a set of data points entered interactively with the mouse.
%%%
%%% AUTHOR: Jesus Cid Sueiro.
%%% VERSION: 2.1 (01/03/2010)
%%% CHANGES: wrt v1.0: visualizes the regression curves progressively,
%%%                    each time an observation is added.
%%%          wrt v1.1: Splits observations in training and test.
%%%                    Starts commenting code in English.
%%%                    Changes inverse computation by the '\' matlab
%%%                    operator, which is much more robust.
%%%                    Removes sample-by-sample regression and
%%%                    visualization.
%%%          wrt v2.0: Includes train and test data labels in legend plot.
%%% Course: Teoria Moderna de la Deteccion y la Estimacion
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

%%% Clean workspace and figures
clear all; close all; format compact

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%% Adjustable parameters
N = 10;             %%% No. of training samples (and same amount for testing)
orden = [1 2 N-1];  %%% Polynomial degrees (models) to visualize

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%% Initialize figure
figure
% Line styles, 3 characters per visualized model, indexed later as
% tipolinea(3*(loc-1)+1:3*loc) for the loc-th entry of 'orden'.
tipolinea = 'b- b--b: b-.r- r--r: r-.g- g--g: g-.';

%%% Initialize observation vectors (2*N points: N train + N test)
x = zeros(2*N,1);
s = zeros(2*N,1);

%%% Acquire and plot the observations (one mouse click per point on the
%%% left subplot)
for i = 1:2*N
    subplot(121)
    axis([0 1 0 2]);
    [x(i,1), s(i,1)] = ginput(1);  % Pick one observation with the mouse
    plot(x, s, '+');               % Redraw all observations so far
end
axis([0 1 0 2]);
drawnow

%%% Extended feature matrix with powers of x up to order 'OrdenMax':
%%% column k+1 holds x.^k, so row i is [1, x_i, x_i^2, ..., x_i^OrdenMax].
OrdenMax = N-1;  %%% Maximum model order
Xe = repmat(x,1,OrdenMax+1).^repmat(0:OrdenMax,2*N,1);

%%% Split for training and test.
%%% Random train/test split: first N shuffled indices train, rest test.
ind = randperm(2*N);
iTrain = ind(1:N);
iTest = ind(N+1:2*N);
xTrain = x(iTrain);
sTrain = s(iTrain);
XeTrain = Xe(iTrain,:);
xTest = x(iTest);
sTest = s(iTest);
XeTest = Xe(iTest,:);

%%% Plot training and test points
subplot(121)
axis([0 1 0 2]);
hplot(1) = plot(xTrain, sTrain, '+b');
hold on;
hplot(2) = plot(xTest, sTest, '*r');
leyenda{1} = 'Datos entren.';
leyenda{2} = 'Datos test';

% Feature matrix for a uniform grid of the input line (used to draw
% smooth regression curves).
xgrid = (0:0.01:1)';
xegrid = repmat(xgrid,1,OrdenMax+1).^repmat(0:OrdenMax,length(xgrid),1);

%%% Fit one regression curve per polynomial degree j
eTrain = zeros(OrdenMax,1);
eTest = zeros(OrdenMax,1);
for j = 1:OrdenMax
    Xej = XeTrain(:,1:j+1);  % Select features up to order j
    we = Xej\sTrain;         % Least-squares coefficients via '\' (robust)

    % Sum of squared errors on training and test sets
    yTrain = Xej*we;
    eTrain(j) = sum((sTrain-yTrain).^2);
    yTest = XeTest(:,1:j+1)*we;
    eTest(j) = sum((sTest-yTest).^2);

    % Compute estimates from the grid samples
    ygrid = we'*xegrid(:,1:j+1)';

    % Draw the curve only for the degrees selected in 'orden'.
    % NOTE(review): the original dead branch handling loc==0 inside
    % 'if (loc>0)' was unreachable and has been removed.
    [tf, loc] = ismember(j, orden);
    if tf
        subplot(121); hold on
        hplot(loc+2) = plot(xgrid, ygrid, tipolinea(3*(loc-1)+1:3*loc));
        title('Curvas de regresion');
        xlabel('x'); ylabel('s')
        leyenda{loc+2} = ['Grado ' num2str(j)];
    end
end

%%% Draw the legend and the error plot
subplot(121)
axis([0 1 0 2]);
legend(hplot, leyenda)
drawnow

% Error curves: sum of squared errors vs. polynomial degree
subplot(122)
iOrden = 1:OrdenMax;
stem(iOrden, eTest, '--r', 'filled');
axis([0 OrdenMax 0 1.5*max(eTrain(1),eTest(1))]);
hold on
stem(iOrden, eTrain, 'filled');
title('Errores cuadraticos de entrenamiento y test');
xlabel('Grado del polinomio de regresion')
ylabel('Suma de errores cuadraticos')
legend('Errores de test','Errores de entrenamiento');
drawnow
hold off