function [xs, ys, yp, ysig, nb]=EGOPRS(xs,ys,xp,nmax,range,p)
% INPUTS
%   (xs, ys): initial sample pairs
%   xp: np x NDV prediction points
%   nmax: maximum number of samples
%   range: range of design domain
%   p: order of PRS
% OUTPUTS
%   (xs, ys): sample pairs including the added samples
%   yp: np x 1 mean predictions
%   ysig: np x 1 prediction standard deviations
%   nb: # of coefficients in trend function
%
[ny,NDV]=size(xs); np=size(xp,1);                  % Size of samples
meanx=mean(xs); stdx=std(xs); xs=(xs-meanx)./stdx; % Scaling sample locations
meany=mean(ys); stdy=std(ys); ys=(ys-meany)/stdy;  % Scaling samples
xp=(xp-meanx)./stdx;                               % Scaling prediction points
range=(range-meanx)./stdx;                         % Scaling design space
nb=factorial(NDV+p)/(factorial(NDV)*factorial(p)); % No. of coefficients
%
fprintf("  ny     yPBS    maxEI     xNew     yNew\n")  % Table legend
%fprintf("  ny     yPBS    maxPI     xNew     yNew\n") % Table legend
for loop=ny:nmax                                   % EGO loop
    X=basis(xs,NDV,p);                             % Design matrix
    b=(X'*X)\(X'*ys);                              % Fitting PRS
    %
    % Optimization using EI to find a new sample location
    par=struct('b',b,'nb',nb,'p',p,'sigma2',sum((ys-X*b).^2)/(ny-nb),...
        'R',inv(X'*X),'yPBS',min(ys));             % Structure of database
    options=optimset('Display','off','TolFun',1e-6,'DiffMinChange',0.001);
    xNew=ga(@(x) exImp(x,par),NDV,[],[],[],[],range(1,:),range(2,:),[],options);
    %xNew=ga(@(x) prImp(x,par),NDV,[],[],[],[],range(1,:),range(2,:),[],options);
    % Stopping criteria
    if (loop==nmax||max(min(abs(xNew-xs)))<1E-3||exImp(xNew,par)>-1E-3)
    %if (loop==nmax||max(min(abs(xNew-xs)))<1E-4||prImp(xNew,par)>-1E-3)
        fprintf('%4d %8.3g\n',loop,par.yPBS.*stdy+meany);
        break;
    end
    %
    fprintf('%4d %8.3g %8.3g',loop,par.yPBS.*stdy+meany,-exImp(xNew,par));
    %fprintf('%4d %8.3g %8.3g',loop,par.yPBS.*stdy+meany,-prImp(xNew,par));
    xs=[xs;xNew]; ny=ny+1;                         % Adding a new sample
    yNew=(EGOfn(xNew.*stdx+meanx)-meany)/stdy; ys=[ys;yNew];
    fprintf(' %8.3g',xNew.*stdx+meanx); fprintf(' %8.3g\n',yNew*stdy+meany);
end
xi=basis(xp,NDV,p);                                % Basis vector at prediction points
yp=(xi*b)*stdy+meany;                              % PRS predictions
ysig=zeros(np,1);                                  % Prediction STD
for i=1:np, ysig(i)=sqrt(par.sigma2.*xi(i,:)*par.R*xi(i,:)')*stdy; end
xs=xs.*stdx+meanx; ys=ys*stdy+meany;               % Denormalizing samples
end
%
function PI=prImp(x,par)
xi=basis(x,size(x,2),par.p);                       % Basis vector
yhat=xi*par.b;                                     % PRS prediction
ytarget=par.yPBS-0.25*abs(par.yPBS);               % Target value
ysig=sqrt(par.sigma2*xi*par.R*xi');                % Prediction STD
u=(ytarget-yhat)./ysig;                            % Normalized target value
PI=-cdf('Normal',u,0,1);                           % Negative probability of improvement
end
%
function EI=exImp(x,par)
xi=basis(x,size(x,2),par.p);                       % Basis vector
yhat=xi*par.b;                                     % PRS prediction
ysig=sqrt(par.sigma2*xi*par.R*xi');                % Prediction STD
u=(par.yPBS-yhat)./ysig;                           % Normalized PBS
EI=-(par.yPBS-yhat).*normcdf(u,0,1)-ysig.*normpdf(u,0,1); % Negative expected improvement
end
%
function X=basis(x,n,p)
% Calculating the polynomial basis functions with n variables and order p
ny=size(x,1);
X(:,1)=ones(ny,1);
if p > 0
    X=[X x]; X1=x; n_loc=1:n; n_loc1=1;
    for i=2:p
        nc=size(X1,2); X2=[]; ctr=1;
        for k=1:n
            l_ctr=0;
            for j=n_loc(k):nc
                X2(:,ctr)=x(:,k).*X1(:,j); ctr=ctr+1; l_ctr=l_ctr+1;
            end
            n_loc1(k+1)=l_ctr+n_loc1(k);
        end
        X=[X X2]; X1=X2; n_loc=n_loc1;
    end
end
end
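The driver below is a minimal usage sketch, not part of the listing above: it assumes a one-variable problem, uses the Forrester benchmark as the sampling function EGOfn that EGOPRS calls when it adds a point, and picks illustrative values for the initial samples, design range, sample budget, and PRS order. Running it requires the Global Optimization Toolbox (ga) and the Statistics and Machine Learning Toolbox (cdf, normcdf, normpdf), and EGOfn must be saved as its own file on the path so EGOPRS can call it.

% --- EGOfn.m (assumed sampling function for this example only) ---
function y=EGOfn(x)
y=(6*x-2).^2.*sin(12*x-4);         % Forrester et al. one-variable benchmark
end

% --- example driver script (illustrative settings) ---
xs=[0; 0.25; 0.5; 0.75; 1];        % initial 1-D sample locations (ny x NDV)
ys=EGOfn(xs);                      % initial sample responses
xp=linspace(0,1,101)';             % prediction points
range=[0; 1];                      % [lower bound; upper bound] of design domain
[xs,ys,yp,ysig,nb]=EGOPRS(xs,ys,xp,15,range,2);  % 15-sample budget, quadratic PRS
plot(xp,yp,'-',xs,ys,'o');         % mean prediction and final samples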