function run_demo()
% RUN_DEMO  Demonstration of Gaussian Process prior models for data
% analysis and prediction on one-dimensional data.
%
% The test data used is a simple sine function, with additive Gaussian
% white noise.
%
% If you want to test out your own function, you may change the values in
% the data-creation section below, where M is the n-dimensional input
% explanatory variable and y is the noisy outcome. It is important to keep
% the data size below 2,000 in case your system may run out of memory.
%
% During the training (optimisation) process, you may want to select your
% own starting initial. To insert your own starting initial, go to the
% TRAIN_DATA function at the bottom of this file and replace the random
% initial values in the appropriate command line. To know more, look up
% the Optimisation Toolbox provided by MATLAB by typing:
%
%   help fminunc.m
%
% (C) Gaussian Process Toolbox 1.0
% (C) Copyright 2005-2007, Keith Neo
% http://www.hamilton.ie

clc;
disp(' Thank you for using Gaussian Process Toolbox 1.0');
disp(' ================================================');
disp(' This script will take you through step by step on how to use the toolbox');
disp(' ');
input_pause;

disp(' Step 1: Create a 1-Dimensional Test Data');
input_pause;

%% Data created here, {M,y} %%
M = 0.1*(1:100)';
y = sin(M) + 0.1*randn(size(M,1),1);
%% Data created ends %%

disp(' Input explanatory, M and noisy outcome, y created:');
disp(' Figure 1 shows the noisy data');
figure(1), plot(M,y,'x');
title('Noisy data');
xlabel('Time');
disp(' ');
input_pause;

disp(' ---------------------------');
disp(' Step 2: Train the Test Data');
input_pause;

%% Define training variables %%
disp(' Requirement here is to use Optimisation Toolbox provided by MATLAB');
disp(' ');
disp(' You can choose to use "FMINCON" or "FMINUNC" to perform large-scale');
disp(' nonlinear optimisation method to train the data. However, in this');
disp(' example, I will only focus only on using unconstrained');
disp(' optimisation "FMINUNC" to train the model');
disp(' The following script can be used as the optimisation script:');

run_time = 'Y';
while (run_time=='Y') || (run_time=='y')
    disp(' 1) nolin_grad: optimisation with gradient information provided');
    disp(' 2) nolin_grad_optag: optimisation with gradient information provided');
    disp(' 3) nolin_hess: optimisation with hessian information provided');
    disp(' 4) nolin_hess_optag: optimisation with hessian information provided');
    disp(' ');

    % Keep prompting until a valid method number (1-4) is entered.
    err_run = 1;
    while err_run==1
        choice = input(' Select method for optimisation: ');
        disp(' ');
        if isempty(choice)
            disp(' You must enter a value');
            disp(' ');
        elseif (choice==1) || (choice==2) || (choice==3) || (choice==4)
            disp(' You have selected choice:');
            disp(choice);
            err_run = 0;
        else
            disp(' You have input wrong command, please select again');
            disp(' ');
        end
    end

    hyp = train_data(M,y,choice);

    % Ask whether to train again; re-prompt on invalid replies.
    err_run = 1;
    while err_run==1
        run_time = input(' Do you want to try again? (Y/N): ','s');
        disp(' ');
        if isempty(run_time)
            % BUG FIX: run_time(1) on an empty reply (bare Enter) used to
            % raise an indexing error; treat it as an invalid answer instead.
            disp(' You have input wrong command, please select again');
            disp(' ');
            continue;
        end
        run_time = run_time(1);
        if (run_time=='N') || (run_time=='n')
            err_run = 0;
        elseif (run_time=='Y') || (run_time=='y')
            err_run = 0;
        else
            disp(' You have input wrong command, please select again');
            disp(' ');
            err_run = 1;
        end
    end
end
input_pause;
%% Definition of training variables ends %%

%% Define Posterior %%
clc;
disp(' --------------------------------');
disp(' Step 3: Predicting the posterior');
input_pause;
disp(' Using adapted hyperparameter values obtained from maximising');
disp(' the log-likelihood function.');
disp(' The posterior are predicted as shown in Figure 1.');
N = 0.1*(0:100)';
[pr,sd] = gp2_iden(hyp,M,y,N);
figure(1), hold on;
disp(' ');
input_pause;
disp(' Now here is the prediction in red');
disp(' ');
plot(N,pr,'r');
input_pause;
disp(' And the 2x confidence intervals');
disp(' ');
plot(N,pr+2*sd,'m');
plot(N,pr-2*sd,'m');
hold off;
input_pause;
%% Posterior Predicted and Ends %%

%% Predicting Derivative Observations %%
disp(' --------------------------------');
disp(' Step 4: Predicting the derivative observation');
input_pause;
disp(' Using adapted hyperparameter values obtained from maximising');
disp(' the log-likelihood function.');
disp(' The derivative posterior are predicted as shown in Figure 2.');
[prd,sdd] = gp2_iden_derv(hyp,M,y,N);
figure(2), plot(N,prd); hold on;
title('Derivative prediction with confidence intervals');
xlabel('Time');
disp(' ');
input_pause;
disp(' Here are the confidence intervals for the derivative observation');
disp(' ');
plot(N,prd+2*sdd,'r');
plot(N,prd-2*sdd,'r');
hold off;
input_pause;
%% Derivative Prediction Ends %%

disp(' =============================================================');
disp(' Thank you for using Gaussian Process Prior Models Toolbox 1.0');
disp(' (C) Copyright 2005 - 2006, Keith Neo');
disp(' Email: keith.neo@nuim.ie');
disp(' Website: http://keith.hostmatrix.org');
disp(' Institute: Hamilton Institute, ');
disp(' =============================================================');

function input_pause()
% INPUT_PAUSE  Prompt the user and block until any key is pressed.
disp(' Press any key to continue...');
disp(' ');
pause;

function hyp=train_data(M,y,choice)
% TRAIN_DATA  Optimise GP hyperparameters by maximising the log-likelihood.
%
%   HYP = TRAIN_DATA(M,Y,CHOICE) trains on explanatory data M (n-by-D0)
%   and noisy outcome Y using FMINUNC with the objective selected by
%   CHOICE:
%     1 - nolin_grad        (gradient supplied)
%     2 - nolin_grad_optag  (gradient supplied, 'a' solved analytically)
%     3 - nolin_hess        (hessian supplied)
%     4 - nolin_hess_optag  (hessian supplied, 'a' solved analytically)
%   Returns HYP = [a g v], the (positive) hyperparameter values.
%
%   To use your own starting initial, replace the RAND(...) calls below.
D0 = size(M,2);
OPT = optimset('Display','iter','GradObj','on','LargeScale','on');
if (choice==3) || (choice==4)
    OPT = optimset(OPT,'Hessian','on');
end
% BUG FIX: starting points were hard-coded for D0==1 (rand(3,1)/rand(2,1));
% sized from D0 so multi-dimensional inputs also work. Identical for the
% demo's 1-D data. (Extra args after OPT use the legacy FMINUNC P1,P2,...
% calling convention; NOTE(review): removed in recent MATLAB releases —
% use anonymous functions if upgrading.)
switch choice
    case {1}
        disp(' Using NOLIN_GRAD.M');
        h = fminunc('nolin_grad',rand(D0+2,1),OPT,M,y);
        hyp = exp(h');
        a = hyp(1);
        g = hyp(2:D0+1);
        v = hyp(D0+2);
    case {2}
        disp(' Using NOLIN_GRAD_OPTAG.M');
        h = fminunc('nolin_grad_optag',rand(D0+1,1),OPT,M,y);
        g = exp(h(1:D0)');
        v = exp(h(D0+1));
        a = nolin_a(M,y,exp(h));
        hyp = [a g v];
    case {3}
        disp(' Using NOLIN_HESS.M');
        h = fminunc('nolin_hess',rand(D0+2,1),OPT,M,y);
        hyp = exp(h');
        a = hyp(1);
        g = hyp(2:D0+1);
        v = hyp(D0+2);
    case {4}
        disp(' Using NOLIN_HESS_OPTAG.M');
        h = fminunc('nolin_hess_optag',rand(D0+1,1),OPT,M,y);
        g = exp(h(1:D0)');
        v = exp(h(D0+1));
        a = nolin_a(M,y,exp(h));
        hyp = [a g v];
    otherwise
        % BUG FIX: the old 'Bug in Script' branch fell through to the a/g/v
        % echoes below with those variables undefined, crashing with a
        % misleading "Undefined variable" error. Fail explicitly instead.
        error('train_data:badChoice','Unknown optimisation choice: %g',choice);
end
disp(' ');
disp('The hyperparameter values obtained are:')
a
g
v