// Copyright (C) 2010 - 2011 - DIGITEO - Michael Baudin
//
// This file must be used under the terms of the CeCILL.
// This source file is licensed as described in the file COPYING, which
// you should have received as part of this distribution. The terms
// are also available at
// http://www.cecill.info/licences/Licence_CeCILL_V2-en.txt
//
// This is a demonstration of selected Scilab optimization features.
//
// Summary:
// 1. Non linear optimization with optim
// 2. Combining optim with derivative
// 3. Linear optimization with linpro
// 4. Non linear least squares with lsqrsolve
// 5. Genetic algorithms with optim_ga
// 6. Numerical derivatives with derivative

///////////////////////////////////////////////////////////
//
// 1. Non linear optimization
//
//
// Reference
// H. H. Rosenbrock. An automatic method for finding the greatest or least value
// of a function. The Computer Journal, 3(3):175{184, March 1960.

// Rosenbrock's banana function in the cost-function form expected by optim:
// returns the objective f and its analytical gradient g.
// The global minimum is f=0 at x=(1,1).
function [f, g, ind]=rosenbrock(x, ind)
    f = 100*(x(2)-x(1)^2)^2 + (1-x(1))^2
    // Analytical gradient: df/dx1 and df/dx2.
    g(1) = - 400*(x(2)-x(1)^2)*x(1) - 2*(1-x(1))
    g(2) = 200*(x(2)-x(1)^2)
endfunction

// Minimize starting from the classical initial point (-1.2, 1).
x0 = [-1.2 1.0];
[ fopt , xopt ] = optim ( rosenbrock , x0 )

// With plots
// Same cost function, but marks each evaluation point on the
// current graphics window with a green circle.
function [f, g, ind]=rosenbrockP(x, ind)
    [f, g, ind]=rosenbrock(x, ind)
    plot(x(1),x(2),"go")
endfunction

// Vectorized for contouring.
// Takes matrices x1, x2 (e.g. from meshgrid) and evaluates elementwise.
function f=rosenbrockV(x1, x2)
    f = 100 *(x2-x1.^2).^2 + (1-x1).^2
endfunction

// Draw contour levels of the Rosenbrock function, then run the
// plotting variant so the optimization path appears on top of them.
x1 = linspace ( -2 , 2 , 100 );
x2 = linspace ( -1 , 2 , 100 );
[XX1,XX2]=meshgrid(x1,x2);
Z = rosenbrockV ( XX1 , XX2 );
// Z is transposed because contour expects Z(i,j) at (x1(i), x2(j)).
contour ( x1 , x2 , Z' , [1 10 100 500 1000] );
xtitle("Contours of the Rosenbrock function","X1","X2");
x0 = [-1.2 1.0];
[ fopt , xopt ] = optim ( rosenbrockP , x0 );

///////////////////////////////////////////////////////////
//
// 2. Combining optim with derivative
//

// Rosenbrock objective only (no gradient): suitable as the argument
// of the derivative() finite-difference routine.
function f=rosenbrockF(x)
    f = 100*(x(2)-x(1)^2)^2 + (1-x(1))^2
endfunction

// Rosenbrock objective with analytical gradient g and Hessian H,
// written in sum-of-squares form f = (10*A)^2 + B^2.
function [f, g, H] = rosenbrockFGH ( x )
    A = x(2) - x(1)^2
    B = 1-x(1)
    fm(1) = 10*A
    fm(2) = B
    f = sum(fm.^2)
    g(1) = - 400*A*x(1) - 2*B
    g(2) = 200 * A
    H(1,1) = 1200*x(1)^2 - 400*x(2) + 2
    H(1,2) = -400*x(1)
    H(2,1) = H(1,2)   // Hessian is symmetric
    H(2,2) = 200
endfunction

// Compare the analytical derivatives against finite differences:
// the relative errors printed below should be tiny.
x0 = [-1.2 1.0];
[f0, g0, H0] = rosenbrockFGH ( x0 )
[gFD, HFD] = derivative(rosenbrockF, x0', H_form="blockmat")
(g0-gFD')./g0
(H0-HFD)./H0

// Cost function for optim where the gradient is not coded by hand
// but computed by derivative() with an order-4 formula.
function [f, g, ind]=rosenbrockCost2(x, ind)
    f = rosenbrockF(x)
    g = derivative(rosenbrockF, x', order=4)
endfunction

x0 = [-1.2 1.0];
[fopt, xopt]=optim(rosenbrockCost2, x0 )
// Condition number of the Hessian at the optimum: a measure of
// how ill-conditioned the problem is near the solution.
[gopt, Hopt] = derivative(rosenbrockF, xopt', H_form="blockmat")
cond(Hopt)

///////////////////////////////////////////////////////////
//
// 3. Linear optimization
//
// Reference
// "Operations Research: applications and algorithms", Wayne L. Winstons,
// Section 5.2, "The Computer and Sensitivity Analysis", in the
// "Degeneracy and Sensitivity Analysis" subsection.
//
// Min -6*x1 - 4*x2 - 3*x3 - 2*x4
// such that:
// 2*x1 + 3*x2 + x3 + 2* x4 <= 400
// x1 + x2 + 2*x3 + x4 <= 150
// 2*x1 + x2 + x3 + 0.5*x4 <= 200
// 3*x1 + x2 + x4 <= 250;
// x >= 0;
c = [-6 -4 -3 -2]';
// Inequality-constraint matrix: one row per constraint above (4x4).
A = [
2 3 1   2
1 1 2   1
2 1 1 0.5
3 1 0   1
];
b = [400 150 200 250]';
lb=[0 0 0 0]';
ub=[%inf %inf %inf %inf]';
// With karmarkar (interior-point method, built into Scilab)
[xopt,fopt,exitflag,iter,lagr] = karmarkar([],[],c,[],[],[],[],[],A,b,lb,ub)
// With ATOMS/quapro
// Make sure that quapro is installed.
qualoaded = atomsIsLoaded("quapro");
if ( ~qualoaded ) then
    atomsInstall("quapro");
    atomsLoad("quapro");
end
[xopt,lagr,fopt]=linpro(c,A,b,lb,ub)

///////////////////////////////////////////////////////////
//
// 4. Non linear least squares with lsqrsolve
//

// The right-hand side of the
// Ordinary Differential Equation.
// Parameters a, b are the unknowns to be fitted.
function dy = myModel(t, y, a, b)
    dy(1) = -a*y(2) + y(1) + t^2 + 6*t + b
    dy(2) = b*y(1) - a*y(2) + 4*t + (a+b)*(1-t^2)
endfunction

// Returns the difference between the
// simulated differential
// equation and the experimental data.
// x = [a;b] are the model parameters; t are the sample times and
// y_exp the measured states (one row per time, one column per state).
function f = myDifferences(x, t, y_exp)
    a = x(1)
    b = x(2)
    // Initial condition = first experimental sample at t0 = 0.
    y0 = y_exp(1,:)
    t0 = 0
    y_calc=ode(y0',t0,t,list(myModel,a,b))
    diffmat = y_calc' - y_exp
    // Make a column vector
    f = diffmat(:)
endfunction

// Adapter with the (x, m) signature required by lsqrsolve;
// the residual count m is unused because myDifferences knows the data size.
function f = myDiffLsqrsolve ( x, m, t, y_exp )
    f = myDifferences ( x, t, y_exp )
endfunction

//
// 1. Experimental data
t = [0 1 2 3 4 5 6]';
y_exp(:,1) = [-1 2 11 26 47 74 107]';
y_exp(:,2) = [ 1 3 09 19 33 51 73]';
//
// 2. Optimize
x0 = [0.1;0.4];
// Evaluate once at x0 to get the number of residuals m.
y0 = myDifferences ( x0 , t, y_exp );
m = size(y0,"*");
[xopt,diffopt]=lsqrsolve(x0,list(myDiffLsqrsolve,t, y_exp),m)

// Comparing before/after optimization.
// Plots, for parameters (a,b), the simulated ODE trajectories against
// the experimental data, one subplot per state variable.
function plotmyODE(a,b,t0,y0,t,y_exp,mytitle)
    y_calc=ode(y0',t0,t,list(myModel,a,b));
    subplot(2,1,1);
    plot(t,y_calc(1,:),"bo-");
    plot(t,y_exp(:,1),"r*");
    legend(["ODE","Data"]);
    xtitle(mytitle,"t","Y1");
    subplot(2,1,2);
    plot(t,y_calc(2,:),"bo-");
    plot(t,y_exp(:,2),"r*");
    legend(["ODE","Data"]);
    xtitle(mytitle,"t","Y2");
    // Put transparent marks
    h = gcf();
    for i = 1 : size(h.children,"*")
        for j = 2 : 3
            h.children(i).children(j).children.mark_background=0;
        end
    end
endfunction

// Before optimization
a=x0(1);
b=x0(2);
y0 = y_exp(1,:);
t0 = 0;
mytitle = "Before Optimization";
scf();
plotmyODE(a,b,t0,y0,t,y_exp,mytitle);
// After optimization
a=xopt(1);
b=xopt(2);
y0 = y_exp(1,:);
t0 = 0;
mytitle = "After Optimization";
scf();
plotmyODE(a,b,t0,y0,t,y_exp,mytitle);

///////////////////////////////////////////////////////////
//
// 5. Genetic algorithms
//

// Vectorized function for contouring.
function y = rastriginV ( x1 , x2 )
    y = x1.^2 + x2.^2-cos(12*x1)-cos(18*x2)
endfunction

// Non-vectorized function for optimization.
function y = rastrigin ( x )
    y = rastriginV ( x(1) , x(2) )
endfunction

// Objective for the binary-coded GA: decodes the chromosome x
// (8 bits per variable, in [-1,1]^2) then evaluates rastrigin.
function y= rastriginBinary ( x )
    BinLen = 8
    lb = [-1 -1]';
    ub = [1 1]';
    tmp = convert_to_float(x, BinLen, ub, lb)
    y = rastrigin (tmp)
endfunction

// GA settings: population size, crossover/mutation probabilities,
// number of generations, and binary-coding operators.
PopSize = 100;
Proba_cross = 0.7;
Proba_mut = 0.1;
NbGen = 10;
Log = %T;
ga_params = init_param();
ga_params = add_param(ga_params,"minbound",[-1 -1]');
ga_params = add_param(ga_params,"maxbound",[1 1]');
ga_params = add_param(ga_params,"dimension",2);
ga_params = add_param(ga_params,"binary_length",8);
ga_params = add_param(ga_params,"crossover_func",crossover_ga_binary);
ga_params = add_param(ga_params,"mutation_func",mutation_ga_binary);
ga_params = add_param(ga_params,"codage_func",coding_ga_binary);
ga_params = add_param(ga_params,"multi_cross",%T);
[pop_opt,fobj_pop_opt, pop_init , fobj_init] = optim_ga(rastriginBinary, PopSize, ..
NbGen, Proba_mut, Proba_cross, Log,ga_params);
// 3. Compute contouring values.
x1 = linspace(-1,1,100);
x2 = linspace(-1,1,100);
[XX1,XX2]=meshgrid(x1,x2);
Z = rastriginV ( XX1 , XX2 );
// 4. Plot the results.
scf();
plot3d(x1,x2,Z');
// (fixed typo: was "Ratrigin")
xtitle('Rastrigin Function - 3D Plot','x1','x2');
scf();
drawlater;
// Plot initial population
subplot(2,1,1);
xset("fpf"," ");
contour ( x1 , x2 , Z' , 5 );
xtitle("Initial Population","X1","X2");
for i=1:length(pop_init)
    plot(pop_init(i)(1),pop_init(i)(2),"r.");
end
// Plot optimum population
subplot(2,1,2);
xset("fpf"," ");
contour ( x1 , x2 , Z' , 5 );
xtitle("Optimum Population","X1","X2");
for i=1:length(pop_opt)
    plot(pop_opt(i)(1),pop_opt(i)(2),"g.");
end
drawnow;

//////////////////////////////////////////////////////////////
// 6. Plot patterns of 1st order F.D. for order 1, 2, 4
// Plot the evaluation points for Finite Differences.
// Rosenbrock objective that also marks the evaluation point
// (shifted so the optimum (1,1) is plotted at the origin).
function f = rosenPlot(x)
    [f, g, ind]=rosenbrock(x, 1)
    plot(x(1)-1,x(2)-1,"bo")
endfunction

// Evaluate derivative() at x = (1,1); each call to rosenPlot plots the
// point where the finite-difference formula samples the function, so
// each figure shows the stencil pattern of the chosen order.
x = [1;1];
//
// Gradient
h = scf();
g = derivative(rosenPlot, x, order=1);
xtitle("Gradient - Evaluation points - Order 1","X1-1","X2-1");
// Hide the axes and widen the bounds a little so the stencil is centered.
h.children.axes_visible=["off" "off","off"];
h.children.data_bounds(1,:) = -h.children.data_bounds(2,:);
h.children.data_bounds = 1.1*h.children.data_bounds;
//
h = scf();
g = derivative(rosenPlot, x, order=2);
xtitle("Gradient - Evaluation points - Order 2","X1-1","X2-1");
h.children.axes_visible=["off" "off","off"];
h.children.data_bounds = 1.1*h.children.data_bounds;
//
h = scf();
g = derivative(rosenPlot, x, order=4);
xtitle("Gradient - Evaluation points - Order 4","X1-1","X2-1");
h.children.axes_visible=["off" "off","off"];
h.children.data_bounds = 1.1*h.children.data_bounds;