use PDL;
use PDL::Opt::Simplex;
#
# Simplex Demo - Alison Offer (aro@aaocbn.aao.gov.au)
#
#
# convergence criterion: stop once the simplex has shrunk below this size
$minsize = 1.e-6;
# maximum number of simplex iterations
$maxiter = 100;
#
# number of function evaluations
$count = 0;
print " \n
1: least squares gaussian fit to data + noise \n
32 *exp (-((x-10)/6)^2) + noise
2: minimise polynomial \n
(x-3)^2 + 2.*(x-3)*(y-2.5) + 3.*(y-2.5)^2 \n
Please make your choice (1 or 2):";
chop($choice = <>);
if ($choice == 1) {
print "Please enter noise factor (small number, 0-6):";
chop($factor = <>);
#
# data : gaussian + noise
#
foreach $j (0..19) {
    $data[$j] = 32*exp(-(($j-10)/6)**2) +
                $factor * (rand() - 0.5);
}
#
# initial guess - $initsize sets the size of the initial simplex
#
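# the three parameters are the amplitude, centre and width of the Gaussian;
# the data above were generated with 32, 10 and 6, so this guess starts
# reasonably close to the true values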
$init = pdl [ 33, 9, 12 ];
$initsize = 2;
#
#
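# simplex() takes the starting point, the initial simplex size, the
# convergence size, the iteration limit and the objective sub; it returns
# the best vertex found, the final simplex size and the function value there.
# Note that the objective sub may be handed more than one simplex vertex at
# a time, which is why it works on slices of $xv and advances $count by
# dim(0), the number of vertices evaluated in that call.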
($optimum,$ssize,$optval) = simplex($init,$initsize,$minsize,$maxiter,
    # this sub evaluates the function to be minimised: the sum of squared
    # residuals between the data and the model Gaussian
    sub {
        my ($xv) = @_;
        my $a = $xv->slice("(0)");
        my $b = $xv->slice("(1)");
        my $c = $xv->slice("(2)");
        $count += $a->dim(0);
        my $sum = $a * 0.0;
        foreach $j (0..19) {
            $sum += ($data[$j] - $a*exp(-(($j-$b)/$c)**2))**2;
        }
        return $sum;
    });
} else {
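# minimise the quadratic directly: it is positive definite with its minimum
# at (3, 2.5), so start the simplex a short distance away at (2, 2)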
$init = pdl [ 2, 2 ];
$initsize = 2;
($optimum,$ssize,$optval) = simplex($init,$initsize,$minsize,$maxiter,
    sub {
        my ($xv) = @_;
        my $x = $xv->slice("(0)");
        my $y = $xv->slice("(1)");
        $count += $x->dim(0);
        return ($x-3)**2 + 2.*($x-3)*($y-2.5)
             + 3.*($y-2.5)**2;
    });
}
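#
# report the results: the number of function evaluations, the optimum
# parameter vector, the final simplex size (compare with $minsize to see
# whether the search converged before hitting $maxiter) and the function
# value at the optimum
#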
print "N_EVAL = $count\n";
print "OPTIMUM = $optimum \n";
print "SSIZE = $ssize\n";
print "OPTVAL = $optval\n";