Changeset 419

Timestamp: 08/25/10 13:40:01
Location:  branches/alta/mystic-0.2a1
Files:     1 added, 5 edited
branches/alta/mystic-0.2a1/Make.mm (r418 → r419)

 …
     forward_model.py \
     helputil.py \
+    linesearch.py \
     mystic_math.py \
     nested.py \
 …
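The new linesearch.py is what lets the rest of this changeset drop its guarded `import scipy.optimize` blocks: the solvers can now `import linesearch` from the package itself. A minimal sketch of how the call sites below invoke it, assuming the bundled module keeps the `scipy.optimize.linesearch.line_search` signature that appears in these diffs (the quadratic objective is purely illustrative):

    import numpy
    import linesearch   # the local module this changeset adds to the build

    f = lambda x: numpy.dot(x, x)      # toy objective
    fprime = lambda x: 2*x             # its gradient

    xk = numpy.array([3.0, -2.0])      # current iterate
    gfk = fprime(xk)                   # gradient at xk
    pk = -gfk                          # a descent direction
    old_fval = f(xk)
    old_old_fval = old_fval + 5000     # warm-start value, as in the solvers

    # alpha_k comes back as None when no step satisfies the Wolfe conditions
    alpha_k, fc, gc, old_fval, old_old_fval, gfkp1 = \
        linesearch.line_search(f, fprime, xk, pk, gfk, old_fval, old_old_fval)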
branches/alta/mystic-0.2a1/_scipy060optimize.py (r418 → r419)

 # guarantee implied provided you keep this notice in all copies.
 # *****END NOTICE************
-
-# Minimization routines
-# (removed: fmin_bfgs, fmin_cg)
 """local copy of scipy.optimize"""
 
-#__all__ = ['fmin', 'fmin_powell', 'fmin_ncg',
+#__all__ = ['fmin', 'fmin_powell', 'fmin_ncg', 'fmin_cg', 'fmin_bfgs',
 #           'fminbound','brent', 'golden','bracket','rosen','rosen_der',
 #           'rosen_hess', 'rosen_hess_prod', 'brute', 'approx_fprime',
 …
 from numpy import atleast_1d, eye, mgrid, argmin, zeros, shape, empty, \
      squeeze, isscalar, vectorize, asarray, absolute, sqrt, Inf, asfarray, isinf
-#import linesearch
+import linesearch
 
 # These have been copied from Numeric's MLab.py
 …
     return (f2 - f1)/epsilon
 
-'''
 def fmin_bfgs(f, x0, fprime=None, args=(), gtol=1e-5, norm=Inf,
               epsilon=_epsilon, maxiter=None, full_output=0, disp=1,
 …
     return retlist
-'''
 
-'''
 def fmin_cg(f, x0, fprime=None, args=(), gtol=1e-5, norm=Inf, epsilon=_epsilon,
             maxiter=None, full_output=0, disp=1, retall=0, callback=None):
 …
     return retlist
-'''
 
 def fmin_ncg(f, x0, fprime, fhess_p=None, fhess=None, args=(), avextol=1e-5,
 …
     algor.append('Powell Direction Set Method.')
 
-#    print
-#    print "Nonlinear CG"
-#    print "============"
-#    start = time.time()
-#    x = fmin_cg(rosen, x0, fprime=rosen_der, maxiter=200)
-#    print x
-#    times.append(time.time() - start)
-#    algor.append('Nonlinear CG \t')
-
-#    print
-#    print "BFGS Quasi-Newton"
-#    print "================="
-#    start = time.time()
-#    x = fmin_bfgs(rosen, x0, fprime=rosen_der, maxiter=80)
-#    print x
-#    times.append(time.time() - start)
-#    algor.append('BFGS Quasi-Newton\t')
-
-#    print
-#    print "BFGS approximate gradient"
-#    print "========================="
-#    start = time.time()
-#    x = fmin_bfgs(rosen, x0, gtol=1e-4, maxiter=100)
-#    print x
-#    times.append(time.time() - start)
-#    algor.append('BFGS without gradient\t')
-
+    print
+    print "Nonlinear CG"
+    print "============"
+    start = time.time()
+    x = fmin_cg(rosen, x0, fprime=rosen_der, maxiter=200)
+    print x
+    times.append(time.time() - start)
+    algor.append('Nonlinear CG \t')
+
+    print
+    print "BFGS Quasi-Newton"
+    print "================="
+    start = time.time()
+    x = fmin_bfgs(rosen, x0, fprime=rosen_der, maxiter=80)
+    print x
+    times.append(time.time() - start)
+    algor.append('BFGS Quasi-Newton\t')
+
+    print
+    print "BFGS approximate gradient"
+    print "========================="
+    start = time.time()
+    x = fmin_bfgs(rosen, x0, gtol=1e-4, maxiter=100)
+    print x
+    times.append(time.time() - start)
+    algor.append('BFGS without gradient\t')
 
     print
 …
     algor.append('Newton-CG with hessian product')
 
-
     print
     print "Newton-CG with full Hessian"
 …
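With the triple-quotes removed, fmin_bfgs and fmin_cg are live code again and the benchmark in main() exercises them. A short usage sketch mirroring those benchmark calls (rosen and rosen_der are the Rosenbrock test function and its gradient, defined in this same module):

    from _scipy060optimize import fmin_cg, fmin_bfgs, rosen, rosen_der

    x0 = [0.8, 1.2, 0.7]   # same starting point as the benchmark in main()

    # nonlinear conjugate gradient with an analytic gradient
    xcg = fmin_cg(rosen, x0, fprime=rosen_der, maxiter=200)

    # BFGS, letting the solver approximate the gradient by finite differences
    xbfgs = fmin_bfgs(rosen, x0, gtol=1e-4, maxiter=100)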
branches/alta/mystic-0.2a1/_scipyoptimize.py (r418 → r419)

 #!/usr/bin/env python
-#__docformat__ = "restructuredtext en"
-# ******NOTICE***************
-# optimize.py module by Travis E. Oliphant
-#
-# You may copy and use this module as you see fit with no
-# guarantee implied provided you keep this notice in all copies.
-# *****END NOTICE************
-"""modified local copy of scipy.optimize"""
-
-#__all__ = ['fmin', 'fmin_powell', 'fmin_ncg', # 'fmin_cg', 'fmin_bfgs',
+"""modified algorithms from local copy of scipy.optimize"""
+
+#__all__ = ['fmin', 'fmin_powell', 'fmin_ncg', 'fmin_cg', 'fmin_bfgs',
 #           'fminbound','brent', 'golden','bracket','rosen','rosen_der',
 #           'rosen_hess', 'rosen_hess_prod', 'brute', 'approx_fprime',
 …
     return grad
 
-def fmin_cg(f, x0, fprime=None, args=(), gtol=1e-5, norm=Inf, epsilon=_epsilon,
-            maxiter=None, full_output=0, disp=1, retall=0, callback=None):
-    """Minimize a function with nonlinear conjugate gradient algorithm.
-
-    :Parameters:
-
-      f -- the Python function or method to be minimized.
-      x0 : ndarray -- the initial guess for the minimizer.
-
-      fprime -- a function to compute the gradient of f.
-      args -- extra arguments to f and fprime.
-      gtol : number
-        stop when norm of gradient is less than gtol
-      norm : number
-        order of vector norm to use
-      epsilon : number
-        if fprime is approximated use this value for
-        the step size (can be scalar or vector)
-      callback -- an optional user-supplied function to call after each
-        iteration.  It is called as callback(xk), where xk is the
-        current parameter vector.
-
-    :Returns: (xopt, {fopt, func_calls, grad_calls, warnflag}, {allvecs})
-
-      xopt : ndarray
-        the minimizer of f.
-      fopt : number
-        the value of f(xopt).
-      func_calls : number
-        the number of function_calls.
-      grad_calls : number
-        the number of gradient calls.
-      warnflag : number
-        an integer warning flag:
-          1 : 'Maximum number of iterations exceeded.'
-          2 : 'Gradient and/or function calls not changing'
-      allvecs : ndarray
-        if retall then this vector of the iterates is returned
-
-    :OtherParameters:
-
-      maxiter : number
-        the maximum number of iterations.
-      full_output : number
-        if non-zero then return fopt, func_calls, grad_calls,
-        and warnflag in addition to xopt.
-      disp : number
-        print convergence message if non-zero.
-      retall : number
-        return a list of results at each iteration if True
-
-    :SeeAlso:
-
-      fmin, fmin_powell, fmin_cg,
-      fmin_bfgs, fmin_ncg -- multivariate local optimizers
-      leastsq -- nonlinear least squares minimizer
-
-      fmin_l_bfgs_b, fmin_tnc,
-      fmin_cobyla -- constrained multivariate optimizers
-
-      anneal, brute -- global optimizers
-
-      fminbound, brent, golden, bracket -- local scalar minimizers
-
-      fsolve -- n-dimenstional root-finding
-
-      brentq, brenth, ridder, bisect, newton -- one-dimensional root-finding
-
-      fixed_point -- scalar fixed-point finder
-
-    Notes
-    ---------------------------------------------
-
-      Optimize the function, f, whose gradient is given by fprime using the
-      nonlinear conjugate gradient algorithm of Polak and Ribiere
-      See Wright, and Nocedal 'Numerical Optimization', 1999, pg. 120-122.
-    """
-    x0 = asarray(x0).flatten()
-    if maxiter is None:
-        maxiter = len(x0)*200
-    func_calls, f = wrap_function(f, args)
-    if fprime is None:
-        grad_calls, myfprime = wrap_function(approx_fprime, (f, epsilon))
-    else:
-        grad_calls, myfprime = wrap_function(fprime, args)
-    gfk = myfprime(x0)
-    k = 0
-    N = len(x0)
-    xk = x0
-    old_fval = f(xk)
-    old_old_fval = old_fval + 5000
-
-    if retall:
-        allvecs = [xk]
-    sk = [2*gtol]
-    warnflag = 0
-    pk = -gfk
-    gnorm = vecnorm(gfk, ord=norm)
-    while (gnorm > gtol) and (k < maxiter):
-        deltak = numpy.dot(gfk, gfk)
-
-        # These values are modified by the line search, even if it fails
-        old_fval_backup = old_fval
-        old_old_fval_backup = old_old_fval
-
-        # NOTE: w/o scipy.optimize.linesearch, results may be noticeably worse.
-        try: #XXX: break dependency on scipy.optimize.linesearch
-            import scipy.optimize
-            alpha_k, fc, gc, old_fval, old_old_fval, gfkp1 = \
-                     scipy.optimize.linesearch.line_search(f,myfprime,xk,pk,gfk,\
-                     old_fval,old_old_fval,c2=0.4)
-        except ImportError:
-            alpha_k = None
-
-        if alpha_k is None:  # line search failed -- use different one.
-            alpha_k, fc, gc, old_fval, old_old_fval, gfkp1 = \
-                     line_search(f,myfprime,xk,pk,gfk,
-                                 old_fval_backup,old_old_fval_backup)
-            if alpha_k is None or alpha_k == 0:
-                # This line search also failed to find a better solution.
-                warnflag = 2
-                break
-        xk = xk + alpha_k*pk
-        if retall:
-            allvecs.append(xk)
-        if gfkp1 is None:
-            gfkp1 = myfprime(xk)
-        yk = gfkp1 - gfk
-        beta_k = pymax(0, numpy.dot(yk, gfkp1) / deltak)
-        pk = -gfkp1 + beta_k * pk
-        gfk = gfkp1
-        gnorm = vecnorm(gfk, ord=norm)
-        if callback is not None:
-            callback(xk)
-        k += 1
-
-    if disp or full_output:
-        fval = old_fval
-    if warnflag == 2:
-        if disp:
-            print "Warning: Desired error not necessarily achieved due to precision loss"
-            print "         Current function value: %f" % fval
-            print "         Iterations: %d" % k
-            print "         Function evaluations: %d" % func_calls[0]
-            print "         Gradient evaluations: %d" % grad_calls[0]
-
-    elif k >= maxiter:
-        warnflag = 1
-        if disp:
-            print "Warning: Maximum number of iterations has been exceeded"
-            print "         Current function value: %f" % fval
-            print "         Iterations: %d" % k
-            print "         Function evaluations: %d" % func_calls[0]
-            print "         Gradient evaluations: %d" % grad_calls[0]
-    else:
-        if disp:
-            print "Optimization terminated successfully."
-            print "         Current function value: %f" % fval
-            print "         Iterations: %d" % k
-            print "         Function evaluations: %d" % func_calls[0]
-            print "         Gradient evaluations: %d" % grad_calls[0]
-
-    if full_output:
-        retlist = xk, fval, func_calls[0], grad_calls[0], warnflag
-        if retall:
-            retlist += (allvecs,)
-    else:
-        retlist = xk
-        if retall:
-            retlist = (xk, allvecs)
-
-    return retlist
-
-def fmin_bfgs(f, x0, fprime=None, args=(), gtol=1e-5, norm=Inf,
-              epsilon=_epsilon, maxiter=None, full_output=0, disp=1,
-              retall=0, callback=None):
-    """Minimize a function using the BFGS algorithm.
-
-    :Parameters:
-
-      f : the Python function or method to be minimized.
-      x0 : ndarray
-        the initial guess for the minimizer.
-
-      fprime : a function to compute the gradient of f.
-      args : extra arguments to f and fprime.
-      gtol : number
-        gradient norm must be less than gtol before succesful termination
-      norm : number
-        order of norm (Inf is max, -Inf is min)
-      epsilon : number
-        if fprime is approximated use this value for
-        the step size (can be scalar or vector)
-      callback : an optional user-supplied function to call after each
-        iteration.  It is called as callback(xk), where xk is the
-        current parameter vector.
-
-    :Returns: (xopt, {fopt, gopt, Hopt, func_calls, grad_calls, warnflag}, <allvecs>)
-
-      xopt : ndarray
-        the minimizer of f.
-      fopt : number
-        the value of f(xopt).
-      gopt : ndarray
-        the value of f'(xopt).  (Should be near 0)
-      Bopt : ndarray
-        the value of 1/f''(xopt).  (inverse hessian matrix)
-      func_calls : number
-        the number of function_calls.
-      grad_calls : number
-        the number of gradient calls.
-      warnflag : integer
-        1 : 'Maximum number of iterations exceeded.'
-        2 : 'Gradient and/or function calls not changing'
-      allvecs : a list of all iterates (only returned if retall==1)
-
-    :OtherParameters:
-
-      maxiter : number
-        the maximum number of iterations.
-      full_output : number
-        if non-zero then return fopt, func_calls, grad_calls,
-        and warnflag in addition to xopt.
-      disp : number
-        print convergence message if non-zero.
-      retall : number
-        return a list of results at each iteration if non-zero
-
-    :SeeAlso:
-
-      fmin, fmin_powell, fmin_cg,
-      fmin_bfgs, fmin_ncg -- multivariate local optimizers
-      leastsq -- nonlinear least squares minimizer
-
-      fmin_l_bfgs_b, fmin_tnc,
-      fmin_cobyla -- constrained multivariate optimizers
-
-      anneal, brute -- global optimizers
-
-      fminbound, brent, golden, bracket -- local scalar minimizers
-
-      fsolve -- n-dimenstional root-finding
-
-      brentq, brenth, ridder, bisect, newton -- one-dimensional root-finding
-
-      fixed_point -- scalar fixed-point finder
-
-    Notes
-
-    ----------------------------------
-
-      Optimize the function, f, whose gradient is given by fprime using the
-      quasi-Newton method of Broyden, Fletcher, Goldfarb, and Shanno (BFGS)
-      See Wright, and Nocedal 'Numerical Optimization', 1999, pg. 198.
-    """
-    x0 = asarray(x0).squeeze()
-    if x0.ndim == 0:
-        x0.shape = (1,)
-    if maxiter is None:
-        maxiter = len(x0)*200
-    func_calls, f = wrap_function(f, args)
-    if fprime is None:
-        grad_calls, myfprime = wrap_function(approx_fprime, (f, epsilon))
-    else:
-        grad_calls, myfprime = wrap_function(fprime, args)
-    gfk = myfprime(x0)
-    k = 0
-    N = len(x0)
-    I = numpy.eye(N, dtype=int)
-    Hk = I
-    old_fval = f(x0)
-    old_old_fval = old_fval + 5000
-    xk = x0
-    if retall:
-        allvecs = [x0]
-    sk = [2*gtol]
-    warnflag = 0
-    gnorm = vecnorm(gfk, ord=norm)
-    while (gnorm > gtol) and (k < maxiter):
-        pk = -numpy.dot(Hk, gfk)
-
-        # NOTE: w/o scipy.optimize.linesearch, results may be noticeably worse.
-        try: #XXX: break dependency on scipy.optimize.linesearch
-            import scipy.optimize
-            alpha_k, fc, gc, old_fval, old_old_fval, gfkp1 = \
-                     scipy.optimize.linesearch.line_search(f,myfprime,xk,pk,gfk, \
-                     old_fval,old_old_fval)
-        except ImportError:
-            alpha_k = None
-
-        if alpha_k is None:  # line search failed try different one.
-            alpha_k, fc, gc, old_fval, old_old_fval, gfkp1 = \
-                     line_search(f,myfprime,xk,pk,gfk,
-                                 old_fval,old_old_fval)
-            if alpha_k is None:
-                # This line search also failed to find a better solution.
-                warnflag = 2
-                break
-        xkp1 = xk + alpha_k * pk
-        if retall:
-            allvecs.append(xkp1)
-        sk = xkp1 - xk
-        xk = xkp1
-        if gfkp1 is None:
-            gfkp1 = myfprime(xkp1)
-
-        yk = gfkp1 - gfk
-        gfk = gfkp1
-        if callback is not None:
-            callback(xk)
-        k += 1
-        gnorm = vecnorm(gfk, ord=norm)
-        if (gnorm <= gtol):
-            break
-
-        try:  # this was handled in numeric, let it remaines for more safety
-            rhok = 1.0 / (numpy.dot(yk, sk))
-        except ZeroDivisionError:
-            rhok = 1000.0
-            print "Divide-by-zero encountered: rhok assumed large"
-        if isinf(rhok):  # this is patch for numpy
-            rhok = 1000.0
-            print "Divide-by-zero encountered: rhok assumed large"
-        A1 = I - sk[:,numpy.newaxis] * yk[numpy.newaxis,:] * rhok
-        A2 = I - yk[:,numpy.newaxis] * sk[numpy.newaxis,:] * rhok
-        Hk = numpy.dot(A1, numpy.dot(Hk, A2)) + rhok * sk[:,numpy.newaxis] \
-             * sk[numpy.newaxis,:]
-
-    if disp or full_output:
-        fval = old_fval
-    if warnflag == 2:
-        if disp:
-            print "Warning: Desired error not necessarily achieved due to precision loss"
-            print "         Current function value: %f" % fval
-            print "         Iterations: %d" % k
-            print "         Function evaluations: %d" % func_calls[0]
-            print "         Gradient evaluations: %d" % grad_calls[0]
-
-    elif k >= maxiter:
-        warnflag = 1
-        if disp:
-            print "Warning: Maximum number of iterations has been exceeded"
-            print "         Current function value: %f" % fval
-            print "         Iterations: %d" % k
-            print "         Function evaluations: %d" % func_calls[0]
-            print "         Gradient evaluations: %d" % grad_calls[0]
-    else:
-        if disp:
-            print "Optimization terminated successfully."
-            print "         Current function value: %f" % fval
-            print "         Iterations: %d" % k
-            print "         Function evaluations: %d" % func_calls[0]
-            print "         Gradient evaluations: %d" % grad_calls[0]
-
-    if full_output:
-        retlist = xk, fval, gfk, Hk, func_calls[0], grad_calls[0], warnflag
-        if retall:
-            retlist += (allvecs,)
-    else:
-        retlist = xk
-        if retall:
-            retlist = (xk, allvecs)
-
-    return retlist
-
-
-def main():
-    import time
-
-    times = []
-    algor = []
-    x0 = [0.8, 1.2, 0.7]
-    print "Nelder-Mead Simplex"
-    print "==================="
-    start = time.time()
-    x = fmin(rosen, x0)
-    print x
-    times.append(time.time() - start)
-    algor.append('Nelder-Mead Simplex\t')
-
-    print
-    print "Powell Direction Set Method"
-    print "==========================="
-    start = time.time()
-    x = fmin_powell(rosen, x0)
-    print x
-    times.append(time.time() - start)
-    algor.append('Powell Direction Set Method.')
-
-    print
-    print "Nonlinear CG"
-    print "============"
-    start = time.time()
-    x = fmin_cg(rosen, x0, fprime=rosen_der, maxiter=200)
-    print x
-    times.append(time.time() - start)
-    algor.append('Nonlinear CG \t')
-
-    print
-    print "BFGS Quasi-Newton"
-    print "================="
-    start = time.time()
-    x = fmin_bfgs(rosen, x0, fprime=rosen_der, maxiter=80)
-    print x
-    times.append(time.time() - start)
-    algor.append('BFGS Quasi-Newton\t')
-
-    print
-    print "BFGS approximate gradient"
-    print "========================="
-    start = time.time()
-    x = fmin_bfgs(rosen, x0, gtol=1e-4, maxiter=100)
-    print x
-    times.append(time.time() - start)
-    algor.append('BFGS without gradient\t')
-
-    print
-    print "Newton-CG with Hessian product"
-    print "=============================="
-    start = time.time()
-    x = fmin_ncg(rosen, x0, rosen_der, fhess_p=rosen_hess_prod, maxiter=80)
-    print x
-    times.append(time.time() - start)
-    algor.append('Newton-CG with hessian product')
-
-    print
-    print "Newton-CG with full Hessian"
-    print "==========================="
-    start = time.time()
-    x = fmin_ncg(rosen, x0, rosen_der, fhess=rosen_hess, maxiter=80)
-    print x
-    times.append(time.time() - start)
-    algor.append('Newton-CG with full hessian')
-
-    print
-    print "\nMinimizing the Rosenbrock function of order 3\n"
-    print " Algorithm \t\t\t       Seconds"
-    print "===========\t\t\t      ========="
-    for k in range(len(algor)):
-        print algor[k], "\t -- ", times[k]
 
 if __name__ == "__main__":
-    main()
+    pass
 
 # End of file
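The fmin_cg deleted here (and re-enabled in _scipy060optimize.py) is the Polak-Ribiere variant with the coefficient clamped at zero, often written PR+: a negative beta restarts the search along steepest descent. A standalone restatement of its direction update, extracted from the loop above (numpy only; this assumes `pymax` in the source is just Python's builtin max under another name):

    import numpy

    def pr_plus_direction(pk, gfk, gfkp1):
        """One Polak-Ribiere(+) direction update, as in the removed fmin_cg."""
        deltak = numpy.dot(gfk, gfk)                     # ||g_k||**2
        yk = gfkp1 - gfk                                 # change in gradient
        beta_k = max(0, numpy.dot(yk, gfkp1) / deltak)   # clamp at 0 => restart
        return -gfkp1 + beta_k * pk                      # next search direction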
branches/alta/mystic-0.2a1/scipy_bfgs.py (r418 → r419)

 from _scipyoptimize import zoom, _cubicmin, _quadmin, _epsilon
-from _scipyoptimize import line_search, approx_fprime
+from _scipyoptimize import linesearch, line_search, approx_fprime
 
 ##########################################################################
 …
         pk = -numpy.dot(Hk,gfk)
 
-        # If user has scipy installed, use that line search (since line search requires
-        # minpack.so).  If scipy is not installed, use
-        # the function line_search (defined above, in helper functions)
-        try:
-            import scipy.optimize
-            alpha_k, fc, gc, old_fval, old_old_fval, gfkp1 = \
-                     scipy.optimize.linesearch.line_search(func,myfprime,xk,pk,gfk,
-                     old_fval,old_old_fval)
-        except ImportError:
-            alpha_k = None
-        #XXX If it gets stuck in the linesearch in scipy, 'exit' in the signal
-        #    handler does not work.
+        alpha_k, fc, gc, old_fval, old_old_fval, gfkp1 = \
+                 linesearch.line_search(func,myfprime,xk,pk,gfk, \
+                 old_fval,old_old_fval)
+        #XXX If it gets stuck in the linesearch in scipy,
+        #    'exit' in the signal handler does not work.
         #alpha_k = None
 …
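scipy_bfgs.py now calls the bundled line search unconditionally instead of probing for scipy and falling back. For reference, the inverse-Hessian update this solver performs after each accepted step is the same expression seen in the fmin_bfgs deleted above; a standalone restatement, assuming sk (the step) and yk (the gradient change) are 1-d numpy arrays:

    import numpy

    def bfgs_update(Hk, sk, yk):
        # rank-two BFGS update of the inverse-Hessian approximation:
        #   H_{k+1} = (I - rho*s*y^T) H_k (I - rho*y*s^T) + rho*s*s^T
        I = numpy.eye(len(sk))
        rhok = 1.0 / numpy.dot(yk, sk)
        if numpy.isinf(rhok):             # same large-value fallback as the
            rhok = 1000.0                 # source's divide-by-zero guard
        A1 = I - sk[:, numpy.newaxis] * yk[numpy.newaxis, :] * rhok
        A2 = I - yk[:, numpy.newaxis] * sk[numpy.newaxis, :] * rhok
        return (numpy.dot(A1, numpy.dot(Hk, A2))
                + rhok * sk[:, numpy.newaxis] * sk[numpy.newaxis, :])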
branches/alta/mystic-0.2a1/scipy_cg.py (r418 → r419)

 from _scipyoptimize import zoom, _cubicmin, _quadmin, _epsilon
-from _scipyoptimize import line_search, approx_fprime
+from _scipyoptimize import linesearch, line_search, approx_fprime
 
 ##########################################################################
 …
         old_old_fval_backup = old_old_fval
 
-        # NOTE: Without scipy.optimize's linesearch, results may be noticeably worse.
-        try:
-            import scipy.optimize
-            alpha_k, fc, gc, old_fval, old_old_fval, gfkp1 = \
-                     scipy.optimize.linesearch.line_search(func,myfprime,xk,pk,gfk,\
-                     old_fval,old_old_fval,c2=0.4)
-        except ImportError:
-            alpha_k = None
-        #XXX Signal handler 'exit' won't work if it gets stuck inside scipy linesearch
+        alpha_k, fc, gc, old_fval, old_old_fval, gfkp1 = \
+                 linesearch.line_search(func,myfprime,xk,pk,gfk,\
+                 old_fval,old_old_fval,c2=0.4)
+        #XXX Signal handler 'exit' won't work
+        #    if it gets stuck inside scipy linesearch
         #alpha_k = None
 …
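Note the one substantive difference between the two call sites: the conjugate-gradient solver passes c2=0.4 where the BFGS solver keeps the line search default. In the usual strong Wolfe formulation (with 0 < c1 < c2 < 1), c2 bounds how much slope may remain at the accepted step:

    f(xk + alpha*pk) <= f(xk) + c1*alpha*dot(gfk, pk)              # sufficient decrease
    abs(dot(fprime(xk + alpha*pk), pk)) <= c2*abs(dot(gfk, pk))    # curvature condition

A smaller c2 demands a more accurate step length, which suits conjugate gradient, since each new search direction is built from the previous one.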