Changeset 176
- Timestamp: 08/09/09 13:45:49 (7 years ago)
- Location: branches/alta/mystic-0.1a2/mystic
- Files: 2 edited
Legend (for the diffs below):
- Unmodified lines are indented with two spaces
- Added lines are prefixed with "+"
- Removed lines are prefixed with "-"
branches/alta/mystic-0.1a2/mystic/scipy_bfgs.py
r169 → r176:

  epsilon : int or ndarray
      If fprime is approximated, use this value for the step size.
- callback : callable
-     An optional user-supplied function to call after each
-     iteration. Called as callback(xk), where xk is the
-     current parameter vector.

  """
…
  args = ExtraArgs
  x0 = self.population[0]
+
  fprime=None
  epsilon=_epsilon
- disp=1
- callback=None
+ self.disp = 0
+ self.callback = None

  if kwds.has_key('fprime'): fprime = kwds['fprime']
  if kwds.has_key('epsilon'): epsilon = kwds['epsilon']
- if kwds.has_key('callback'): callback = kwds['callback']
- if kwds.has_key('disp'): disp = kwds['disp']
+ if kwds.has_key('disp'): self.disp = kwds['disp']
+ if kwds.has_key('callback'): self.callback = kwds['callback']
+
  #-------------------------------------------------------------
…
  self._maxiter = len(x0)*200

- # Wrap gradient monitor
- # ok to set monitor to Null here, or is there another way to do this?
+ # Wrap gradient monitor
  if fprime is None:
      gcalls, myfprime = wrap_function(approx_fprime, (func, epsilon), Null)
…
  xk = x0
  #sk = [2*gtol]
- sk = [2*1e-5]
  warnflag = 0
  while k < self._maxiter:
…
      yk = gfkp1 - gfk
      gfk = gfkp1
-     if callback is not None:
-         callback(xk)
+     if self.callback is not None:
+         self.callback(xk)
      k += 1
…
  # Pass the gradient value to self.gfk for termination
  self.gfk = gfk

+ # Calculate the inverse Hessian
  try: # this was handled in numeric, let it remaines for more safety
      rhok = 1.0 / (numpy.dot(yk,sk))
…

  self.generations = k
- signal.signal(signal.SIGINT,signal.default_int_handler)
-
- if disp:
+ signal.signal(signal.SIGINT,signal.default_int_handler)
+
+ # Fix me?
+ self.gcalls = gcalls[0]
+
+ # Print messages if self.disp is non-zero
+ if self.disp:
      fval = old_fval
      if warnflag == 2:
-         if disp:
+         if self.disp:
              print "Warning: Desired error not necessarily achieved" \
                    "due to precision loss"
…

      elif k >= self._maxiter:
-         if disp:
+         if self.disp:
              print "Warning: Maximum number of iterations has been exceeded"
              print " Current function value: %f" % fval
…
              print " Gradient evaluations: %d" % gcalls[0]
      else:
-         if disp:
+         if self.disp:
              print "Optimization terminated successfully."
              print " Current function value: %f" % fval
…

  solver = BFGSSolver(len(x0))
+
+ solver.disp = disp
+ solver.callback = callback
+
  solver.SetInitialPoints(x0)
  solver.enable_signal_handler()
…
  solver.Solve(func,termination=GradientTermination(gtol, norm),\
               EvaluationMonitor=evalmon,StepMonitor=stepmon,\
-              disp=disp, ExtraArgs=args, callback=callback,\
-              epsilon=epsilon, fprime=fprime)
+              ExtraArgs=args, epsilon=epsilon, fprime=fprime)
  solution = solver.Solution()
…
  warnflag = 0
  fcalls = len(evalmon.x)
+ iterations = len(stepmon.x)
+
+ # fix me?
  #gcalls = len(gradmon.x)
- iterations = len(stepmon.x)
- gcalls = iterations + 1 # fix me?
+ gcalls = solver.gcalls
+
  allvecs = []
  for i in range(iterations):
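Taken together, the scipy_bfgs.py change moves disp and callback onto the solver instance (self.disp, self.callback) and exposes the gradient-call count as solver.gcalls, while both options are still accepted as Solve() keywords. Below is a minimal usage sketch of the solver interface after this changeset; the objective, initial point, and the import location of GradientTermination are assumptions for illustration (only its (gtol, norm) call signature appears in the wrapper above), not the library's documented API.

  import numpy
  from mystic.scipy_bfgs import BFGSSolver
  from mystic.termination import GradientTermination   # assumed import path

  def cost(x):
      # simple convex objective with its minimum at the origin (illustrative)
      return numpy.sum(numpy.asarray(x)**2)

  def report(xk):
      # user callback: invoked once per iteration with the current parameters
      print "current parameters:", xk

  solver = BFGSSolver(2)                  # problem dimension
  solver.SetInitialPoints([1.0, -1.0])    # starting point
  solver.Solve(cost,
               termination=GradientTermination(1e-5, 2),  # (gtol, norm), as in the wrapper
               disp=1, callback=report)   # still accepted as Solve() keywords
  print solver.Solution()
  print solver.gcalls                     # gradient-call count, newly exposed in r176

Because fprime is omitted, Solve() falls back to approx_fprime with the given epsilon, as in the wrapped scipy code.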
branches/alta/mystic-0.1a2/mystic/scipy_ncg.py
r169 → r176:

  - StepMonitor = CustomSow() with 4 columns
  - enable_signal_handler()
+ - termination = SolutionImprovement(tolerance)

  Usage
…

- def Solve(self, func, fprime,termination, sigint_callback=None,
-           EvaluationMonitor=Null, StepMonitor=Null, GradMonitor=Null,
-           HessianMonitor=Null,ExtraArgs=(), **kwds):
+ def Solve(self, func, termination, sigint_callback=None,
+           EvaluationMonitor=Null, StepMonitor=Null, #GradientMonitor=Null,
+           ExtraArgs=(), **kwds):
      """Minimize a function using NCG.
…

  func -- the Python function or method to be minimized.
- fprime -- callable f'(x,*args)
-     Gradient of f.
  termination -- callable object providing termination conditions.
…
  Further Inputs:

+ fprime -- callable f'(x,*args)
+     Gradient of f.
  fhess_p : callable fhess_p(x,p,*args)
      Function which computes the Hessian of f times an
…
  x0 = self.population[0]
  x0 = asarray(x0).flatten()
+
  epsilon = _epsilon
- disp = 1
- callback = None
+ self.disp = 1
+ self.callback = None
  fhess_p = None
  fhess = None

  if kwds.has_key('epsilon'): epsilon = kwds['epsilon']
- if kwds.has_key('callback'): callback = kwds['callback']
- if kwds.has_key('disp'): disp = kwds['disp']
+ if kwds.has_key('callback'): self.callback = kwds['callback']
+ if kwds.has_key('disp'): self.disp = kwds['disp']
  if kwds.has_key('fhess'): fhess = kwds['fhess']
  if kwds.has_key('fhess_p'): fhess_p = kwds['fhess_p']
+
+ # fprime is actually required. Temporary fix?:
+ if kwds.has_key('fprime'): fprime = kwds['fprime']
  #-------------------------------------------------------------
…
  self._maxiter = len(x0)*200

- # Wrap gradient function?
+ # Wrap gradient function?
+ # gcalls, fprime = wrap_function(fprime, args, GradientMonitor)
  gcalls, fprime = wrap_function(fprime, args, Null)
…
  update = alphak * pk

+ # Put last solution in trialSolution for termination()
  self.trialSolution = xk

  xk = xk + update # upcast if necessary
- if callback is not None:
-     callback(xk)
+ if self.callback is not None:
+     self.callback(xk)
  k += 1
…
  self.generations = k

+ # Fix me?
  self.hcalls = hcalls
+ self.gcalls = gcalls[0]

  signal.signal(signal.SIGINT,signal.default_int_handler)

- if disp:
+ if self.disp:
      fval = old_fval
      if k >= self._maxiter:
-         if disp:
+         if self.disp:
              print "Warning: Maximum number of iterations has been exceeded"
              print " Current function value: %f" % fval
…
              print " Hessian evaluations: %d" % hcalls
      else:
-         if disp:
+         if self.disp:
              print "Optimization terminated successfully."
              print " Current function value: %f" % fval
…

  from mystic.tools import Sow, CustomSow
- from mystic.termination import XTermination
+ from mystic.termination import SolutionImprovement
  #stepmon = Sow()
  stepmon = CustomSow('x','y','g','h', x='x', y='fval', \
…
  solver.SetEvaluationLimits(maxiter,None)
  # Does requiring fprime break abstract_solver interface?
- solver.Solve(func, fprime, termination=XTermination(xtol),\
+ solver.Solve(func, SolutionImprovement(tolerance=xtol),\
               EvaluationMonitor=evalmon,StepMonitor=stepmon,\
               disp=disp, ExtraArgs=args, callback=callback,\
               epsilon=epsilon, fhess_p=fhess_p,\
-              fhess=fhess)
+              fhess=fhess, fprime=fprime)
  solution = solver.Solution()
…
  fcalls = len(evalmon.x)
  iterations = len(stepmon.x)
- gcalls = iterations # Fix me?
+
+ # Fix me?
+ gcalls = solver.gcalls
  hcalls = solver.hcalls
+
  allvecs = []
  for i in range(iterations):
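In scipy_ncg.py, fprime moves off the Solve() signature into the keyword arguments (the in-code comment notes it is still effectively required), and the wrapper's termination condition becomes SolutionImprovement(tolerance). A minimal sketch of driving the reworked Solve() directly follows, assuming the solver class is named NCGSolver and is set up like BFGSSolver above; the objective and its gradient are illustrative:

  import numpy
  from mystic.scipy_ncg import NCGSolver              # assumed class name
  from mystic.termination import SolutionImprovement

  def cost(x):
      # simple convex objective with its minimum at the origin (illustrative)
      return numpy.sum(numpy.asarray(x)**2)

  def gradient(x):
      # analytic gradient of cost(x); passed via the new fprime keyword
      return 2.0*numpy.asarray(x)

  solver = NCGSolver(2)
  solver.SetInitialPoints([1.0, -1.0])
  solver.Solve(cost, SolutionImprovement(tolerance=1e-6),
               fprime=gradient, disp=1)
  print solver.Solution()
  print solver.gcalls, solver.hcalls    # gcalls newly exposed in r176; hcalls predates it

With fhess and fhess_p left unset, the underlying scipy-derived NCG routine should fall back to approximating Hessian-vector products from fprime, so only the gradient needs to be supplied here.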