Changeset 423
- Timestamp:
- 09/10/10 09:49:46 (6 years ago)
- Location:
- mystic
- Files:
-
- 1 added
- 2 edited
Legend:
- Unmodified
- Added
- Removed
-
mystic/_math/approx.py
r259 r423 1 1 #!/usr/bin/env python 2 3 2 """ 4 3 tools for measuring equality 5 4 """ 5 #NOTE: need to compare to numpy.allclose... 6 6 7 def _float_approx_equal(x, y, tol=1e-18, rel=1e-7): 7 8 if tol is rel is None: -
mystic/mystic/termination.py
r422 r423 17 17 """cost of last iteration is < tolerance: 18 18 19 cost[-1] < tolerance"""19 cost[-1] <= tolerance""" 20 20 def _VTR(inst): 21 21 hist = inst.energy_history 22 return hist[-1] < tolerance22 return hist[-1] <= tolerance 23 23 return _VTR 24 24 … … 26 26 """change in cost is < tolerance over a number of generations: 27 27 28 cost[-g] - cost[-1] < tolerance, where g=generations"""28 cost[-g] - cost[-1] <= tolerance, where g=generations""" 29 29 def _ChangeOverGeneration(inst): 30 30 hist = inst.energy_history 31 31 lg = len(hist) 32 32 if lg <= generations: return False 33 return (hist[-generations]-hist[-1]) < tolerance33 return (hist[-generations]-hist[-1]) <= tolerance 34 34 return _ChangeOverGeneration 35 35 … … 47 47 return _NormalizedChangeOverGeneration 48 48 49 def CandidateRelativeTolerance(xtol =1e-4, ftol=1e-4):49 def CandidateRelativeTolerance(xtol = 1e-4, ftol = 1e-4): 50 50 """absolute difference in candidates is < tolerance: 51 51 … … 72 72 sum(abs(last_params - current_params)) <= tolerance""" 73 73 def _SolutionImprovement(inst): 74 update = inst.bestSolution - inst.trialSolution #XXX: if inf - inf ? 74 best = numpy.array(inst.bestSolution) 75 trial = numpy.array(inst.trialSolution) 76 update = best - trial #XXX: if inf - inf ? 75 77 answer = numpy.add.reduce(abs(update)) <= tolerance 76 78 return answer … … 90 92 lg = len(hist) 91 93 #XXX: throws error when hist is shorter than generations ? 
92 return lg > generations and (hist[-generations]-hist[-1]) < 094 return lg > generations and (hist[-generations]-hist[-1]) <= 0 93 95 if not generations and fval == None: return True 94 96 return abs(inst.bestEnergy-fval) <= abs(tolerance * fval) 95 97 return _NormalizedCostTarget 96 98 97 def VTRChangeOverGeneration s(ftol = 0.005, gtol = 1e-6, generations = 30):99 def VTRChangeOverGeneration(ftol = 0.005, gtol = 1e-6, generations = 30): 98 100 """change in cost is < gtol over a number of generations, 99 101 or cost of last iteration is < ftol: 100 102 101 cost[-g] - cost[-1] < gtol, where g=generations *or* cost[-1] <ftol."""102 def _VTRChangeOverGeneration s(inst):103 cost[-g] - cost[-1] <= gtol, where g=generations *or* cost[-1] <= ftol.""" 104 def _VTRChangeOverGeneration(inst): 103 105 hist = inst.energy_history 104 106 lg = len(hist) 105 107 #XXX: throws error when hist is shorter than generations ? 106 return (lg > generations and (hist[-generations]-hist[-1]) < gtol)\107 or ( hist[-1] < ftol )108 return _VTRChangeOverGeneration s108 return (lg > generations and (hist[-generations]-hist[-1]) <= gtol)\ 109 or ( hist[-1] <= ftol ) 110 return _VTRChangeOverGeneration 109 111 110 def PopulationSpread(tolerance =1e-6):112 def PopulationSpread(tolerance = 1e-6): 111 113 """normalized absolute deviation from best candidate is < tolerance: 112 114 113 abs(params - params[0]) < tolerance"""115 abs(params - params[0]) <= tolerance""" 114 116 def _PopulationSpread(inst): 115 117 sim = numpy.array(inst.population) … … 117 119 # print "Warning: Invalid termination condition (nPop < 2)" 118 120 # return True 119 return all(abs(sim - sim[0]) <= abs(tolerance * sim[0]))121 return numpy.all(abs(sim - sim[0]) <= abs(tolerance * sim[0])) 120 122 return _PopulationSpread 121 123 122 def GradientNormTolerance(tolerance =1e-5, norm=Inf):124 def GradientNormTolerance(tolerance = 1e-5, norm = Inf): 123 125 """gradient norm is < tolerance, given user-supplied norm: 124 126 125 
sum( abs(gradient)**norm )**(1.0/norm) < tolerance"""127 sum( abs(gradient)**norm )**(1.0/norm) <= tolerance""" 126 128 def _GradientNormTolerance(inst): 127 129 try:
Note: See TracChangeset
for help on using the changeset viewer.