Changeset 296
- Timestamp: 07/07/10 12:22:35
- Files: 9 edited
Legend:
- Unmodified: context lines (leading space)
- Added: lines marked with '+'
- Removed: lines marked with '-'
branches/UQ/math/examples2/log_reader.py (r255 → r296)

 f.close()

-contents = file.split("_ #_ __ChiSq__ __params__\n")
+contents = file.split("___#___ __ChiSq__ __params__\n")
 lines = contents[-1].split('\n')
...
 for line in lines[:-1]:
     values = line.split(" ")
-    iter.append(eval(values[0]))
+    iter.append(eval(values[0])[0]) #FIXME: we ignore 'id' here
     cost.append(eval(values[1]))
     param.append(eval(values[2]))
...
 ax1 = fig.add_subplot(2,1,1)
 for j in range(len(param[0])):
-    #ax1.plot(iter,conv[j],label=str(j))
     ax1.plot(iter,conv[j],label=str(j),marker='o')
 plt.legend()

 ax2 = fig.add_subplot(2,1,2)
-#ax2.plot(iter,cost,label='cost')
 ax2.plot(iter,cost,label='cost',marker='o')
 plt.legend()
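The FIXME above reflects the new log format introduced in tools.py further down in this changeset: the first field of each logged line is now a "(step, id)" tuple rather than a bare step number, so the reader keeps only its first element. A toy illustration, with a made-up field value, not part of the changeset:

    first_field = "(12, 3)"      # hypothetical first field of one log line
    step_id = eval(first_field)  # -> (12, 3): generation number and solver id
    print step_id[0]             # prints 12; the solver 'id' (3) is discarded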
mystic/README (r143 → r296)

 Current release:
-    mystic-0.1a2
+    mystic-0.2a1

 Installation:
mystic/mystic/abstract_map_solver.py (r242 → r296)

 a parallel "map" function to enable parallel computing. This module
 describes the map solver interface. As with the AbstractSolver, the
-"Solve" method must be overwritte with the derived solver's optimization
+"Solve" method must be overwritten with the derived solver's optimization
 algorithm. Additionally, for the AbstractMapSolver, a call to self.map
 is required. In many cases, a minimal function call interface for a
mystic/mystic/abstract_nested_solver.py (r242 → r296)

 instances -- utilizing a parallel "map" function to enable parallel
 computing. This module describes the nested solver interface. As with
-the AbstractSolver, the "Solve" method must be overwritte with the derived
+the AbstractSolver, the "Solve" method must be overwritten with the derived
 solver's optimization algorithm. Similar to AbstractMapSolver, a call to
 self.map is required. In many cases, a minimal function call interface for a
mystic/mystic/abstract_solver.py (r242 → r296)

         self.bestSolution = [0.0] * dim
         self.trialSolution = [0.0] * dim
+        self.id = None # identifier (use like "rank" for MPI)

         self._init_popEnergy = 1.0E20 #XXX: or numpy.inf?
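The new attribute gives every solver instance an optional identifier, defaulting to None. A minimal sketch of tagging a solver by hand; the DifferentialEvolutionSolver import and its (ndim, npop) constructor are assumptions about this release, and the values are arbitrary:

    from mystic.differential_evolution import DifferentialEvolutionSolver

    solver = DifferentialEvolutionSolver(3, 20)  # assumed (ndim, npop) signature
    solver.id = 2   # e.g. an MPI rank; left as None when solving serially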
mystic/mystic/differential_evolution.py (r251 → r296)

         if self._handle_sigint: signal.signal(signal.SIGINT, self.signal_handler)

+        id = self.id
         self.probability = CrossProbability
         self.scale = ScalingFactor
...
         self.energy_history.append(self.bestEnergy)
-        StepMonitor(self.bestSolution[:], self.bestEnergy)
+        StepMonitor(self.bestSolution[:], self.bestEnergy, id)
         self.generations = 0 #XXX: above currently *not* counted as an iteration
         if callback is not None:
...
         self.energy_history.append(self.bestEnergy)
-        StepMonitor(self.bestSolution[:], self.bestEnergy)
+        StepMonitor(self.bestSolution[:], self.bestEnergy, id)
         self.generations += 1
         if callback is not None:
...
         if self._handle_sigint: signal.signal(signal.SIGINT, self.signal_handler)

+        id = self.id
         self.probability = CrossProbability
         self.scale = ScalingFactor
...
         self.energy_history.append(self.bestEnergy)
         #FIXME: StepMonitor works for 'pp'?
-        StepMonitor(self.bestSolution[:], self.bestEnergy)
+        StepMonitor(self.bestSolution[:], self.bestEnergy, id)
         self.generations = 0 #XXX: above currently *not* counted as an iteration
         if callback is not None:
...
         self.energy_history.append(self.bestEnergy)
         #FIXME: StepMonitor works for 'pp'?
-        StepMonitor(self.bestSolution[:], self.bestEnergy)
+        StepMonitor(self.bestSolution[:], self.bestEnergy, id)
         self.generations += 1
         if callback is not None:
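Since Solve() now passes the solver's id as a third argument to the step monitor, a user-supplied monitor should accept it, as Sow does after this changeset. A sketch of a hypothetical hand-rolled monitor, not part of mystic:

    class TaggedMonitor(object):
        """record (id, energy, parameters) for each logged step"""
        def __init__(self):
            self.steps = []
        def __call__(self, x, y, id=None):  # 'id' is None when solver.id is unset
            self.steps.append((id, y, x))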
mystic/mystic/nested.py (r242 → r296)

         cf = [cost for i in range(len(initial_values))]
         tm = [termination for i in range(len(initial_values))]
+        id = range(len(initial_values))

         # generate the local_optimize function
         local_opt = """\n
-def local_optimize(cost, termination, x0):
+def local_optimize(cost, termination, x0, rank):
     from %s import %s as LocalSolver
     from mystic import Sow
...

     solver = LocalSolver(ndim)
+    solver.id = rank
     solver.SetInitialPoints(x0)
 """ % (self._solver.__module__, self._solver.__name__)
...
         exec local_opt

-        # map:: params, energy, smon, emon = local_optimize(cost,term,x0)
+        # map:: params, energy, smon, emon = local_optimize(cost,term,x0,id)
         mapconfig = dict(nnodes=self._nnodes, launcher=self._launcher, \
                          mapper=self._mapper, queue=self._queue, \
                          timelimit=self._timelimit, \
                          ncpus=self._ncpus, servers=self._servers)
-        results = self._map(local_optimize, cf, tm, initial_values, **mapconfig)
+        results = self._map(local_optimize, cf, tm, initial_values, id, **mapconfig)

         # get the results with the lowest energy
...
         # write 'bests' to monitors #XXX: non-best monitors may be useful too
         for i in range(len(bestpath.y)):
-            StepMonitor(bestpath.x[i], bestpath.y[i])
+            StepMonitor(bestpath.x[i], bestpath.y[i], self.id)
         #XXX: could apply callback here, or in exec'd code
         for i in range(len(besteval.y)):
...
         cf = [cost for i in range(len(initial_values))]
         tm = [termination for i in range(len(initial_values))]
+        id = range(len(initial_values))

         # generate the local_optimize function
         local_opt = """\n
-def local_optimize(cost, termination, x0):
+def local_optimize(cost, termination, x0, rank):
     from %s import %s as LocalSolver
     from mystic import Sow
...

     solver = LocalSolver(ndim)
+    solver.id = rank
     solver.SetInitialPoints(x0)
 """ % (self._solver.__module__, self._solver.__name__)
...
                          timelimit=self._timelimit, \
                          ncpus=self._ncpus, servers=self._servers)
-        results = self._map(local_optimize, cf, tm, initial_values, **mapconfig)
+        results = self._map(local_optimize, cf, tm, initial_values, id, **mapconfig)

         # get the results with the lowest energy
...
         # write 'bests' to monitors #XXX: non-best monitors may be useful too
         for i in range(len(bestpath.y)):
-            StepMonitor(bestpath.x[i], bestpath.y[i])
+            StepMonitor(bestpath.x[i], bestpath.y[i], self.id)
         #XXX: could apply callback here, or in exec'd code
         for i in range(len(besteval.y)):
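The extra id sequence is passed to self._map alongside the other per-point argument lists, so each generated local_optimize call receives the rank that matches its initial point. A toy illustration, assuming the configured map distributes the argument lists element-wise like the builtin map, with placeholder stand-ins for the cost and termination lists:

    def local_optimize(cost, termination, x0, rank):  # stand-in for the exec'd function
        return "rank %d starts at %s" % (rank, x0)

    initial_values = [[0.0, 0.0], [1.0, 1.0], [2.0, 2.0]]
    cf = ["cost"] * len(initial_values)   # placeholders for the real cost functions
    tm = ["term"] * len(initial_values)   # placeholders for the termination conditions
    id = range(len(initial_values))       # [0, 1, 2] -- one rank per initial point
    print map(local_optimize, cf, tm, initial_values, id)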
mystic/mystic/scipy_optimize.py (r242 → r296)

         #-------------------------------------------------------------

+        id = self.id
         x0 = asfarray(x0).flatten()
         N = len(x0) #XXX: this should be equal to self.nDim
...
         allvecs = [sim[0]]
         fsim[0] = func(x0)
-        StepMonitor(sim[0], fsim[0]) # sim = all values; "best" is sim[0]
+        StepMonitor(sim[0], fsim[0], id) # sim = all values; "best" is sim[0]

         #--- ensure initial simplex is within bounds ---
...
         self.popEnergy = fsim
         self.energy_history.append(self.bestEnergy)
-        StepMonitor(sim[0], fsim[0]) # sim = all values; "best" is sim[0]
+        StepMonitor(sim[0], fsim[0], id) # sim = all values; "best" is sim[0]

         iterations = 1
...
         self.popEnergy = fsim
         self.energy_history.append(self.bestEnergy)
-        StepMonitor(sim[0], fsim[0]) # sim = all values; "best" is sim[0]
+        StepMonitor(sim[0], fsim[0], id) # sim = all values; "best" is sim[0]

         self.generations = iterations
...
         #-------------------------------------------------------------

+        id = self.id
         x = asfarray(x0).flatten()
         if retall:
...
         self.popEnergy[0] = fval #XXX: pointless?
         self.energy_history.append(self.bestEnergy)
-        StepMonitor(x, fval) # get initial values
+        StepMonitor(x, fval, id) # get initial values

         iter = 0;
...
         self.population[0] = x #XXX: pointless
         self.popEnergy[0] = fval #XXX: pointless
-        StepMonitor(x, fval) # get ith values; #XXX: should be [x],[fval] ?
+        StepMonitor(x, fval, id) # get ith values; #XXX: should be [x],[fval] ?

         self.generations = iter
mystic/mystic/tools.py (r247 → r296)

         self._x = []
         self._y = []
+        self._id = []

-    def __call__(self, x, y):
+    def __call__(self, x, y, id=None):
         from numpy import ndarray
         if isinstance(x,ndarray): x = list(x)
         self._x.append(x)
         self._y.append(y)
+        self._id.append(id)

     def get_x(self):
...
         return self._y

+    def get_id(self):
+        return self._id
+
     x = property(get_x, doc = "Params")
     y = property(get_y, doc = "Costs")
+    id = property(get_id, doc = "Id")
     pass

...
         self._xinterval = xinterval
         return
-    def __call__(self, x, y):
+    def __call__(self, x, y, id=None):
         from numpy import ndarray
         #Sow.__call__(self, x, y)
-        super(VerboseSow,self).__call__(x, y)
+        super(VerboseSow,self).__call__(x, y, id)
         if isinstance(y,(list,ndarray)):
             y = y[0] #XXX: get the "best" fit... which should be in y[0]
...
         if int(self._step % self._yinterval) == 0:
             #print "Generation %d has best Chi-Squared: %s" % (self._step, y)
-            print "Generation %d has best Chi-Squared: %f" % (self._step, y)
+            message = "Generation %d has best Chi-Squared: %f" % (self._step, y)
+            if id != None: message = "[id: %d] " % (id) + message
+            print message
         if int(self._step % self._xinterval) == 0:
             if isinstance(x,ndarray): x = list(x)
-            print "Generation %d has best fit parameters:\n %s" % (self._step, x)
+            message = "Generation %d has best fit parameters:\n %s" % (self._step, x)
+            if id != None: message = "[id: %d] " % (id) + message
+            print message
         self._step += 1
         return
...
         self._file = open(self._filename,ind)
         self._file.write("%s\n" % datetime.datetime.now().ctime() )
-        self._file.write("_ #_ __ChiSq__ __params__\n")
+        self._file.write("___#___ __ChiSq__ __params__\n")
         self._file.close()
         return
-    def __call__(self, x, y):
+    def __call__(self, x, y, id=None):
         self._file = open(self._filename,'a')
         from numpy import ndarray
         #Sow.__call__(self, x, y)
-        super(LoggingSow,self).__call__(x, y)
+        super(LoggingSow,self).__call__(x, y, id)
         if isinstance(y,(list,ndarray)):
             y = y[0] #XXX: get the "best" fit... which should be in y[0]
...
         if int(self._step % self._yinterval) == 0:
             if isinstance(x,ndarray): x = list(x)
-            self._file.write(" %d %f %s\n" % (self._step, y, x))
+            step = [self._step]
+            if id != None: step.append(id)
+            self._file.write("%s %f %s\n" % (tuple(step), y, x))
         self._step += 1
         self._file.close()
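A short usage sketch of the extended Sow interface (the numbers are made up): the two-argument call still works, and the recorded ids are exposed through the new property.

    from mystic import Sow

    sow = Sow()
    sow([1.0, 2.0], 5.0)       # old two-argument form; id defaults to None
    sow([1.1, 1.9], 4.2, 7)    # new form: tag the step with solver id 7
    print sow.y                # [5.0, 4.2]
    print sow.id               # [None, 7]

Per the diff, VerboseSow prefixes its printed messages with "[id: %d]" when an id is given, and LoggingSow writes the (step, id) tuple as the first field of each log line, which is the format that log_reader.py above now expects.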