- Timestamp: 05/27/13 17:41:37 (3 years ago)
- Location: branches/decorate
- Files: 5 edited
branches/decorate/memoize.py
Changes from r663 to r664:

The exported names drop 'db_dict' and add the new archive classes:

    __all__ = ['memoize','memoized','archive_dict',\
               'null_archive','file_archive','db_archive']

archive_dict is reworked. In r663 it was a "dictionary augmented with load and dumps": its load(archive, *args) and dump(archive, *args) methods opened the named file directly and pickled the cache with dill. In r664 it is a "dictionary augmented with an archive backend": the backend is an archive object passed to the constructor as the 'archive' keyword (defaulting to a null_archive); load(*args) and dump(*args) move entries between the dict and that backend (all entries, or only the named keys); a new archived(*on) method reports or toggles whether a real archive is attached, parking or restoring the backend via an internal swap slot; and the backend itself is exposed as the 'archive' property. The old "# Want:" wishlist and the FIXME about closing open files are removed.

Three backend classes replace the old file_dict and db_dict:

- null_archive: a dictionary interface to nothing; it is permanently empty, and __setitem__ and update are no-ops, so an archive_dict wired to it behaves like a plain dict.
- file_archive (formerly file_dict): a dictionary-style interface to a file, either a dill-pickled dict (serialized=True, the default) or a python source file of the form 'memo = {...}'. It now inherits from dict rather than archive_dict, gains an __init__ docstring, and fills out the mapping protocol (__iter__, __repr__, clear, get, has_key, items, iteritems, iterkeys, itervalues, keys, values, pop, setdefault, update); copy, fromkeys, popitem and several __...__ methods are still flagged as missing, and failures while saving are silently ignored (XXX: warning? fail?).
- db_archive (formerly db_dict): a dictionary-style interface to a sqlite database (default ':memory:'); __setitem__ maintains a history of values and lookups return the most recent one. It gains setdefault, a clearer __repr__ of the form "archive(table: ...)", and the TypeError raised by pop() for too many arguments now formats its count correctly with str(L+1).

The memoized decorator loses its 'archived' flag and renames 'memo' to 'cache':

    def memoized(cache=None, keymap=None, tol=None, deep=False):
        ...
        if cache is None: cache = archive_dict()
        elif type(cache) is dict: cache = archive_dict(cache)
        ...
            if cache.has_key(argstr):
                return cache[argstr]
            if cache.archived():       # archiving is now a property of the cache itself
                cache.load(argstr)
                if cache.has_key(argstr):
                    return cache[argstr]
            res = f(*args, **kwds)
            cache[argstr] = res        #XXX: any automated dump to archive?
        ...
        func.cache = cache             # replaces func.memo, func.load and func.dump

The memoize class gets the same rename (self.memo becomes self.cache), and its wrapper now also falls back to the archive on a cache miss via self.cache.archived() and self.cache.load(argstr). The remaining FIXME notes that cache-maxsize algorithms should dump to the archive when the cache fills.
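Taken together, the new pieces compose as below. This is a minimal usage sketch rather than code from the changeset: the function square and the file name squares.pkl are hypothetical, and the serialized file backend assumes dill is installed (as the module itself does).

    # minimal sketch of the r664 API (hypothetical function and file name)
    from memoize import memoized, archive_dict, file_archive

    # an in-memory cache whose archive backend is a dill-pickled file
    cache = archive_dict(archive=file_archive('squares.pkl'))

    @memoized(cache=cache)
    def square(x):
        return x*x

    square(2); square(3)           # results land in the in-memory cache
    print square.cache.archived()  # True: a non-null archive is attached
    square.cache.dump()            # push the cached results out to squares.pkl
    square.cache.load()            # pull archived entries back into memory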
branches/decorate/surrogate.py
Changes from r663 to r664:

    -from memoize import memoized
    +from memoize import memoized, archive_dict, file_archive
     from keymaps import picklemap
     dumps = picklemap(flat=False)
    +cache = archive_dict(archive=file_archive('surrogate.pkl'))
     #@memoized(keymap=dumps, tol=0, deep=True) # slower, but more robust
     #@memoized(tol=0, deep=True)
    -#@memoized(keymap=dumps, archived=True) # slower, but more robust
    -@memoized(archived=True)
    +#@memoized(cache=cache, keymap=dumps) # slower, but more robust
    +@memoized(cache=cache)
     def marc_surr(x):
         """calculate perforation area using a tanh-based model surrogate
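The archived=True keyword of the old decorator has no direct replacement; archiving is instead toggled on the cache object. A hedged sketch of what that looks like with the cache defined above (the call sites are illustrative, not part of the changeset):

    # illustrative only: temporarily detach the file backend from marc_surr's cache
    cache.archived(False)   # swap in a null_archive: memoize in memory, leave surrogate.pkl alone
    # ... evaluate marc_surr(...) as usual ...
    cache.archived(True)    # restore the file_archive backend
    cache.dump()            # persist the accumulated results to surrogate.pkl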
branches/decorate/test_cached_memoize.py
Changes from r658 to r664:

    -from memoize import memoized
    +from memoize import memoized, file_archive
     from timer import timed

     # here caching saves time in a recursive function...
    -@memoized(archived=True)
    +@memoized()
     @timed()
     def fibonacci(n):
         ...
         return fibonacci(n-1) + fibonacci(n-2)

    -archive = 'fibonacci.pkl'
    -fibonacci.load(archive)
    +fibonacci.cache.archive = file_archive('fibonacci.pkl')
    +fibonacci.cache.load()

     print fibonacci(7)
     print fibonacci(9)

    -fibonacci.dump(archive)
    +fibonacci.cache.dump()
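Since load() and dump() now route through the cache's archive, a second run of this script starts from whatever the first run left in fibonacci.pkl. A hypothetical way to inspect that file directly (assuming the test has already been run once):

    # inspect the archive left behind by a previous run (illustrative)
    from memoize import file_archive

    archive = file_archive('fibonacci.pkl')
    print archive.keys()        # keymap-encoded argument strings from the earlier run
    print len(archive.items())  # one entry per distinct fibonacci(n) that was evaluated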
branches/decorate/test_memoize.py
Changes from r663 to r664:

    -from memoize import db_dict
    +from memoize import archive_dict, db_archive
     import dill
    -@memoized(memo=db_dict())
    +@memoized(cache=archive_dict(archive=db_archive()))
     def add(x,y):
         return x+y
     ...
     add(1,2)
     add(1,3)
    -print "db_memo = %s" % add.memo
    +print "db_cache = %s" % add.cache

    -@memoized(memo=dict())
    +@memoized(cache=dict())
     def add(x,y):
         return x+y
     ...
     add(1,2)
     add(1,3)
    -print "dict_memo = %s" % add.memo
    +print "dict_cache = %s" % add.cache

    -@memoized(memo=add.memo)
    +@memoized(cache=add.cache)
     def add(x,y):
         return x+y
     add(1,2)
     add(2,2)
    -print "re_dict_memo = %s" % add.memo
    +print "re_dict_cache = %s" % add.cache

     @memoized(keymap=dumps)
     ...
     add(1,2)
     add(1,3)
    -print "pickle_dict_memo = %s" % add.memo
    +print "pickle_dict_cache = %s" % add.cache
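The db_archive used here defaults to an in-memory sqlite database, so nothing persists across processes. The sketch below is not part of the changeset (the keys and values are made up); it shows the value-history behaviour of db_archive in isolation:

    # illustrative use of db_archive on its own (in-memory sqlite backend)
    from memoize import db_archive

    db = db_archive()            # database defaults to ':memory:'
    db['x'] = 1
    db['x'] = 2                  # __setitem__ keeps a history of values...
    print db['x']                # ...but lookups return the most recent one: 2
    print db.setdefault('y', 0)  # missing key: inserts and returns the default, 0
    db.clear()                   # pops every key (one delete per key)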
branches/decorate/test_timed_monitor.py
Changes from r591 to r664:

     print " upper bounds: %s" % upper_bounds
     # print " ..."
    -try: model.load('surrogate.pkl')
    +try: model.cache.load()
     except: pass
     diameter = UQ(RVstart,RVend,lower_bounds,upper_bounds)
    -try: model.dump('surrogate.pkl')
    +try: model.cache.dump()
     except: pass