
Source Code for Module ivs.aux.decorators

# -*- coding: utf-8 -*-
"""
Various decorator functions
    - Memoization with args and kwargs (@memoized)
    - Make a parallel version of a function (@make_parallel)
    - Retry with exponential backoff (@retry(3,2))
    - Retry accessing a website with exponential backoff (@retry_http(3,2))
    - Counting function calls (@countcalls)
    - Timing function calls (@timeit, @timeit_duration)
    - Redirecting print statements to a logger (@logprintinfo)
    - Filtering out keyword arguments a function does not accept (@filter_kwargs)
    - Disabling a decorator (disabled)
    - Extend/Reopen an existing class, like in Ruby (@extend, @class_extend)
"""
import functools
import cPickle
import time
import logging
import sys
import math
import socket
import inspect

logger = logging.getLogger("DEC")
memory = {}

#{ Common tools

def memoized(fctn):
    """
    Cache a function's return value each time it is called.
    If called later with the same arguments, the cached value is returned, and
    not re-evaluated.
    """
    @functools.wraps(fctn)
    def memo(*args, **kwargs):
        haxh = cPickle.dumps((fctn.__name__, args, sorted(kwargs.iteritems())))
        modname = fctn.__module__
        if not (modname in memory):
            memory[modname] = {}
        if not (haxh in memory[modname]):
            memory[modname][haxh] = fctn(*args, **kwargs)
            logger.debug("Function %s memoized" % (str(fctn)))
        return memory[modname][haxh]
    if memo.__doc__:
        memo.__doc__ = "\n".join([memo.__doc__, "This function is memoized."])
    return memo


def clear_memoization(keys=None):
    """
    Clear contents of memory
    """
    if keys is None:
        keys = memory.keys()
    for key in keys:
        if key in memory:
            riddens = [memory[key].pop(ikey) for ikey in memory[key].keys()[:]]
    logger.debug("Memoization cleared")
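
#-- Illustrative usage sketch (not part of the original module): how @memoized
#   and clear_memoization() might be combined. `slow_square` is a hypothetical
#   function introduced only for this example.
def _example_memoized():
    @memoized
    def slow_square(x):
        time.sleep(0.5)          # stand-in for an expensive computation
        return x * x
    slow_square(4)               # computed and stored in the module-level cache
    slow_square(4)               # served from the cache, not recomputed
    clear_memoization()          # empty the caches of all modules again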

def make_parallel(fctn):
    """
    Make a parallel version of a function.

    This extends the function's arguments with one extra argument, which is
    a parallel array that collects the output of the function.

    You have to decorate this function in turn with a function that calls the
    basic function, but with different arguments, so as to effectively make it
    parallel (e.g. in the frequency analysis case, this would be a function
    that calls the periodogram calculation with different f0 and fn).
    """
    @functools.wraps(fctn)
    def extra(*args, **kwargs):
        arr = args[-1]                    # this is the parallel array
        out = fctn(*args[:-1], **kwargs)
        arr.append(out)                   # collect the output
    return extra
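
#-- Illustrative usage sketch (not part of the original module): running a
#   @make_parallel-decorated function from several threads, each appending its
#   result to a shared list. `square` is a hypothetical example function.
def _example_make_parallel():
    import threading
    @make_parallel
    def square(x):
        return x ** 2
    results = []
    threads = [threading.Thread(target=square, args=(i, results)) for i in range(4)]
    for thread in threads:
        thread.start()
    for thread in threads:
        thread.join()
    return sorted(results)       # [0, 1, 4, 9]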

def timeit(fctn):
    """
    Time a function and print the execution time.

    @return: output from fctn
    """
    @functools.wraps(fctn)
    def time_this(*args, **kwargs):
        start_time = time.time()
        output = fctn(*args, **kwargs)
        duration = time.time() - start_time
        print "FUNC: %s MOD: %s: EXEC TIME: %.3fs" % (fctn.__name__, fctn.__module__, duration)
        return output
    return time_this


def timeit_duration(fctn):
    """
    Time a function and return the duration instead of the function's output.

    @return: duration of the function call in seconds
    @rtype: float
    """
    @functools.wraps(fctn)
    def time_this(*args, **kwargs):
        start_time = time.time()
        output = fctn(*args, **kwargs)
        duration = time.time() - start_time
        print "FUNC: %s MOD: %s: EXEC TIME: %.3fs" % (fctn.__name__, fctn.__module__, duration)
        return duration
    return time_this
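
#-- Illustrative usage sketch (not part of the original module): timing a
#   hypothetical function `work` with @timeit; @timeit_duration would return
#   the elapsed time instead of the function's output.
def _example_timeit():
    @timeit
    def work():
        time.sleep(0.2)
        return 42
    return work()                # prints the execution time and returns 42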

def retry(tries, delay=3, backoff=2):
    """
    Retry a function or method until it returns True.

    delay sets the initial delay in seconds, and backoff sets how much the
    delay should lengthen after each failure. backoff must be greater than 1,
    or else it isn't really a backoff. tries must be at least 0, and delay
    greater than 0.
    """
    if backoff <= 1:
        raise ValueError("backoff must be greater than 1")

    tries = math.floor(tries)
    if tries < 0:
        raise ValueError("tries must be 0 or greater")

    if delay <= 0:
        raise ValueError("delay must be greater than 0")

    def deco_retry(f):
        def f_retry(*args, **kwargs):
            mtries, mdelay = tries, delay       # make mutable copies
            rv = f(*args, **kwargs)             # first attempt
            while mtries > 0:
                if rv == True:                  # done on success
                    return True
                mtries -= 1                     # consume an attempt
                time.sleep(mdelay)              # wait...
                mdelay *= backoff               # make future waits longer
                rv = f(*args, **kwargs)         # try again
            return False                        # ran out of tries :-(
        return f_retry      # true decorator -> decorated function
    return deco_retry       # @retry(arg[, ...]) -> true decorator
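
#-- Illustrative usage sketch (not part of the original module): @retry keeps
#   calling the wrapped function, sleeping delay, delay*backoff, ... seconds
#   between attempts, until it returns True or the attempts are exhausted.
#   `flaky_job` is a hypothetical function for this example only.
def _example_retry():
    attempts = []
    @retry(3, delay=1, backoff=2)
    def flaky_job():
        attempts.append(1)
        return len(attempts) >= 2    # succeeds on the second call
    return flaky_job()               # True, after one 1-second pause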

def retry_http(tries, backoff=2, on_failure='error'):
    """
    Retry a function or method that reads from the internet, until no
    socket.error or IOError is raised.

    The initial delay is taken from the current default socket timeout, and
    backoff sets how much the delay should lengthen after each failure.
    backoff must be greater than 1, or else it isn't really a backoff.
    tries must be at least 0.
    """
    delay = socket.getdefaulttimeout()
    o_delay = socket.getdefaulttimeout()
    if backoff <= 1:
        raise ValueError("backoff must be greater than 1")

    tries = math.floor(tries)
    if tries < 0:
        raise ValueError("tries must be 0 or greater")

    if delay is None or delay <= 0:
        #-- no (sensible) default timeout is set: fall back to 15 seconds
        delay = 15.
        o_delay = 15.
        socket.setdefaulttimeout(delay)

    def deco_retry(f):
        def f_retry(*args, **kwargs):
            mtries, mdelay = tries, delay               # make mutable copies
            msg = "no attempts made"
            try:
                while mtries > 0:
                    try:
                        rv = f(*args, **kwargs)         # try (again)
                    except (IOError, socket.error), msg:
                        rv = False

                    if rv != False:                     # done on success
                        return rv
                    mtries -= 1                         # consume an attempt
                    socket.setdefaulttimeout(mdelay)    # lengthen the timeout...
                    mdelay *= backoff                   # ...even more next time
                    logger.error("URL timeout: %d attempts remaining (delay=%.1fs)" % (mtries, mdelay))
                logger.critical("URL timeout: number of trials exceeded")
                if on_failure == 'error':
                    raise IOError(msg)                  # ran out of tries :-(
                else:
                    logger.critical("URL failed, but continuing...")
                    return None
            finally:
                socket.setdefaulttimeout(o_delay)       # restore the original timeout
        return f_retry      # true decorator -> decorated function
    return deco_retry       # @retry_http(arg[, ...]) -> true decorator
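
#-- Illustrative usage sketch (not part of the original module): retrying a
#   download with @retry_http; the socket timeout is doubled after each failed
#   attempt. The URL is a placeholder, not a real data source.
def _example_retry_http():
    import urllib2
    @retry_http(3, backoff=2, on_failure='warning')
    def fetch(url):
        return urllib2.urlopen(url).read()
    return fetch('http://example.com/catalog.dat')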

class countcalls(object):
    """
    Keeps track of the number of times a function is called.
    """
    __instances = {}

    def __init__(self, f):
        self.__f = f
        self.__numcalls = 0
        countcalls.__instances[f] = self

    def __call__(self, *args, **kwargs):
        self.__numcalls += 1
        return self.__f(*args, **kwargs)

    @staticmethod
    def count(f):
        """Return the number of times the function f was called."""
        return countcalls.__instances[f].__numcalls

    @staticmethod
    def counts():
        """Return a dict of {function: # of calls} for all registered functions."""
        return dict([(f, countcalls.count(f)) for f in countcalls.__instances])
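
#-- Illustrative usage sketch (not part of the original module): counting how
#   often a hypothetical function `greet` is called via the countcalls registry.
def _example_countcalls():
    @countcalls
    def greet():
        return "hello"
    greet()
    greet()
    return countcalls.counts()   # maps each registered function to its call count; greet maps to 2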

class LogPrinter:
    """
    LogPrinter class which serves to emulate a file object and logs
    whatever it gets sent to a Logger object at the INFO level.
    """
    def __init__(self):
        """Grabs the specific logger to use for logprinting."""
        self.ilogger = logging.getLogger('logprinter')
        il = self.ilogger
        logging.basicConfig()
        il.setLevel(logging.INFO)

    def write(self, text):
        """Logs written output to a specific logger."""
        text = text.strip()
        if text:
            self.ilogger.info(text)


def logprintinfo(func):
    """
    Wraps a function so that any calls made to print get logged instead.
    """
    @functools.wraps(func)
    def pwrapper(*args, **kwargs):
        stdobak = sys.stdout
        lpinstance = LogPrinter()
        sys.stdout = lpinstance
        try:
            return func(*args, **kwargs)
        finally:
            sys.stdout = stdobak
    return pwrapper
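
#-- Illustrative usage sketch (not part of the original module): print output
#   of a hypothetical function `chatty` is redirected to the 'logprinter'
#   logger instead of appearing on stdout.
def _example_logprintinfo():
    @logprintinfo
    def chatty():
        print "this message ends up in the log"
    chatty()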

def filter_kwargs(fctn):
    """
    Remove keyword arguments which are not used by a function.
    """
    @functools.wraps(fctn)
    def do_filter(*args, **kwargs):
        args_, varargs, keywords, defaults = inspect.getargspec(fctn)
        defaults = defaults or ()   # getargspec returns None when there are no defaults
        #-- loop over all keywords given by the user, and remove them from the
        #   kwargs dictionary if their names are not present in 'args'
        accepted = args_[-len(defaults):] if defaults else args_
        for key in kwargs.keys():
            if key not in accepted:
                kwargs.pop(key)
        return fctn(*args, **kwargs)
    return do_filter
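
#-- Illustrative usage sketch (not part of the original module): keyword
#   arguments that the hypothetical function `model` does not accept are
#   silently dropped before the call.
def _example_filter_kwargs():
    @filter_kwargs
    def model(x, scale=1.0, offset=0.0):
        return scale * x + offset
    return model(2.0, scale=3.0, colour='red')   # 'colour' is removed, returns 6.0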

#}

#{ Disable decorator

def disabled(func):
    """
    Disables a decorator by returning the provided function unchanged.

    Use as follows:

    1. Set a global enable flag for a decorator:

    >>> global_mydecorator_enable_flag = True

    2. Toggle the decorator right before the definition:

    >>> state = mydecorator if global_mydecorator_enable_flag else disabled
    >>> @state
    ... def some_function():
    ...     pass
    """
    return func
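
#-- Illustrative usage sketch (not part of the original module): toggling a
#   decorator globally. When the (hypothetical) flag is False, `disabled`
#   replaces the decorator and the function is left untouched.
def _example_disabled():
    timing_enabled = False
    state = timeit if timing_enabled else disabled
    @state
    def quick():
        return 1
    return quick()               # 1, without any timing output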

#}

#{ Extend/Reopen class decorator

def extend(cls):
    """
    Decorator that allows you to add methods or attributes to an already
    existing class. Inspired by the reopening of classes in Ruby.

    Example:

    >>> @extend(SomeClassThatAlreadyExists)
    ... def some_method(self):
    ...     pass  # do stuff

    Will add the method some_method to SomeClassThatAlreadyExists.
    """
    def decorator(f):
        setattr(cls, f.__name__, f)
        return cls
    return decorator

def class_extend(cls):
    """
    Similar to extend(cls), but instead of decorating a function, you use
    it to decorate a class. All attributes of that class will be added to
    cls. Use at your own risk, results may vary!!!
    """
    def decorator(nclf):
        for at in nclf.__dict__.keys():
            #-- special class attributes cannot be reassigned on an existing class
            if at in ('__dict__', '__weakref__'):
                continue
            setattr(cls, at, getattr(nclf, at))
        return cls
    return decorator

#}
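
#-- Illustrative usage sketch (not part of the original module): reopening a
#   hypothetical existing class with @extend and @class_extend.
def _example_extend():
    class Star(object):
        def __init__(self, name):
            self.name = name

    @extend(Star)
    def describe(self):
        return "Star %s" % self.name

    @class_extend(Star)
    class Extras(object):
        def shout(self):
            return self.name.upper()

    return Star("Vega").describe(), Star("Vega").shout()   # ('Star Vega', 'VEGA')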