from time import time
from threading import Lock

from django.core.cache import cache
from django.utils.cache import patch_response_headers


def single_cacheable(cache_timeout=60, stale_timeout=60, key_template=''):
    """Cache a GET view's response, letting only one thread recompute it at a time
    while other threads are served the cached (possibly stale) response."""
    def paramed_decorator(func):
        # One lock per decorated view: serializes cache fills and refreshes.
        func.__func_lck = Lock()

        def decorated(request, *args, **kw):
            lck = func.__func_lck
            if cache_timeout != 0 and request.method == "GET":
                key = "sc_" + key_template.format(*args, **kw)
                val = cache.get(key)
                ctout = cache_timeout
                if val is None:
                    # Cache miss: try to become the thread that computes the response.
                    acq_ret = lck.acquire(False)
                    try:
                        if not acq_ret:
                            # Another thread holds the lock; wait until it finishes,
                            lck.acquire(True)
                            # then check whether it cached a response for this key.
                            val = cache.get(key)
                            if val is None:
                                resp = func(request, *args, **kw)
                                if hasattr(resp, '_cache_timeout'):
                                    ctout = resp._cache_timeout
                                patch_response_headers(resp, ctout)
                            else:
                                refresh_tm, resp = val
                                return resp
                        else:
                            # We are the first thread to acquire the lock:
                            # compute the response and cache it for the others.
                            resp = func(request, *args, **kw)
                            if hasattr(resp, '_cache_timeout'):
                                ctout = resp._cache_timeout
                            patch_response_headers(resp, ctout)
                            cache.set(key, (ctout + time(), resp), ctout + stale_timeout)
                    finally:
                        lck.release()
                else:
                    refresh_tm, resp = val
                    if time() > refresh_tm and refresh_tm > 0:
                        # The cached response is due for a refresh.
                        if not lck.acquire(False):
                            # Another thread is already refreshing the cache;
                            # serve the stale response instead of waiting.
                            return resp
                        try:
                            # Mark the entry as stale so other threads keep serving it,
                            cache.set(key, (0, resp), stale_timeout)
                            # then compute and cache the fresh response.
                            resp = func(request, *args, **kw)
                            if hasattr(resp, '_cache_timeout'):
                                ctout = resp._cache_timeout
                            patch_response_headers(resp, ctout)
                            cache.set(key, (ctout + time(), resp), ctout + stale_timeout)
                        finally:
                            lck.release()
                    else:
                        return resp
                return resp
            else:
                # Caching disabled or non-GET request: still serialize on the lock,
                # then run the view directly.
                with lck:
                    resp = func(request, *args, **kw)
                return resp

        decorated.__doc__ = func.__doc__
        decorated.__dict__ = func.__dict__
        return decorated
    return paramed_decorator
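

# Usage sketch (illustrative, not part of the original module): the view name,
# URL keyword argument, and key_template values below are hypothetical.
# key_template is filled in with the view's positional and keyword arguments,
# so it should reference them; a view may also override the timeout for a
# single response by setting `_cache_timeout` on the response before returning.
from django.http import HttpResponse


@single_cacheable(cache_timeout=120, stale_timeout=300,
                  key_template='article_{article_id}')
def article_detail(request, article_id):
    resp = HttpResponse("article %s" % article_id)
    resp._cache_timeout = 60  # optional per-response override read by the decorator
    return resp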