Merge pull request #601 from girder/more-cache-config
Add more cache configuration options.
manthey authored May 13, 2021
2 parents 65492d5 + 789c6d6 commit 939e51a
Showing 4 changed files with 42 additions and 10 deletions.
3 changes: 3 additions & 0 deletions CHANGELOG.md
@@ -2,6 +2,9 @@
 
 ## Unreleased
 
+### Features
+- Allow setting the cache memory portion and maximum for tilesources (#601)
+
 ### Improvements
 - Cache histogram requests (#598)
 
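As a rough usage sketch of the changelog feature above: assuming the runtime `large_image.config.setConfig` helper is used to override the defaults added in `config.py` below (the key names come from this PR; the chosen values are illustrative only):

```python
# Illustrative values only; the keys come from this PR's config.py changes.
import large_image.config

# Let tile sources use up to 1/4 of memory instead of the default 1/8.
large_image.config.setConfig('cache_tilesource_memory_portion', 4)
# Keep at most 32 tile sources cached, regardless of available memory.
large_image.config.setConfig('cache_tilesource_maximum', 32)
```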
9 changes: 5 additions & 4 deletions large_image/cache_util/cache.py
@@ -137,7 +137,8 @@ def __new__(metacls, name, bases, namespace, **kwargs): # noqa - N804
                 CacheProperties[cacheName].get('itemExpectedSize')):
             maxSize = pickAvailableCache(
                 CacheProperties[cacheName]['itemExpectedSize'],
-                maxItems=CacheProperties[cacheName]['maxItems'])
+                maxItems=CacheProperties[cacheName]['maxItems'],
+                cacheName=cacheName)
         maxSize = namespace.pop('cacheMaxSize', maxSize)
         maxSize = kwargs.get('cacheMaxSize', maxSize)
         if maxSize is None:
@@ -154,10 +155,10 @@ def __new__(metacls, name, bases, namespace, **kwargs): # noqa - N804
             cacheName = cls
 
         if LruCacheMetaclass.namedCaches.get(cacheName) is None:
-            cache, cacheLock = CacheFactory().getCache(maxSize)
+            cache, cacheLock = CacheFactory().getCache(maxSize, cacheName=cacheName)
             LruCacheMetaclass.namedCaches[cacheName] = (cache, cacheLock)
             config.getConfig('logger').info(
-                'Created LRU Cache for %r with %d maximum size' % (cacheName, maxSize))
+                'Created LRU Cache for %r with %d maximum size' % (cacheName, cache.maxsize))
         else:
             (cache, cacheLock) = LruCacheMetaclass.namedCaches[cacheName]
 
@@ -204,7 +205,7 @@ def getTileCache():
 
     if _tileCache is None:
         # Decide whether to use Memcached or cachetools
-        _tileCache, _tileLock = CacheFactory().getCache()
+        _tileCache, _tileLock = CacheFactory().getCache(cacheName='tileCache')
     return _tileCache, _tileLock
 
 
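Two things change in cache.py: the factory now receives a `cacheName` so per-cache configuration can apply, and the log line reports `cache.maxsize` rather than the requested `maxSize`, since the cache actually built may be smaller than requested. A small illustrative sketch of that difference using `cachetools` directly (the numbers are hypothetical):

```python
# Hypothetical numbers; shows why cache.maxsize is the more accurate value to log.
from cachetools import LRUCache

requested = 1000          # size suggested by pickAvailableCache
configured_maximum = 100  # e.g. a cache_<cacheName>_maximum override
cache = LRUCache(min(requested, configured_maximum))
print(cache.maxsize)      # 100 -- what the log message now reports
```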
30 changes: 24 additions & 6 deletions large_image/cache_util/cachefactory.py
@@ -30,7 +30,7 @@
     MemCache = None
 
 
-def pickAvailableCache(sizeEach, portion=8, maxItems=None):
+def pickAvailableCache(sizeEach, portion=8, maxItems=None, cacheName=None):
     """
     Given an estimated size of an item, return how many of those items would
     fit in a fixed portion of the available virtual memory.
@@ -39,9 +39,17 @@ def pickAvailableCache(sizeEach, portion=8, maxItems=None):
     :param portion: the inverse fraction of the memory which can be used.
     :param maxItems: if specified, the number of items is never more than this
         value.
+    :param cacheName: if specified, the portion can be affected by the
+        configuration.
     :return: the number of items that should be cached. Always at least two,
         unless maxItems is less.
     """
+    if cacheName:
+        portion = max(portion, int(config.getConfig(
+            f'cache_{cacheName}_memory_portion', portion)))
+        configMaxItems = int(config.getConfig(f'cache_{cacheName}_maximum', 0))
+        if configMaxItems > 0:
+            maxItems = configMaxItems
     # Estimate usage based on (1 / portion) of the total virtual memory.
     if psutil:
         memory = psutil.virtual_memory().total
@@ -56,18 +64,28 @@ class CacheFactory:
 class CacheFactory:
     logged = False
 
-    def getCacheSize(self, numItems):
+    def getCacheSize(self, numItems, cacheName=None):
         if numItems is None:
             defaultPortion = 32
             try:
-                portion = int(config.getConfig('cache_python_memory_portion', defaultPortion))
-                portion = max(portion, 3)
+                portion = int(config.getConfig('cache_python_memory_portion', 0))
+                if cacheName:
+                    portion = max(portion, int(config.getConfig(
+                        f'cache_{cacheName}_memory_portion', portion)))
+                portion = max(portion or defaultPortion, 3)
             except ValueError:
                 portion = defaultPortion
             numItems = pickAvailableCache(256**2 * 4 * 2, portion)
+        if cacheName:
+            try:
+                maxItems = int(config.getConfig(f'cache_{cacheName}_maximum', 0))
+                if maxItems > 0:
+                    numItems = min(numItems, max(maxItems, 3))
+            except ValueError:
+                pass
         return numItems
 
-    def getCache(self, numItems=None):
+    def getCache(self, numItems=None, cacheName=None):
         # memcached is the fallback default, if available.
         cacheBackend = config.getConfig('cache_backend', 'python')
         if cacheBackend:
@@ -96,7 +114,7 @@ def getCache(self, numItems=None):
                 cache = None
         if cache is None: # fallback backend
             cacheBackend = 'python'
-            cache = LRUCache(self.getCacheSize(numItems))
+            cache = LRUCache(self.getCacheSize(numItems, cacheName=cacheName))
             cacheLock = threading.Lock()
         if numItems is None and not CacheFactory.logged:
             config.getConfig('logprint').info('Using %s for large_image caching' % cacheBackend)
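To make the sizing arithmetic in `pickAvailableCache` concrete, here is a standalone sketch (not the library function itself) that mirrors the estimate: take 1/portion of total virtual memory, divide by the expected item size, keep at least two items, and clamp by any configured maximum:

```python
import psutil  # optional in large_image itself; assumed installed here


def estimate_cache_items(size_each, portion=8, max_items=None):
    """Sketch that mirrors the pickAvailableCache estimate."""
    memory = psutil.virtual_memory().total          # total virtual memory, bytes
    num_items = max(int(memory / portion / size_each), 2)
    if max_items:
        num_items = min(num_items, max_items)
    return num_items


# Example: with 16 GiB of memory, the default tile item estimate of
# 256 * 256 * 4 * 2 bytes (~512 KiB), and portion=8:
#   16 GiB / 8 = 2 GiB budget; 2 GiB / 512 KiB = 4096 cached items.
```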
10 changes: 10 additions & 0 deletions large_image/config.py
@@ -11,6 +11,7 @@
     'logger': fallbackLogger,
     'logprint': fallbackLogger,
 
+    # For tiles
     'cache_backend': 'python', # 'python' or 'memcached'
     # 'python' cache can use 1/(val) of the available memory
     'cache_python_memory_portion': 32,
@@ -19,6 +20,15 @@
     'cache_memcached_username': None,
     'cache_memcached_password': None,
 
+    # Generally, these keys are the form of "cache_<cacheName>_<key>"
+
+    # For tilesources. These are also limited by available file handles.
+    # 'python' cache can use 1/(val) of the available memory based on a very
+    # rough estimate of the amount of memory used by a tilesource
+    'cache_tilesource_memory_portion': 8,
+    # If >0, this is the maximum number of tilesources that will be cached
+    'cache_tilesource_maximum': 0,
+
     'max_small_image_size': 4096,
 }
 
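The `cache_<cacheName>_<key>` keys combine with the global ones: in the `getCacheSize` diff above, the per-cache portion is merged with `cache_python_memory_portion` via `max()`, so the larger divisor (the smaller memory fraction) wins, and a non-zero `cache_<cacheName>_maximum` caps the item count, never below 3. A sketch of that resolution, separate from the library code (behavior inferred from the diff):

```python
# Not library API; mirrors the resolution logic in CacheFactory.getCacheSize.
def effective_settings(global_portion, per_cache_portion, per_cache_maximum, estimated_items):
    portion = max(global_portion, per_cache_portion)   # larger divisor wins
    items = estimated_items
    if per_cache_maximum > 0:
        items = min(items, max(per_cache_maximum, 3))  # cap, but never below 3
    return portion, items


# Global portion 32 vs. tilesource portion 8 -> 32 is used; a maximum of 10
# caps a 4096-item estimate at 10.
print(effective_settings(32, 8, 10, 4096))  # (32, 10)
```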
