
Added a memcached option. It's really slow though; don't use it.

Andrew Brown
2012-03-04 22:25:38 -05:00
parent d1f53cc910
commit 98c23fd970
4 changed files with 38 additions and 4 deletions

View File

@@ -350,6 +350,8 @@ dir but you forgot to put quotes around the directory, since it contains spaces.
 # Set up the cache objects to use
 caches = []
 caches.append(cache.LRUCache(size=100))
+if config.get("memcached_host", False):
+    caches.append(cache.Memcached(config['memcached_host']))
 # TODO: optionally more caching layers here
 renders = config['renders']
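The caches list built here is consulted in order by the cache-aware code later in this commit. The sketch below is not Overviewer's actual lookup code; it only illustrates the usual lookup-and-promote pattern for such a layered list, assuming each cache object supports dict-style access and raises KeyError on a miss (the helper name is made up):

    def lookup(caches, key, compute):
        # Try each cache layer in order, fastest (in-memory LRU) first.
        for i, layer in enumerate(caches):
            try:
                value = layer[key]
            except KeyError:
                continue
            # Promote the hit into the faster layers that missed it.
            for upper in caches[:i]:
                upper[key] = value
            return value
        # Every layer missed: compute the value and populate all layers.
        value = compute()
        for layer in caches:
            layer[key] = value
        return value

    # e.g. with plain dicts standing in for cache layers:
    # lookup([{}, {}], "chunk:0:0", lambda: "rendered tile")  -> "rendered tile"

With memcached appended after the LRU cache, an in-process LRU miss still has a chance of being served by the shared memcached instance before the work has to be redone.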

View File

@@ -22,6 +22,7 @@ attribute.
""" """
import functools import functools
import logging import logging
import cPickle
class LRUCache(object): class LRUCache(object):
"""A simple, generic, in-memory LRU cache that implements the standard """A simple, generic, in-memory LRU cache that implements the standard
@@ -124,3 +125,30 @@ class LRUCache(object):
             cache[key] = link
+
+# memcached is an option, but unless your IO costs are really high, it just
+# ends up adding overhead and isn't worth it.
+try:
+    import memcache
+except ImportError:
+    class Memcached(object):
+        def __init__(*args):
+            raise ImportError("No module 'memcache' found. Please install python-memcached")
+else:
+    class Memcached(object):
+        def __init__(self, conn='127.0.0.1:11211'):
+            self.conn = conn
+            self.mc = memcache.Client([conn], debug=0, pickler=cPickle.Pickler, unpickler=cPickle.Unpickler)
+
+        def __getstate__(self):
+            return self.conn
+        def __setstate__(self, conn):
+            self.__init__(conn)
+
+        def __getitem__(self, key):
+            v = self.mc.get(key)
+            if not v:
+                raise KeyError()
+            return v
+
+        def __setitem__(self, key, value):
+            self.mc.set(key, value)
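A hypothetical usage sketch of the Memcached wrapper defined above, assuming a memcached daemon is listening on 127.0.0.1:11211 and the python-memcached package is installed; the key and value are made up:

    cache = Memcached('127.0.0.1:11211')
    cache['somekey'] = {'example': 'value'}   # stored via memcache.set (pickled)
    try:
        value = cache['somekey']              # fetched via memcache.get
    except KeyError:
        value = None                          # treated as a cache miss

Note that because __getitem__ treats any falsy response as a miss, falsy values such as 0 or an empty container are indistinguishable from absent keys.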

View File

@@ -86,3 +86,7 @@ worlds = Setting(required=True, validator=make_dictValidator(validateStr, valida
 outputdir = Setting(required=True, validator=validateOutputDir, default=None)
 processes = Setting(required=True, validator=int, default=-1)
+
+# memcached is an option, but unless your IO costs are really high, it just
+# ends up adding overhead and isn't worth it.
+memcached_host = Setting(required=False, validator=str, default=None)
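With this setting in place, enabling the option from a config file would look roughly like the snippet below. The memcached_host name comes from the setting added above; the surrounding worlds/renders/outputdir values are placeholders, and the "host:port" string mirrors the wrapper's default of 127.0.0.1:11211:

    worlds["myworld"] = "/path/to/saves/myworld"          # placeholder paths
    renders["day"] = {"world": "myworld", "title": "Day"}  # placeholder render
    outputdir = "/path/to/output"

    # Point at a running memcached instance ("host:port"). Per the commit
    # message and the comment above, this usually just adds overhead, so
    # leave it unset unless your I/O costs are very high.
    memcached_host = "127.0.0.1:11211"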

View File

@@ -18,6 +18,7 @@ import os
 import os.path
 from glob import glob
 import logging
+import hashlib

 import numpy
@@ -581,14 +582,13 @@ class CachedRegionSet(RegionSetWrapper):
         s += obj.regiondir
         logging.debug("Initializing a cache with key '%s'", s)
-        if len(s) > 32:
-            import hashlib
-            s = hashlib.md5(s).hexdigest()
+        s = hashlib.md5(s).hexdigest()
         self.key = s

     def get_chunk(self, x, z):
-        key = (self.key, x, z)
+        key = hashlib.md5(repr((self.key, x, z))).hexdigest()
         for i, cache in enumerate(self.caches):
             try:
                 retval = cache[key]
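The switch to hashed keys in get_chunk fits the new backend: memcached keys must be short plain strings with no whitespace, while the natural key here is a tuple. A small sketch of the same expression, following the Python 2 code in the diff, with made-up values standing in for the RegionSet key and chunk coordinates:

    import hashlib

    regionset_key = "c4ca4238a0b923820dcc509a6f75849b"   # hypothetical self.key
    x, z = 12, -7

    # Serialize the tuple with repr(), then hash it, giving a fixed-length
    # 32-character hex string that any cache layer can accept as a key.
    key = hashlib.md5(repr((regionset_key, x, z))).hexdigest()
    assert len(key) == 32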