
Merge branch 'anvil'

Andrew Brown
2012-03-04 00:02:17 -05:00
6 changed files with 216 additions and 41 deletions

View File

@@ -15,48 +15,97 @@
"""This module has supporting functions for the caching logic used in world.py.
Each cache class should implement the standard container type interface
(__getitem__ and __setitem__), as well as provide "hits" and "misses"
attributes.
"""
import functools
import logging
def lru_cache(max_size=100):
"""A quick-and-dirty LRU implementation.
Uses a dict to store mappings, and a list to store orderings.
class LRUCache(object):
"""A simple in-memory LRU cache.
Only supports positional arguments
An ordered dict type would simplify this implementation a bit, but we want
Python 2.6 compatibility, and the standard library's OrderedDict was only
added in 2.7. That's probably fine: this implementation can be tuned for
exactly what we need and nothing more.
This implementation keeps a linked list of cache keys and values, ordered
in least-recently-used order. A dictionary maps keys to linked-list nodes.
On a cache hit, the link is moved to the end of the list. When a new item is
inserted into a full cache, the first (least-recently-used) link is evicted.
All operations run in amortized constant time (dict lookups are worst-case O(n)).
"""
def lru_decorator(fun):
class _LinkNode(object):
__slots__ = ['left', 'right', 'key', 'value']
def __init__(self, l=None, r=None, k=None, v=None):
self.left = l
self.right = r
self.key = k
self.value = v
cache = {}
lru_ordering = []
@functools.wraps(fun)
def new_fun(*args):
try:
result = cache[args]
except KeyError:
# cache miss =(
new_fun.miss += 1
result = fun(*args)
def __init__(self, size=100):
self.cache = {}
# Insert into cache
cache[args] = result
lru_ordering.append(args)
self.listhead = LRUCache._LinkNode()
self.listtail = LRUCache._LinkNode()
# Two sentinel nodes at the ends of the linked list simplify boundary
# conditions in the code below.
self.listhead.right = self.listtail
self.listtail.left = self.listhead
if len(cache) > max_size:
# Evict an item
del cache[ lru_ordering.pop(0) ]
self.hits = 0
self.misses = 0
else:
# Move the result item to the end of the list
new_fun.hits += 1
position = lru_ordering.index(args)
lru_ordering.append(lru_ordering.pop(position))
self.size = size
return result
# Initialize an empty cache of the same size for worker processes
def __getstate__(self):
return self.size
def __setstate__(self, size):
self.__init__(size)
new_fun.hits = 0
new_fun.miss = 0
return new_fun
def __getitem__(self, key):
try:
link = self.cache[key]
except KeyError:
self.misses += 1
raise
# Disconnect the link from where it is
link.left.right = link.right
link.right.left = link.left
# Insert the link at the end of the list
tail = self.listtail
link.left = tail.left
link.right = tail
tail.left.right = link
tail.left = link
self.hits += 1
return link.value
def __setitem__(self, key, value):
cache = self.cache
if key in cache:
raise KeyError("That key already exists in the cache!")
if len(cache) >= self.size:
# Evict a node
link = self.listhead.right
del cache[link.key]
link.left.right = link.right
link.right.left = link.left
del link
# The node doesn't exist already, and we have room for it. Let's do this.
tail = self.listtail
link = LRUCache._LinkNode(tail.left, tail,key,value)
tail.left.right = link
tail.left = link
cache[key] = link
return lru_decorator
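For orientation, here is a short usage sketch of the new LRUCache (not part of this commit). The import path is an assumption, since the diff does not show the file name, and the chunk-coordinate keys are purely illustrative.

from overviewer_core.cache import LRUCache   # module path assumed

cache = LRUCache(size=2)           # keep at most two entries

cache[(0, 0)] = "chunk A"          # __setitem__ inserts at the most-recent end
cache[(0, 1)] = "chunk B"
value = cache[(0, 0)]              # hit: (0, 0) moves to the most-recent end
cache[(0, 2)] = "chunk C"          # cache is full, so the oldest entry (0, 1) is evicted

try:
    cache[(0, 1)]                  # miss: raises KeyError and bumps .misses
except KeyError:
    pass

print(cache.hits, cache.misses)    # -> 1 1

Note that __setitem__ refuses to overwrite an existing key, so callers are expected to insert only after a miss.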

View File

@@ -55,6 +55,7 @@ def validateRenderMode(mode):
# make sure that mode is a list of things that are all rendermode primitives
if isinstance(mode, str):
# Try to find an item in the rendermodes module matching the given mode name
mode = mode.lower().replace("-","_")
try:
mode = getattr(rendermodes, mode)
except AttributeError:
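A small illustration of what the added normalization buys (the mode name here is hypothetical): hyphenated, mixed-case names from a config file now resolve to the same rendermodes attribute as their lowercase, underscored form.

name = "Smooth-Lighting"                       # hypothetical user-supplied mode name
normalized = name.lower().replace("-", "_")    # -> "smooth_lighting"
# mode = getattr(rendermodes, normalized)      # same lookup the code above performs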

View File

@@ -208,7 +208,7 @@ class RegionSet(object):
"""
def __init__(self, regiondir, cachesize=16):
def __init__(self, regiondir):
"""Initialize a new RegionSet to access the region files in the given
directory.
@@ -232,10 +232,6 @@ class RegionSet(object):
self.empty_chunk = [None,None]
logging.debug("Done scanning regions")
# Caching implementation: a simple LRU cache
# Decorate the getter methods with the cache decorator
self.get_chunk = cache.lru_cache(cachesize)(self.get_chunk)
# Re-initialize upon unpickling
def __getstate__(self):
return self.regiondir
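RegionSet's __getstate__ above pairs with a __setstate__ (per the "Re-initialize upon unpickling" comment) in the same way LRUCache does in the first file: only the constructor argument is pickled, and the receiving process rebuilds everything else by re-running __init__. A standalone sketch of that pattern, with an illustrative class name:

import pickle

class Reinitialized(object):
    """Pickles only its constructor argument; everything else is rebuilt."""
    def __init__(self, path):
        self.path = path
        self.state = {}                 # per-process state, never pickled

    def __getstate__(self):
        return self.path                # keep just what __init__ needs

    def __setstate__(self, path):
        self.__init__(path)             # re-run __init__ on the receiving side

obj = Reinitialized("some/region/dir")  # path is illustrative
obj.state["expensive"] = 1
clone = pickle.loads(pickle.dumps(obj))
assert clone.state == {}                # the unpickled copy starts fresh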
@@ -258,7 +254,6 @@ class RegionSet(object):
else:
raise Exception("Woah, what kind of dimension is this! %r" % self.regiondir)
# this is decorated with cache.lru_cache in __init__(). Be aware!
@log_other_exceptions
def get_chunk(self, x, z):
"""Returns a dictionary object representing the "Level" NBT Compound
@@ -547,7 +542,59 @@ class CroppedRegionSet(RegionSetWrapper):
else:
return None
class CachedRegionSet(RegionSetWrapper):
"""A regionset wrapper that implements caching of the results from
get_chunk()
"""
def __init__(self, rsetobj, cacheobjects):
"""Initialize this wrapper around the given regionset object and with
the given list of cache objects. The cache objects may be shared among
other CachedRegionSet objects.
"""
super(CachedRegionSet, self).__init__(rsetobj)
self.caches = cacheobjects
# Construct a key from the sequence of transformations and the real
# RegionSet object, so that items we place in the cache don't conflict
# with other worlds/transformation combinations.
obj = self._r
s = ""
while isinstance(obj, RegionSetWrapper):
s += obj.__class__.__name__ + "."
obj = obj._r
# obj should now be the actual RegionSet object
s += obj.regiondir
logging.debug("Initializing a cache with key '%s'", s)
if len(s) > 32:
import hashlib
s = hashlib.md5(s).hexdigest()
self.key = s
def get_chunk(self, x, z):
key = (self.key, x, z)
for i, cache in enumerate(self.caches):
try:
retval = cache[key]
# This cache had it; no need to re-add it here, just to the
# caches before this one
i -= 1
break
except KeyError:
pass
else:
retval = super(CachedRegionSet, self).get_chunk(x,z)
# Now add retval to all the caches that didn't have it, all the caches
# up to and including index i
for cache in self.caches[:i+1]:
cache[key] = retval
return retval
def get_save_dir():
"""Returns the path to the local saves directory