0

Added caching for chunks in RegionSet(); currently disabled.

This build is broken due to a mismatch between the expected and actual error
behavior of get_chunk: the C code still expects it to return None.
This commit is contained in:
Andrew Brown
2012-01-16 00:48:30 -05:00
parent a48077fe54
commit ba87959e31
2 changed files with 155 additions and 6 deletions

62
overviewer_core/cache.py Normal file
View File

@@ -0,0 +1,62 @@
# This file is part of the Minecraft Overviewer.
#
# Minecraft Overviewer is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation, either version 3 of the License, or (at
# your option) any later version.
#
# Minecraft Overviewer is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with the Overviewer. If not, see <http://www.gnu.org/licenses/>.
"""This module has supporting functions for the caching logic used in world.py.
"""
import functools
def lru_cache(max_size=100):
    """A quick-and-dirty LRU cache decorator.

    Caches results keyed on the decorated function's positional arguments,
    which must therefore be hashable. Only positional arguments are
    supported. Hit/miss statistics are exposed on the decorated function
    as ``fun.hits`` and ``fun.miss``.

    An OrderedDict's insertion order doubles as the recency order, so both
    the hit path and eviction are O(1) instead of the O(n) list scans
    (``list.index`` / ``list.pop(0)``) a separate ordering list would need.
    """
    from collections import OrderedDict

    def lru_decorator(fun):
        cache = OrderedDict()

        @functools.wraps(fun)
        def new_fun(*args):
            try:
                # Remove on hit so the re-insert below refreshes recency.
                result = cache.pop(args)
            except KeyError:
                # cache miss =(
                new_fun.miss += 1
                result = fun(*args)
                if len(cache) >= max_size:
                    # Evict the least recently used entry (oldest key).
                    cache.popitem(last=False)
            else:
                new_fun.hits += 1
            # (Re-)insert at the most-recently-used end.
            cache[args] = result
            return result

        new_fun.hits = 0
        new_fun.miss = 0
        return new_fun
    return lru_decorator

View File

@@ -23,6 +23,7 @@ import collections
import numpy
import nbt
import cache
"""
This module has routines for extracting information about available worlds
@@ -235,6 +236,10 @@ class RegionSet(object):
self.empty_chunk = [None,None]
logging.debug("Done scanning regions")
# Caching implementation: a simple LRU cache
# Decorate the get_chunk method with the cache decorator
#self.get_chunk = cache.lru_cache(cachesize)(self.get_chunk)
# Re-initialize upon unpickling
def __getstate__(self):
return self.regiondir
@@ -243,21 +248,24 @@ class RegionSet(object):
def __repr__(self):
return "<RegionSet regiondir=%r>" % self.regiondir
def get_chunk(self,x, z):
def get_chunk(self, x, z):
"""Returns a dictionary object representing the "Level" NBT Compound
structure for a chunk given its x, z coordinates. The coordinates are
chunk coordinates. Raises ChunkDoesntExist exception if the given chunk
does not exist.
The returned dictionary corresponds to the “Level” structure in the
The returned dictionary corresponds to the "Level" structure in the
chunk file, with a few changes:
* The Blocks byte string is transformed into a 16x16x128 numpy array
* The SkyLight byte string is transformed into a 16x16x128 numpy
* The "Blocks" byte string is transformed into a 16x16x128 numpy array
* The "SkyLight" byte string is transformed into a 16x16x128 numpy
array
* The BlockLight byte string is transformed into a 16x16x128 numpy
* The "BlockLight" byte string is transformed into a 16x16x128 numpy
array
* The “Data” byte string is transformed into a 16x16x128 numpy array
* The "Data" byte string is transformed into a 16x16x128 numpy array
Warning: the returned data may be cached and thus should not be
modified, lest it affect the return values of future calls for the same
chunk.
"""
regionfile = self._get_region_path(x, z)
@@ -428,3 +436,82 @@ def get_worlds():
return ret
def lru_cache(maxsize=100):
    '''Generalized Least-recently-used cache decorator.

    Arguments to the cached function must be hashable; keyword arguments
    are supported. Cache performance statistics are stored in f.hits and
    f.misses, and the cache can be emptied with f.clear().
    http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used

    Adapted from
    http://code.activestate.com/recipes/498245-lru-and-lfu-cache-decorators/
    '''
    # The deque of recently used keys is allowed to grow to ten times the
    # cache size before duplicates are compacted away.
    maxqueue = maxsize * 10

    def decorating_function(user_function,
            len=len, iter=iter, tuple=tuple, sorted=sorted, KeyError=KeyError):
        cache = {}                              # mapping of args to results
        queue = collections.deque()             # order that keys have been used
        refcount = collections.defaultdict(int) # times each key is in the queue
        sentinel = object()                     # marker for looping around the queue
        kwd_mark = object()                     # separates positional and keyword args

        # lookup optimizations (ugly but fast)
        queue_append, queue_popleft = queue.append, queue.popleft
        queue_appendleft, queue_pop = queue.appendleft, queue.pop

        @functools.wraps(user_function)
        def wrapper(*args, **kwds):
            # cache key records both positional and keyword args
            key = args
            if kwds:
                key += (kwd_mark,) + tuple(sorted(kwds.items()))

            # record recent use of this key
            queue_append(key)
            refcount[key] += 1

            # get cache entry or compute if not found
            try:
                result = cache[key]
                wrapper.hits += 1
            except KeyError:
                result = user_function(*args, **kwds)
                cache[key] = result
                wrapper.misses += 1

                # purge least recently used cache entry: skip queue entries
                # that are stale duplicates (refcount still nonzero).
                if len(cache) > maxsize:
                    key = queue_popleft()
                    refcount[key] -= 1
                    while refcount[key]:
                        key = queue_popleft()
                        refcount[key] -= 1
                    del cache[key], refcount[key]

            # Periodically compact the queue by eliminating duplicate keys
            # while preserving order of most recent access. The original
            # recipe used itertools.ifilterfalse here, which was never
            # imported in this module (and is spelled filterfalse in
            # Python 3) and so raised NameError; a plain loop is equivalent.
            if len(queue) > maxqueue:
                refcount.clear()
                queue_appendleft(sentinel)
                # Pop from the right (most recent first) until the sentinel;
                # the first occurrence of each key wins and is pushed back.
                for key in iter(queue_pop, sentinel):
                    if key not in refcount:
                        queue_appendleft(key)
                        refcount[key] = 1
            return result

        def clear():
            """Empty the cache and reset the statistics."""
            cache.clear()
            queue.clear()
            refcount.clear()
            wrapper.hits = wrapper.misses = 0

        wrapper.hits = wrapper.misses = 0
        wrapper.clear = clear
        return wrapper
    return decorating_function