
Initial chunk cache commit mk2

Xon
2011-03-26 13:27:33 +08:00
parent 71a2a024cc
commit 729141d426
5 changed files with 74 additions and 31 deletions
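
In outline, the cache this commit introduces hangs new state off the World object: each entry in World.regions gains a third element holding that region's cached chunks, and a global chunk budget decides when those caches get dropped. A rough sketch of the resulting shapes (illustrative values only; the real entries are built by reload_region() and load_from_region()):

# Illustrative stand-ins, not repository objects
mcr_reader = object()   # stands in for an nbt.MCRFileReader instance
level_tag = {}          # stands in for a parsed 'Level' NBT tag

regions = {
    # region filename -> (MCRFileReader, file mtime, per-region chunk cache)
    "r.0.0.mcr": (mcr_reader, 1301112453.0, {(3, 7): [level_tag, 1301112460.0]}),
}
regionfiles = {
    # (region x, region y) -> (x, y, region filename, MCRFileReader)
    (0, 0): (0, 0, "r.0.0.mcr", mcr_reader),
}
chunklimit = 1024 * 6   # cached chunks allowed before other regions' caches are dropped
chunkcount = 0          # running count of chunks loaded into the cache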


@@ -52,7 +52,7 @@ def get_lvldata(world, filename, x, y, retries=2):
    # non existent region file doesn't mean corrupt chunk.
    if filename == None:
        raise NoSuchChunk
        return None
    try:
        d = world.load_from_region(filename, x, y)
@@ -60,13 +60,15 @@ def get_lvldata(world, filename, x, y, retries=2):
        if retries > 0:
            # wait a little bit, and try again (up to `retries` times)
            time.sleep(1)
            #make sure we reload region info
            world.reload_region(filename)
            return get_lvldata(world, filename, x, y, retries=retries-1)
        else:
            logging.warning("Error opening chunk (%i, %i) in %s. It may be corrupt. %s", x, y, filename, e)
            raise ChunkCorrupt(str(e))
    if not d: raise NoSuchChunk(x,y)
    return d[1]['Level']
    return d

def get_blockarray(level):
    """Takes the level struct as returned from get_lvldata, and returns the

nbt.py

@@ -282,6 +282,17 @@ class MCRFileReader(object):
        return timestamp

    def openfile(self):
        # open the region file lazily if it isn't open already
        if self._file is None:
            self._file = open(self._filename,'rb')

    def closefile(self):
        # make sure we clean up
        if self._file is not None:
            self._file.close()
            self._file = None

    def get_chunks(self):
        """Return a list of all chunks contained in this region file,
        as a list of (x, y) coordinate tuples. To load these chunks,
@@ -304,8 +315,7 @@ class MCRFileReader(object):
        if self._locations:
            return

        if self._file is None:
            self._file = open(self._filename,'rb')
        self.openfile()

        self._chunks = None
        self._locations = []
@@ -325,9 +335,7 @@ class MCRFileReader(object):
            timestamp_append(self._read_chunk_timestamp())

        if closeFile:
            #free the file object since it isn't safe to be reused in child processes (seek point goes wonky!)
            self._file.close()
            self._file = None
            self.closefile()

        return

    def get_chunk_timestamp(self, x, y):
@@ -350,7 +358,7 @@ class MCRFileReader(object):
        location = self._locations[x + y * 32]
        return location is not None

    def load_chunk(self, x, y):
    def load_chunk(self, x, y, closeFile=True):
        """Return a NBTFileReader instance for the given chunk, or
        None if the given chunk doesn't exist in this region file. If
        you provide an x or y not between 0 and 31, it will be
@@ -366,8 +374,8 @@ class MCRFileReader(object):
        if location is None:
            return None

        if self._file is None:
            self._file = open(self._filename,'rb');
        self.openfile()

        # seek to the data
        self._file.seek(location[0])
@@ -391,4 +399,6 @@ class MCRFileReader(object):
        data = self._file.read(data_length - 1)
        data = StringIO.StringIO(data)

        if closeFile:
            self.closefile()

        return NBTFileReader(data, is_gzip=is_gzip)
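
A rough usage sketch (not part of the diff) of the file-handle lifecycle the new openfile()/closefile() helpers and the closeFile=True default give MCRFileReader: the file is held open only while the header or a chunk is actually being read, so a freshly forked worker never inherits a live seek position. The region path and chunk coordinates below are placeholders.

import nbt

region_path = "world/region/r.0.0.mcr"        # hypothetical path
reader = nbt.MCRFileReader(region_path)
reader.get_chunk_info()                       # parse the chunk location and timestamp headers

chunk_nbt = reader.load_chunk(3, 7)           # reopens the file, reads the chunk, closes it again
if chunk_nbt is not None:
    data = chunk_nbt.read_all()               # parsed NBT; data[1]['Level'] is the chunk's Level tag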


@@ -41,7 +41,6 @@ except ImportError:
    sys.exit(1)

import optimizeimages
import composite
import world
import quadtree
import googlemap

@@ -166,10 +165,7 @@ def main():
    logging.info("Welcome to Minecraft Overviewer!")
    logging.debug("Current log level: {0}".format(logging.getLogger().level))
    if not composite.extension_alpha_over:
        logging.info("Notice: alpha_over extension not found; using default PIL paste()")
    useBiomeData = os.path.exists(os.path.join(worlddir, 'biomes'))
    if not useBiomeData:
        logging.info("Notice: Not using biome data for tinting")


@@ -410,9 +410,10 @@ class QuadtreeGen(object):
        # check chunk mtimes to see if they are newer
        try:
            needs_rerender = False
            get_region_mtime = world.get_region_mtime
            for col, row, chunkx, chunky, regionfile in chunks:
                # check region file mtime first.
                region,regionMtime = world.get_region_mtime(regionfile)
                region,regionMtime = get_region_mtime(regionfile)
                if regionMtime <= tile_mtime:
                    continue
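
The only change in this hunk is binding world.get_region_mtime to a local name before the loop, presumably to avoid repeating the attribute lookup on every chunk of every tile. A minimal illustration of the pattern (generic code, not from this repository):

import timeit

class Holder(object):
    def get_region_mtime(self, regionfile):
        return regionfile                    # stand-in body

holder = Holder()
regionfiles = range(1000)

def per_iteration_lookup():
    for r in regionfiles:
        holder.get_region_mtime(r)

def bound_once():
    get_region_mtime = holder.get_region_mtime
    for r in regionfiles:
        get_region_mtime(r)

print(timeit.timeit(per_iteration_lookup, number=1000))
print(timeit.timeit(bound_once, number=1000))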


@@ -29,6 +29,7 @@ import numpy
import chunk
import nbt
import textures
import time
"""
This module has routines for extracting information about available worlds
@@ -75,14 +76,17 @@ class World(object):
        #this also caches all the region file header info
        logging.info("Scanning regions")
        regionfiles = {}
        regions = {}
        for x, y, regionfile in self._iterate_regionfiles():
            mcr = nbt.MCRFileReader(regionfile)
        self.regions = {}
        for x, y, regionfile in self._iterate_regionfiles():
            mcr = self.reload_region(regionfile)
            mcr.get_chunk_info()
            regions[regionfile] = (mcr,os.path.getmtime(regionfile))
            regionfiles[(x,y)] = (x,y,regionfile,mcr)
        self.regionfiles = regionfiles
        self.regions = regions
        # set the number of region file handles we will permit open at any time before we start closing them
        # self.regionlimit = 1000
        # the max number of chunks we will keep before removing them
        self.chunklimit = 1024*6 # this should be a multiple of the max chunks per region, or things could get wonky
        self.chunkcount = 0
        logging.debug("Done scanning regions")

        # figure out which chunk format is in use
@@ -118,24 +122,54 @@ class World(object):
        """
        _, _, regionfile,_ = self.regionfiles.get((chunkX//32, chunkY//32),(None,None,None,None));
        return regionfile

    def load_from_region(self,filename, x, y):
        nbt = self.load_region(filename).load_chunk(x, y)
        if nbt is None:
            return None ## return None. I think this is how we should indicate missing chunks
            #raise IOError("No such chunk in region: (%i, %i)" % (x, y))
        return nbt.read_all()
        # we need to manage the chunk cache
        regioninfo = self.regions[filename]
        if regioninfo is None:
            return None
        chunks = regioninfo[2]
        chunk_data = chunks.get((x,y))
        if chunk_data is None:
            nbt = self.load_region(filename).load_chunk(x, y)
            if nbt is None:
                chunks[(x,y)] = [None,None]
                return None ## return None. I think this is how we should indicate missing chunks
                #raise IOError("No such chunk in region: (%i, %i)" % (x, y))

            # prune the cache if required
            if self.chunkcount > self.chunklimit: # TODO: make emptying the chunk cache slightly less crazy
                [self.reload_region(regionfile) for regionfile in self.regions if regionfile <> filename]
            self.chunkcount += 1

            # we cache the transformed data, not its raw form
            data = nbt.read_all()
            level = data[1]['Level']
            chunk_data = level
            #chunk_data = {}
            #chunk_data['skylight'] = chunk.get_skylight_array(level)
            #chunk_data['blocklight'] = chunk.get_blocklight_array(level)
            #chunk_data['blockarray'] = chunk.get_blockdata_array(level)
            #chunk_data['TileEntities'] = chunk.get_tileentity_data(level)
            chunks[(x,y)] = [level,time.time()]
        else:
            chunk_data = chunk_data[0]
        return chunk_data

    # used to reload a changed region
    def reload_region(self,filename):
        self.regions[filename] = (nbt.MCRFileReader(filename),os.path.getmtime(regionfile))
        if self.regions.get(filename) is not None:
            self.regions[filename][0].closefile()
        chunkcache = {}
        mcr = nbt.MCRFileReader(filename)
        self.regions[filename] = (mcr,os.path.getmtime(filename),chunkcache)
        return mcr

    def load_region(self,filename):
        return self.regions[filename][0]

    def get_region_mtime(self,filename):
        return self.regions[filename]
        return (self.regions[filename][0],self.regions[filename][1])

    def convert_coords(self, chunkx, chunky):
        """Takes a coordinate (chunkx, chunky) where chunkx and chunky are