Initial chunk cache commit mk2
chunk.py (6 lines changed)
@@ -52,7 +52,7 @@ def get_lvldata(world, filename, x, y, retries=2):
 
     # non existent region file doesn't mean corrupt chunk.
     if filename == None:
-        raise NoSuchChunk
+        return None
 
     try:
         d = world.load_from_region(filename, x, y)
@@ -60,13 +60,15 @@ def get_lvldata(world, filename, x, y, retries=2):
         if retries > 0:
             # wait a little bit, and try again (up to `retries` times)
             time.sleep(1)
+            #make sure we reload region info
+            world.reload_region(filename)
             return get_lvldata(world, filename, x, y, retries=retries-1)
         else:
             logging.warning("Error opening chunk (%i, %i) in %s. It may be corrupt. %s", x, y, filename, e)
             raise ChunkCorrupt(str(e))
 
     if not d: raise NoSuchChunk(x,y)
-    return d[1]['Level']
+    return d
 
 def get_blockarray(level):
     """Takes the level struct as returned from get_lvldata, and returns the
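Read together, the two chunk.py hunks change the contract of get_lvldata: a missing region file now yields None instead of raising NoSuchChunk, a failed read calls world.reload_region(filename) before retrying, and the function returns whatever load_from_region hands back, which (per the world.py hunks below) is already the unpacked 'Level' tag. A sketch of how the whole function reads after the change, assuming the module's existing NoSuchChunk and ChunkCorrupt exceptions and the codebase's Python 2 syntax:

import time
import logging

def get_lvldata(world, filename, x, y, retries=2):
    # A missing region file is not a corrupt chunk; just report "no chunk here".
    if filename == None:
        return None
    try:
        d = world.load_from_region(filename, x, y)
    except Exception, e:
        if retries > 0:
            # wait a little bit, and try again (up to `retries` times)
            time.sleep(1)
            # make sure we reload region info before the retry
            world.reload_region(filename)
            return get_lvldata(world, filename, x, y, retries=retries-1)
        else:
            logging.warning("Error opening chunk (%i, %i) in %s. It may be corrupt. %s", x, y, filename, e)
            raise ChunkCorrupt(str(e))

    if not d: raise NoSuchChunk(x,y)
    return d  # already the chunk's 'Level' tag, cached inside the World object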
nbt.py (26 lines changed)
@@ -282,6 +282,17 @@ class MCRFileReader(object):
 
         return timestamp
 
+    def openfile(self):
+        #make sure we clean up
+        if self._file is None:
+            self._file = open(self._filename,'rb')
+
+    def closefile(self):
+        #make sure we clean up
+        if self._file is not None:
+            self._file.close()
+            self._file = None
+
     def get_chunks(self):
         """Return a list of all chunks contained in this region file,
         as a list of (x, y) coordinate tuples. To load these chunks,
@@ -304,8 +315,7 @@ class MCRFileReader(object):
         if self._locations:
             return
 
-        if self._file is None:
-            self._file = open(self._filename,'rb')
+        self.openfile()
 
         self._chunks = None
         self._locations = []
@@ -325,9 +335,7 @@ class MCRFileReader(object):
             timestamp_append(self._read_chunk_timestamp())
 
         if closeFile:
-            #free the file object since it isn't safe to be reused in child processes (seek point goes wonky!)
-            self._file.close()
-            self._file = None
+            self.closefile()
         return
 
     def get_chunk_timestamp(self, x, y):
@@ -350,7 +358,7 @@ class MCRFileReader(object):
         location = self._locations[x + y * 32]
         return location is not None
 
-    def load_chunk(self, x, y):
+    def load_chunk(self, x, y,closeFile=True):
         """Return a NBTFileReader instance for the given chunk, or
         None if the given chunk doesn't exist in this region file. If
         you provide an x or y not between 0 and 31, it will be
@@ -366,8 +374,8 @@ class MCRFileReader(object):
         if location is None:
             return None
 
-        if self._file is None:
-            self._file = open(self._filename,'rb');
+        self.openfile()
+
         # seek to the data
         self._file.seek(location[0])
 
@@ -391,4 +399,6 @@ class MCRFileReader(object):
         data = self._file.read(data_length - 1)
         data = StringIO.StringIO(data)
 
+        if closeFile:
+            self.closefile()
         return NBTFileReader(data, is_gzip=is_gzip)
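With openfile() and closefile() in place, an MCRFileReader no longer has to hold its file handle open between calls: reads open the file on demand and, when closeFile is set, release it again before returning, so the reader object can be handed to worker processes without a shared (and soon to be wonky) seek position. A small usage sketch; the region path and coordinates are hypothetical:

import nbt

mcr = nbt.MCRFileReader('world/region/r.0.0.mcr')
mcr.get_chunk_info()                  # scan chunk locations and timestamps

# load_chunk() reopens the file if it is currently closed; with the default
# closeFile=True it closes it again before returning, so no open handle
# (or stale seek offset) is carried into forked worker processes.
chunk_reader = mcr.load_chunk(3, 7)   # NBTFileReader, or None if the chunk is absent

mcr.closefile()                       # idempotent: a no-op if the handle is already closed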
@@ -41,7 +41,6 @@ except ImportError:
     sys.exit(1)
 
 import optimizeimages
-import composite
 import world
 import quadtree
 import googlemap
@@ -166,10 +165,7 @@ def main():
 
     logging.info("Welcome to Minecraft Overviewer!")
     logging.debug("Current log level: {0}".format(logging.getLogger().level))
 
-    if not composite.extension_alpha_over:
-        logging.info("Notice: alpha_over extension not found; using default PIL paste()")
-
     useBiomeData = os.path.exists(os.path.join(worlddir, 'biomes'))
     if not useBiomeData:
         logging.info("Notice: Not using biome data for tinting")
@@ -410,9 +410,10 @@ class QuadtreeGen(object):
         # check chunk mtimes to see if they are newer
         try:
             needs_rerender = False
+            get_region_mtime = world.get_region_mtime
             for col, row, chunkx, chunky, regionfile in chunks:
                 # check region file mtime first.
-                region,regionMtime = world.get_region_mtime(regionfile)
+                region,regionMtime = get_region_mtime(regionfile)
                 if regionMtime <= tile_mtime:
                     continue
 
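The QuadtreeGen hunk is a micro-optimisation: world.get_region_mtime is bound to a local once, so the per-chunk loop does not re-resolve the attribute on every iteration (similar in spirit to the timestamp_append name seen in the nbt.py hunk above). A self-contained illustration of the idiom, with throwaway names:

class DemoWorld(object):
    def get_region_mtime(self, regionfile):
        return regionfile, 0           # stand-in for (MCRFileReader, mtime)

world = DemoWorld()
regionfiles = ['r.%d.0.mcr' % i for i in range(4)]

get_region_mtime = world.get_region_mtime        # resolve the bound method once
for regionfile in regionfiles:
    region, region_mtime = get_region_mtime(regionfile)   # no attribute lookup per iteration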
world.py (64 lines changed)
@@ -29,6 +29,7 @@ import numpy
 import chunk
 import nbt
 import textures
+import time
 
 """
 This module has routines for extracting information about available worlds
@@ -75,14 +76,17 @@ class World(object):
         #this also caches all the region file header info
         logging.info("Scanning regions")
         regionfiles = {}
-        regions = {}
+        self.regions = {}
         for x, y, regionfile in self._iterate_regionfiles():
-            mcr = nbt.MCRFileReader(regionfile)
+            mcr = self.reload_region(regionfile)
             mcr.get_chunk_info()
-            regions[regionfile] = (mcr,os.path.getmtime(regionfile))
             regionfiles[(x,y)] = (x,y,regionfile,mcr)
         self.regionfiles = regionfiles
-        self.regions = regions
+        # set the number of region file handles we will permit open at any time before we start closing them
+        # self.regionlimit = 1000
+        # the max number of chunks we will keep before removing them
+        self.chunklimit = 1024*6 # this should be a multipule of the max chunks per region or things could get wonky ???
+        self.chunkcount = 0
         logging.debug("Done scanning regions")
 
         # figure out chunk format is in use
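After this constructor change, every region is registered through reload_region, so self.regions maps each region file to a (reader, mtime, chunk cache) triple, while chunklimit and chunkcount bound how many parsed chunks stay resident across all regions. A sketch of the resulting layout; the path and values are made up:

reader = object()   # stands in for the nbt.MCRFileReader built by reload_region()

regions = {
    'world/region/r.0.0.mcr': (
        reader,            # MCRFileReader for this region
        1302454861.0,      # os.path.getmtime(regionfile) taken when (re)loaded
        {},                # per-region chunk cache: (x, y) -> [level_tag, load_time]
    ),
}
regionfiles = {(0, 0): (0, 0, 'world/region/r.0.0.mcr', reader)}

chunklimit = 1024 * 6      # cached chunks allowed before load_from_region starts pruning
chunkcount = 0             # chunks cached so far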
@@ -118,24 +122,54 @@ class World(object):
         """
         _, _, regionfile,_ = self.regionfiles.get((chunkX//32, chunkY//32),(None,None,None,None));
         return regionfile
 
     def load_from_region(self,filename, x, y):
-        nbt = self.load_region(filename).load_chunk(x, y)
-        if nbt is None:
-            return None ## return none. I think this is who we should indicate missing chunks
-            #raise IOError("No such chunk in region: (%i, %i)" % (x, y))
-        return nbt.read_all()
+        #we need to manage the chunk cache
+        regioninfo = self.regions[filename]
+        if regioninfo is None:
+            return None
+        chunks = regioninfo[2]
+        chunk_data = chunks.get((x,y))
+        if chunk_data is None:
+            nbt = self.load_region(filename).load_chunk(x, y)
+            if nbt is None:
+                chunks[(x,y)] = [None,None]
+                return None ## return none. I think this is who we should indicate missing chunks
+                #raise IOError("No such chunk in region: (%i, %i)" % (x, y))
+            #prune the cache if required
+            if self.chunkcount > self.chunklimit: #todo: make the emptying the chunk cache slightly less crazy
+                [self.reload_region(regionfile) for regionfile in self.regions if regionfile <> filename]
+            self.chunkcount += 1
+
+            #we cache the transformed data, not it's raw form
+            data = nbt.read_all()
+            level = data[1]['Level']
+            chunk_data = level
+            #chunk_data = {}
+            #chunk_data['skylight'] = chunk.get_skylight_array(level)
+            #chunk_data['blocklight'] = chunk.get_blocklight_array(level)
+            #chunk_data['blockarray'] = chunk.get_blockdata_array(level)
+            #chunk_data['TileEntities'] = chunk.get_tileentity_data(level)
+
+            chunks[(x,y)] = [level,time.time()]
+        else:
+            chunk_data = chunk_data[0]
+        return chunk_data
 
     #used to reload a changed region
     def reload_region(self,filename):
-        self.regions[filename] = (nbt.MCRFileReader(filename),os.path.getmtime(regionfile))
+        if self.regions.get(filename) is not None:
+            self.regions[filename][0].closefile()
+        chunkcache = {}
+        mcr = nbt.MCRFileReader(filename)
+        self.regions[filename] = (mcr,os.path.getmtime(filename),chunkcache)
+        return mcr
 
     def load_region(self,filename):
         return self.regions[filename][0]
 
     def get_region_mtime(self,filename):
-        return self.regions[filename]
+        return (self.regions[filename][0],self.regions[filename][1])
 
     def convert_coords(self, chunkx, chunky):
         """Takes a coordinate (chunkx, chunky) where chunkx and chunky are
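The net effect of the world.py changes: load_from_region answers from the per-region cache when it can, parses and caches the 'Level' tag on a miss (negative-caching chunks that do not exist), and once the global count passes chunklimit it flushes every other region's cache by re-opening those regions. The sketch below re-implements that pattern as a standalone class so the moving parts are easier to see; the names, the loader callback and the limit default are illustrative, not part of the commit:

import time

class RegionChunkCache(object):
    """Toy version of the caching pattern World.load_from_region now uses:
    one dict of parsed chunks per region, flushed wholesale (except for the
    region currently being read) once a global limit is exceeded."""

    def __init__(self, load_chunk, limit=6144):
        self.load_chunk = load_chunk   # callable (region, x, y) -> parsed chunk or None
        self.caches = {}               # region -> {(x, y): [chunk, load_time]}
        self.limit = limit
        self.count = 0

    def get(self, region, x, y):
        chunks = self.caches.setdefault(region, {})
        entry = chunks.get((x, y))
        if entry is not None:
            return entry[0]                  # hit; may be a cached "missing chunk" None
        chunk = self.load_chunk(region, x, y)
        if chunk is None:
            chunks[(x, y)] = [None, None]    # negative-cache chunks that do not exist
            return None
        if self.count > self.limit:          # blunt prune: drop every other region's cache
            for other in self.caches:
                if other != region:
                    self.caches[other] = {}
        self.count += 1
        chunks[(x, y)] = [chunk, time.time()]
        return chunk

In the commit itself the flush happens by calling reload_region on the other regions, which also replaces their MCRFileReader and cached mtime; the standalone version above only empties the chunk dictionaries.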