Merge branch 'master' of git://github.com/brownan/Minecraft-Overviewer
@@ -123,7 +123,7 @@ Running
 -------
 To generate a set of Google Map tiles, use the gmap.py script like this::
 
-    python gmap.py [OPTIONS] <World Number / Path to World> <Output Directory>
+    python gmap.py [OPTIONS] <World # / Name / Path to World> <Output Directory>
 
 The output directory will be created if it doesn't exist. This will generate a
 set of image tiles for your world in the directory you choose. When it's done,
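With the name lookup added to gmap.py below, any of these forms selects the same world (the save name and output directory here are hypothetical):

    python gmap.py 1 /tmp/tiles
    python gmap.py "World1" /tmp/tiles
    python gmap.py ~/.minecraft/saves/World1 /tmp/tiles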
chunk.py  (143 lines changed)
@@ -20,6 +20,7 @@ import hashlib
 import logging
 import time
 import math
+import sys
 
 import nbt
 import textures
@@ -45,10 +46,13 @@ image
 # alpha_over extension, BUT this extension may fall back to PIL's
 # paste(), which DOES need the workaround.)
 
-def get_lvldata(filename):
-    """Takes a filename and returns the Level struct, which contains all the
+def get_lvldata(filename, x, y):
+    """Takes a filename and chunkcoords and returns the Level struct, which contains all the
     level info"""
-    return nbt.load(filename)[1]['Level']
+
+    d = nbt.load_from_region(filename, x, y)
+    if not d: raise NoSuchChunk(x,y)
+    return d[1]['Level']
 
 def get_blockarray(level):
     """Takes the level struct as returned from get_lvldata, and returns the
@@ -124,14 +128,12 @@ fluid_blocks = set([8,9,10,11])
 # (glass, half blocks)
 nospawn_blocks = set([20,44])
 
-def find_oldimage(chunkfile, cached, cave):
-    destdir, filename = os.path.split(chunkfile)
-    filename_split = filename.split(".")
-    blockid = ".".join(filename_split[1:3])
+def find_oldimage(chunkXY, cached, cave):
+    blockid = "%d.%d" % chunkXY
 
     # Get the name of the existing image.
-    moredirs, dir2 = os.path.split(destdir)
-    dir1 = os.path.basename(moredirs)
+    dir1 = world.base36encode(chunkXY[0]%64)
+    dir2 = world.base36encode(chunkXY[1]%64)
     cachename = '/'.join((dir1, dir2))
 
     oldimg = oldimg_path = None
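The cache layout keeps the old Alpha-style scheme of two directory levels named by base36(coord % 64). For reference, a minimal base36 encoder along these lines, an illustrative sketch rather than necessarily the exact world.base36encode shipped in world.py:

    def base36encode(number, alphabet='0123456789abcdefghijklmnopqrstuvwxyz'):
        """Encode an integer in base 36; negatives keep a leading '-'."""
        if number == 0:
            return '0'
        sign = '-' if number < 0 else ''
        number = abs(number)
        digits = []
        while number:
            number, i = divmod(number, 36)
            digits.append(alphabet[i])
        return sign + ''.join(reversed(digits))

So base36encode(61) == '1p', and since Python's modulo returns non-negative results for a positive divisor, (-3) % 64 == 61 keeps the cache directory names sign-free.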
@@ -139,23 +141,39 @@ def find_oldimage(chunkfile, cached, cave):
     if key in cached[cachename]:
         oldimg_path = cached[cachename][key]
         _, oldimg = os.path.split(oldimg_path)
-        logging.debug("Found cached image {0}".format(oldimg))
+        #logging.debug("Found cached image {0}".format(oldimg))
     return oldimg, oldimg_path
 
-def check_cache(chunkfile, oldimg):
+def check_cache(world, chunkXY, oldimg):
+    """Returns True if oldimg is OK to use (i.e. not stale)"""
+    # TODO read the region file and get the timestamp??
+    # TODO currently, just use the mtime on the region file
+    # TODO (which will cause a single chunk update to invalidate everything in the region)
+
+    if not oldimg[1]: return False
+    chunkfile = os.path.join(world.worlddir, "region", "r.%d.%d.mcr" % (chunkXY[0]//32, chunkXY[1]//32))
+
+    with open(chunkfile, "rb") as f:
+        region = nbt.MCRFileReader(f)
+        mtime = region.get_chunk_timestamp(chunkXY[0], chunkXY[1])
+    #logging.debug("checking cache %s against %s %d", chunkfile, oldimg[1], mtime)
     try:
-        if oldimg[1] and os.path.getmtime(chunkfile) <= os.path.getmtime(oldimg[1]):
+        if mtime <= os.path.getmtime(oldimg[1]):
             return True
         return False
     except OSError:
         return False
 
-def render_and_save(chunkfile, cachedir, worldobj, oldimg, cave=False, queue=None):
+# chunkcoords should be the coordinates of a possible chunk. it may not exist
+def render_and_save(chunkcoords, cachedir, worldobj, oldimg, cave=False, queue=None):
     """Used as the entry point for the multiprocessing workers (since processes
     can't target bound methods) or to easily render and save one chunk
 
-    Returns the image file location"""
-    a = ChunkRenderer(chunkfile, cachedir, worldobj, oldimg, queue)
+    chunkcoords is a tuple: (chunkX, chunkY)
+
+    If the chunk doesn't exist, return None.
+    Else, returns the image file location"""
+    a = ChunkRenderer(chunkcoords, cachedir, worldobj, oldimg, queue)
     try:
         return a.render_and_save(cave)
     except ChunkCorrupt:
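The chunk-to-region arithmetic above is the core of the McRegion change: each region file r.X.Y.mcr holds a 32x32 block of chunks, so floor division picks the region and modulo picks the slot inside it. A worked example with arbitrary coordinates:

    chunkX, chunkY = -75, 40
    regionfile = "r.%d.%d.mcr" % (chunkX // 32, chunkY // 32)  # "r.-3.1.mcr"
    slot = (chunkX % 32, chunkY % 32)                          # (21, 8)

Python's floor division and modulo make this work for negative coordinates too: -75 // 32 == -3 and -75 % 32 == 21.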
@@ -177,36 +195,53 @@ def render_and_save(chunkfile, cachedir, worldobj, oldimg, cave=False, queue=Non
 class ChunkCorrupt(Exception):
     pass
 
+class NoSuchChunk(Exception):
+    pass
+
 class ChunkRenderer(object):
-    def __init__(self, chunkfile, cachedir, worldobj, oldimg, queue):
-        """Make a new chunk renderer for the given chunkfile.
-        chunkfile should be a full path to the .dat file to process
+    def __init__(self, chunkcoords, cachedir, worldobj, oldimg, queue):
+        """Make a new chunk renderer for the given chunk coordinates.
+        chunkcoords should be a tuple: (chunkX, chunkY)
 
         cachedir is a directory to save the resulting chunk images to
         """
         self.queue = queue
+        # derive based on worlddir and chunkcoords
+        self.regionfile = os.path.join(worldobj.worlddir, "region",
+                "r.%d.%d.mcr" % (chunkcoords[0] // 32, chunkcoords[1]//32))
+
-        if not os.path.exists(chunkfile):
-            raise ValueError("Could not find chunkfile")
-        self.chunkfile = chunkfile
-        destdir, filename = os.path.split(self.chunkfile)
-        filename_split = filename.split(".")
-        chunkcoords = filename_split[1:3]
-        self.coords = map(world.base36decode, chunkcoords)
-        self.blockid = ".".join(chunkcoords)
+        if not os.path.exists(self.regionfile):
+            raise ValueError("Could not find regionfile: %s" % self.regionfile)
+
+        ## TODO TODO all of this class
+        #destdir, filename = os.path.split(self.chunkfile)
+        #filename_split = filename.split(".")
+        #chunkcoords = filename_split[1:3]
+
+        #self.coords = map(world.base36decode, chunkcoords)
+        self.blockid = "%d.%d" % chunkcoords
 
         # chunk coordinates (useful for converting local block coords to
         # global block coords)
-        self.chunkX = int(filename_split[1], base=36)
-        self.chunkY = int(filename_split[2], base=36)
+        self.chunkX = chunkcoords[0]
+        self.chunkY = chunkcoords[1]
 
         self.world = worldobj
 
         # Cachedir here is the base directory of the caches. We need to go 2
         # levels deeper according to the chunk file. Get the last 2 components
         # of destdir and use that
-        moredirs, dir2 = os.path.split(destdir)
-        _, dir1 = os.path.split(moredirs)
-        self.cachedir = os.path.join(cachedir, dir1, dir2)
+        ##moredirs, dir2 = os.path.split(destdir)
+        ##_, dir1 = os.path.split(moredirs)
+        self.cachedir = os.path.join(cachedir,
+                world.base36encode(self.chunkX%64),
+                world.base36encode(self.chunkY%64))
+
+        #logging.debug("cache location for this chunk: %s", self.cachedir)
         self.oldimg, self.oldimg_path = oldimg
 
@@ -229,9 +264,12 @@ class ChunkRenderer(object):
         """Loads and returns the level structure"""
         if not hasattr(self, "_level"):
             try:
-                self._level = get_lvldata(self.chunkfile)
+                self._level = get_lvldata(self.regionfile, self.chunkX, self.chunkY)
+            except NoSuchChunk, e:
+                #logging.debug("Skipping non-existent chunk")
+                raise
             except Exception, e:
-                logging.warning("Error opening chunk file %s. It may be corrupt. %s", self.chunkfile, e)
+                logging.warning("Error opening chunk file %s. It may be corrupt. %s", self.regionfile, e)
                 raise ChunkCorrupt(str(e))
         return self._level
     level = property(_load_level)
@@ -259,13 +297,13 @@ class ChunkRenderer(object):
 
     def _load_left(self):
         """Loads and sets data from lower-left chunk"""
-        chunk_path = self.world.get_chunk_path(self.coords[0] - 1, self.coords[1])
+        chunk_path = self.world.get_region_path(self.chunkX - 1, self.chunkY)
         try:
-            chunk_data = get_lvldata(chunk_path)
+            chunk_data = get_lvldata(chunk_path, self.chunkX - 1, self.chunkY)
             self._left_skylight = get_skylight_array(chunk_data)
             self._left_blocklight = get_blocklight_array(chunk_data)
             self._left_blocks = get_blockarray(chunk_data)
-        except IOError:
+        except NoSuchChunk:
             self._left_skylight = None
             self._left_blocklight = None
             self._left_blocks = None
@@ -293,13 +331,13 @@ class ChunkRenderer(object):
 
     def _load_right(self):
         """Loads and sets data from lower-right chunk"""
-        chunk_path = self.world.get_chunk_path(self.coords[0], self.coords[1] + 1)
+        chunk_path = self.world.get_region_path(self.chunkX, self.chunkY + 1)
         try:
-            chunk_data = get_lvldata(chunk_path)
+            chunk_data = get_lvldata(chunk_path, self.chunkX, self.chunkY + 1)
             self._right_skylight = get_skylight_array(chunk_data)
             self._right_blocklight = get_blocklight_array(chunk_data)
             self._right_blocks = get_blockarray(chunk_data)
-        except IOError:
+        except NoSuchChunk:
             self._right_skylight = None
             self._right_blocklight = None
             self._right_blocks = None
@@ -327,13 +365,13 @@ class ChunkRenderer(object):
 
     def _load_up_right(self):
         """Loads and sets data from upper-right chunk"""
-        chunk_path = self.world.get_chunk_path(self.coords[0] + 1, self.coords[1])
+        chunk_path = self.world.get_region_path(self.chunkX + 1, self.chunkY)
         try:
-            chunk_data = get_lvldata(chunk_path)
+            chunk_data = get_lvldata(chunk_path, self.chunkX + 1, self.chunkY)
             self._up_right_skylight = get_skylight_array(chunk_data)
             self._up_right_blocklight = get_blocklight_array(chunk_data)
             self._up_right_blocks = get_blockarray(chunk_data)
-        except IOError:
+        except NoSuchChunk:
             self._up_right_skylight = None
             self._up_right_blocklight = None
             self._up_right_blocks = None
@@ -347,13 +385,13 @@ class ChunkRenderer(object):
 
     def _load_up_left(self):
         """Loads and sets data from upper-left chunk"""
-        chunk_path = self.world.get_chunk_path(self.coords[0], self.coords[1] - 1)
+        chunk_path = self.world.get_region_path(self.chunkX, self.chunkY - 1)
         try:
-            chunk_data = get_lvldata(chunk_path)
+            chunk_data = get_lvldata(chunk_path, self.chunkX, self.chunkY - 1)
             self._up_left_skylight = get_skylight_array(chunk_data)
             self._up_left_blocklight = get_blocklight_array(chunk_data)
             self._up_left_blocks = get_blockarray(chunk_data)
-        except IOError:
+        except NoSuchChunk:
             self._up_left_skylight = None
             self._up_left_blocklight = None
             self._up_left_blocks = None
@@ -448,6 +486,7 @@ class ChunkRenderer(object):
         is up to date, this method doesn't render anything.
         """
         blockid = self.blockid
+
 
         # Reasons for the code to get to this point:
         # 1) An old image doesn't exist
@@ -457,13 +496,17 @@ class ChunkRenderer(object):
         # the image was invalid and deleted (sort of the same as (1))
 
         # What /should/ the image be named, go ahead and hash the block array
-        dest_filename = "img.{0}.{1}.{2}.png".format(
+        try:
+            dest_filename = "img.{0}.{1}.{2}.png".format(
                 blockid,
                 "cave" if cave else "nocave",
                 self._hash_blockarray(),
             )
+        except NoSuchChunk, e:
+            return None
 
         dest_path = os.path.join(self.cachedir, dest_filename)
+        #logging.debug("cache filename: %s", dest_path)
 
         if self.oldimg:
             if dest_filename == self.oldimg:
@@ -471,14 +514,17 @@ class ChunkRenderer(object):
                 # hashes match.
                 # Before we return it, update its mtime so the next round
                 # doesn't have to check the hash
+                # TODO confirm hash checking is correct (it should be)
                 os.utime(dest_path, None)
-                logging.debug("Using cached image")
+                logging.debug("Using cached image, and updating utime")
                 return dest_path
             else:
                 # Remove old image for this chunk. Anything already existing is
                 # either corrupt or out of date
                 os.unlink(self.oldimg_path)
 
+        logging.debug("doing a real real render")
         # Render the chunk
         img = self.chunk_render(cave=cave)
         # Save it
@@ -488,6 +534,7 @@ class ChunkRenderer(object):
             os.unlink(dest_path)
             raise
         # Return its location
+        #raise Exception("early exit")
         return dest_path
 
     def calculate_darkness(self, skylight, blocklight):
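render_and_save keys its cache on a content hash: an unchanged block array produces the same img.<blockid>.<cave|nocave>.<hash>.png name on every run, so a simple mtime bump marks it fresh. A sketch of the naming scheme; the md5 here and the blockid value are illustrative, since _hash_blockarray's exact digest isn't shown in this diff:

    import hashlib

    blocks = "\x01\x02\x03" * 100   # stand-in for the chunk's raw block bytes
    digest = hashlib.md5(blocks).hexdigest()
    dest_filename = "img.{0}.{1}.{2}.png".format("12.-4", "nocave", digest)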
gmap.py  (34 lines changed)
@@ -36,8 +36,8 @@ import world
 import quadtree
 
 helptext = """
-%prog [OPTIONS] <World # / Path to World> <tiles dest dir>
-%prog -d <World # / Path to World / Path to cache dir> [tiles dest dir]"""
+%prog [OPTIONS] <World # / Name / Path to World> <tiles dest dir>
+%prog -d <World # / Name / Path to World / Path to cache dir> [tiles dest dir]"""
 
 def main():
     try:
@@ -69,11 +69,23 @@ def main():
     worlddir = args[0]
 
     if not os.path.exists(worlddir):
+        # world given is either world number, or name
+        worlds = world.get_worlds()
         try:
             worldnum = int(worlddir)
-            worlddir = world.get_worlds()[worldnum]['path']
-        except (ValueError, KeyError):
-            print "Invalid world number or directory"
+            worlddir = worlds[worldnum]['path']
+        except ValueError:
+            # it wasn't a number or path, try using it as a name
+            try:
+                worlddir = worlds[worlddir]['path']
+            except KeyError:
+                # it's not a number, name, or path
+                print "Invalid world name or path"
+                parser.print_help()
+                sys.exit(1)
+        except KeyError:
+            # it was an invalid number
+            print "Invalid world number"
             parser.print_help()
             sys.exit(1)
 
@@ -176,13 +188,21 @@ def list_worlds():
         print 'No world saves found in the usual place'
         return
     print "Detected saves:"
-    for num, info in sorted(worlds.iteritems()):
+    for name, info in sorted(worlds.iteritems()):
+        if isinstance(name, basestring) and name.startswith("World") and len(name) == 6:
+            try:
+                world_n = int(name[-1])
+                # we'll catch this one later, when it shows up as an
+                # integer key
+                continue
+            except ValueError:
+                pass
         timestamp = time.strftime("%Y-%m-%d %H:%M",
                                   time.localtime(info['LastPlayed'] / 1000))
         playtime = info['Time'] / 20
         playstamp = '%d:%02d' % (playtime / 3600, playtime / 60 % 60)
         size = "%.2fMB" % (info['SizeOnDisk'] / 1024. / 1024.)
-        print "World %s: %s Playtime: %s Modified: %s" % (num, size, playstamp, timestamp)
+        print "World %s: %s Playtime: %s Modified: %s" % (name, size, playstamp, timestamp)
 
 
 if __name__ == "__main__":
nbt.py  (197 lines changed)
@@ -13,18 +13,44 @@
 # You should have received a copy of the GNU General Public License along
 # with the Overviewer. If not, see <http://www.gnu.org/licenses/>.
 
-import gzip
+import gzip, zlib
 import struct
+import StringIO
+import os
+
+# decorator to handle filename or object as first parameter
+def _file_loader(func):
+    def wrapper(fileobj, *args):
+        if isinstance(fileobj, basestring):
+            if not os.path.isfile(fileobj):
+                return None
+
+            # Is actually a filename
+            fileobj = open(fileobj, 'rb')
+        return func(fileobj, *args)
+    return wrapper
 
+@_file_loader
 def load(fileobj):
-    if isinstance(fileobj, basestring):
-        # Is actually a filename
-        fileobj = open(fileobj, 'rb')
     return NBTFileReader(fileobj).read_all()
 
+@_file_loader
+def load_from_region(fileobj, x, y):
+    nbt = MCRFileReader(fileobj).load_chunk(x, y)
+    if not nbt:
+        return None ## return None. I think this is how we should indicate missing chunks
+        #raise IOError("No such chunk in region: (%i, %i)" % (x, y))
+    return nbt.read_all()
+
 class NBTFileReader(object):
-    def __init__(self, fileobj):
-        self._file = gzip.GzipFile(fileobj=fileobj, mode='rb')
+    def __init__(self, fileobj, is_gzip=True):
+        if is_gzip:
+            self._file = gzip.GzipFile(fileobj=fileobj, mode='rb')
+        else:
+            # pure zlib stream -- maybe later replace this with
+            # a custom zlib file object?
+            data = zlib.decompress(fileobj.read())
+            self._file = StringIO.StringIO(data)
 
     # These private methods read the payload only of the following types
     def _read_tag_end(self):
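The _file_loader decorator lets both load() and load_from_region() accept either a path or an already-open file, returning None for paths that don't exist. Both call styles (paths hypothetical):

    level = nbt.load("/tmp/save/level.dat")            # filename form
    with open("/tmp/save/level.dat", "rb") as f:
        level = nbt.load(f)                            # file-object form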
@@ -143,3 +169,162 @@ class NBTFileReader(object):
|
|||||||
|
|
||||||
return name, payload
|
return name, payload
|
||||||
|
|
||||||
|
|
||||||
|
# For reference, the MCR format is outlined at
|
||||||
|
# <http://www.minecraftwiki.net/wiki/Beta_Level_Format>
|
||||||
|
class MCRFileReader(object):
|
||||||
|
"""A class for reading chunk region files, as introduced in the
|
||||||
|
Beta 1.3 update. It provides functions for opening individual
|
||||||
|
chunks (as instances of NBTFileReader), getting chunk timestamps,
|
||||||
|
and for listing chunks contained in the file."""
|
||||||
|
|
||||||
|
def __init__(self, fileobj):
|
||||||
|
self._file = fileobj
|
||||||
|
|
||||||
|
# cache used when the entire header tables are read in get_chunks()
|
||||||
|
self._locations = None
|
||||||
|
self._timestamps = None
|
||||||
|
self._chunks = None
|
||||||
|
|
||||||
|
def _read_24bit_int(self):
|
||||||
|
"""Read in a 24-bit, big-endian int, used in the chunk
|
||||||
|
location table."""
|
||||||
|
|
||||||
|
ret = 0
|
||||||
|
bytes = self._file.read(3)
|
||||||
|
for i in xrange(3):
|
||||||
|
ret = ret << 8
|
||||||
|
ret += struct.unpack("B", bytes[i])[0]
|
||||||
|
|
||||||
|
return ret
|
||||||
|
|
||||||
|
def _read_chunk_location(self, x=None, y=None):
|
||||||
|
"""Read and return the (offset, length) of the given chunk
|
||||||
|
coordinate, or None if the requested chunk doesn't exist. x
|
||||||
|
and y must be between 0 and 31, or None. If they are None,
|
||||||
|
then there will be no file seek before doing the read."""
|
||||||
|
|
||||||
|
if x != None and y != None:
|
||||||
|
if (not x >= 0) or (not x < 32) or (not y >= 0) or (not y < 32):
|
||||||
|
raise ValueError("Chunk location out of range.")
|
||||||
|
|
||||||
|
# check for a cached value
|
||||||
|
if self._locations:
|
||||||
|
return self._locations[x + y * 32]
|
||||||
|
|
||||||
|
# go to the correct entry in the chunk location table
|
||||||
|
self._file.seek(4 * (x + y * 32))
|
||||||
|
|
||||||
|
# 3-byte offset in 4KiB sectors
|
||||||
|
offset_sectors = self._read_24bit_int()
|
||||||
|
|
||||||
|
# 1-byte length in 4KiB sectors, rounded up
|
||||||
|
byte = self._file.read(1)
|
||||||
|
length_sectors = struct.unpack("B", byte)[0]
|
||||||
|
|
||||||
|
# check for empty chunks
|
||||||
|
if offset_sectors == 0 or length_sectors == 0:
|
||||||
|
return None
|
||||||
|
|
||||||
|
return (offset_sectors * 4096, length_sectors * 4096)
|
||||||
|
|
||||||
|
def _read_chunk_timestamp(self, x=None, y=None):
|
||||||
|
"""Read and return the last modification time of the given
|
||||||
|
chunk coordinate. x and y must be between 0 and 31, or
|
||||||
|
None. If they are, None, then there will be no file seek
|
||||||
|
before doing the read."""
|
||||||
|
|
||||||
|
if x != None and y != None:
|
||||||
|
if (not x >= 0) or (not x < 32) or (not y >= 0) or (not y < 32):
|
||||||
|
raise ValueError("Chunk location out of range.")
|
||||||
|
|
||||||
|
# check for a cached value
|
||||||
|
if self._timestamps:
|
||||||
|
return self._timestamps[x + y * 32]
|
||||||
|
|
||||||
|
# go to the correct entry in the chunk timestamp table
|
||||||
|
self._file.seek(4 * (x + y * 32) + 4096)
|
||||||
|
|
||||||
|
bytes = self._file.read(4)
|
||||||
|
timestamp = struct.unpack(">I", bytes)[0]
|
||||||
|
|
||||||
|
return timestamp
|
||||||
|
|
||||||
|
def get_chunks(self):
|
||||||
|
"""Return a list of all chunks contained in this region file,
|
||||||
|
as a list of (x, y) coordinate tuples. To load these chunks,
|
||||||
|
provide these coordinates to load_chunk()."""
|
||||||
|
|
||||||
|
if self._chunks:
|
||||||
|
return self._chunks
|
||||||
|
|
||||||
|
self._chunks = []
|
||||||
|
self._locations = []
|
||||||
|
self._timestamps = []
|
||||||
|
|
||||||
|
# go to the beginning of the file
|
||||||
|
self._file.seek(0)
|
||||||
|
|
||||||
|
# read chunk location table
|
||||||
|
for y in xrange(32):
|
||||||
|
for x in xrange(32):
|
||||||
|
location = self._read_chunk_location()
|
||||||
|
self._locations.append(location)
|
||||||
|
if location:
|
||||||
|
self._chunks.append((x, y))
|
||||||
|
|
||||||
|
# read chunk timestamp table
|
||||||
|
for y in xrange(32):
|
||||||
|
for x in xrange(32):
|
||||||
|
timestamp = self._read_chunk_timestamp()
|
||||||
|
self._timestamps.append(timestamp)
|
||||||
|
|
||||||
|
return self._chunks
|
||||||
|
|
||||||
|
def get_chunk_timestamp(self, x, y):
|
||||||
|
"""Return the given chunk's modification time. If the given
|
||||||
|
chunk doesn't exist, this number may be nonsense. Like
|
||||||
|
load_chunk(), this will wrap x and y into the range [0, 31].
|
||||||
|
"""
|
||||||
|
|
||||||
|
return self._read_chunk_timestamp(x % 32, y % 32)
|
||||||
|
|
||||||
|
def load_chunk(self, x, y):
|
||||||
|
"""Return a NBTFileReader instance for the given chunk, or
|
||||||
|
None if the given chunk doesn't exist in this region file. If
|
||||||
|
you provide an x or y not between 0 and 31, it will be
|
||||||
|
modulo'd into this range (x % 32, etc.) This is so you can
|
||||||
|
provide chunk coordinates in global coordinates, and still
|
||||||
|
have the chunks load out of regions properly."""
|
||||||
|
|
||||||
|
location = self._read_chunk_location(x % 32, y % 32)
|
||||||
|
if not location:
|
||||||
|
return None
|
||||||
|
|
||||||
|
# seek to the data
|
||||||
|
self._file.seek(location[0])
|
||||||
|
|
||||||
|
# read in the chunk data header
|
||||||
|
bytes = self._file.read(4)
|
||||||
|
data_length = struct.unpack(">I", bytes)[0]
|
||||||
|
bytes = self._file.read(1)
|
||||||
|
compression = struct.unpack("B", bytes)[0]
|
||||||
|
|
||||||
|
# figure out the compression
|
||||||
|
is_gzip = True
|
||||||
|
if compression == 1:
|
||||||
|
# gzip -- not used by the official client, but trivial to support here so...
|
||||||
|
is_gzip = True
|
||||||
|
elif compression == 2:
|
||||||
|
# deflate -- pure zlib stream
|
||||||
|
is_gzip = False
|
||||||
|
else:
|
||||||
|
# unsupported!
|
||||||
|
raise Exception("Unsupported chunk compression type: %i" % (compression,))
|
||||||
|
|
||||||
|
# turn the rest of the data into a StringIO object
|
||||||
|
# (using data_length - 1, as we already read 1 byte for compression)
|
||||||
|
data = self._file.read(data_length - 1)
|
||||||
|
data = StringIO.StringIO(data)
|
||||||
|
|
||||||
|
return NBTFileReader(data, is_gzip=is_gzip)
|
||||||
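A region file begins with two 4 KiB header tables of 1024 big-endian entries each, indexed by x + y*32: first the location table (a 3-byte sector offset plus a 1-byte sector count, in 4 KiB sectors), then the timestamp table at byte 4096. A self-contained sketch that packs one location entry and decodes it the same way MCRFileReader does:

    import struct, StringIO

    # fake entry: chunk data starts at sector 2 and spans 1 sector
    entry = StringIO.StringIO(struct.pack(">I", (2 << 8) | 1))

    offset_sectors = 0
    for i in xrange(3):                 # the 24-bit read in _read_24bit_int
        offset_sectors = (offset_sectors << 8) + struct.unpack("B", entry.read(1))[0]
    length_sectors = struct.unpack("B", entry.read(1))[0]

    assert (offset_sectors * 4096, length_sectors * 4096) == (8192, 4096)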
quadtree.py

@@ -27,7 +27,7 @@ import logging
 import util
 import cPickle
 import stat
-from time import gmtime, strftime
+from time import gmtime, strftime, sleep
 
 from PIL import Image
 
@@ -297,10 +297,12 @@ class QuadtreeGen(object):
 
         # This image is rendered at:
         dest = os.path.join(self.destdir, "tiles", *(str(x) for x in path))
+        #logging.debug("this is rendered at %s", dest)
 
         # And uses these chunks
         tilechunks = self._get_chunks_in_range(colstart, colend, rowstart,
                 rowend)
+        #logging.debug(" tilechunks: %r", tilechunks)
 
         # Put this in the pool
         # (even if tilechunks is empty, render_worldtile will delete
textures.py

@@ -281,7 +281,7 @@ def _build_blockimages():
     # 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47
     -1, -1, -1, -1, -1, 13, 12, 29, 28, 23, 22, 6, 6, 7, 8, 35, # Gold/iron blocks? Doublestep? TNT from above?
     # 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63
-    36, 37, 80, -1, 65, 4, 25,101, 98, 24, 43, -1, 86, -1, -1, -1, # Torch from above? leaving out fire. Redstone wire? Crops/furnaces handled elsewhere. sign post
+    36, 37, 80, -1, 65, 4, 25, -1, 98, 24, 43, -1, 86, -1, -1, -1, # Torch from above? leaving out fire. Redstone wire? Crops/furnaces handled elsewhere. sign post
     # 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79
     -1, -1, -1, 16, -1, -1, -1, -1, -1, 51, 51, -1, -1, 1, 66, 67, # door,ladder left out. Minecart rail orientation
     # 80 81 82 83 84 85 86 87 88 89 90 91
world.py  (159 lines changed)
@@ -22,6 +22,7 @@ import sys
 import logging
 import cPickle
 import collections
+import itertools
 
 import numpy
 
@@ -45,7 +46,10 @@ def _convert_coords(chunks):
     the image each one should be.
 
     returns mincol, maxcol, minrow, maxrow, chunks_translated
-    chunks_translated is a list of (col, row, filename)
+    chunks_translated is a list of (col, row, (chunkX, chunkY))
+
+    The (chunkX, chunkY) tuple is the chunk coords, used to identify the
+    chunk within its region file
     """
     chunks_translated = []
     # columns are determined by the sum of the chunk coords, rows are the
@@ -60,7 +64,7 @@ def _convert_coords(chunks):
         row = c[1] - c[0]
         minrow = min(minrow, row)
         maxrow = max(maxrow, row)
-        chunks_translated.append((col, row, c[2]))
+        chunks_translated.append((col, row, (c[0],c[1])))
 
     return mincol, maxcol, minrow, maxrow, chunks_translated
 
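_convert_coords projects chunk (X, Y) onto the diagonal grid as col = X + Y (the "sum of the chunk coords" comment) and row = Y - X (the `row = c[1] - c[0]` line). Checking a few points:

    for chunkX, chunkY in [(0, 0), (1, 0), (0, 1), (5, 3)]:
        col, row = chunkX + chunkY, chunkY - chunkX
        print (chunkX, chunkY), "->", (col, row)
    # (0, 0) -> (0, 0)    (1, 0) -> (1, -1)
    # (0, 1) -> (1, 1)    (5, 3) -> (8, -2)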
@@ -112,6 +116,14 @@ class WorldRenderer(object):
         self.cachedir = cachedir
         self.useBiomeData = useBiomeData
 
+        # figure out which chunk format is in use
+        # if not mcregion, error out early until we can add support
+        data = nbt.load(os.path.join(self.worlddir, "level.dat"))[1]['Data']
+        #print data
+        if not ('version' in data and data['version'] == 19132):
+            logging.error("Sorry, this version of Minecraft-Overviewer only works with the new McRegion chunk format")
+            sys.exit(1)
+
         if self.useBiomeData:
             textures.prepareBiomeData(worlddir)
 
@@ -119,6 +131,7 @@ class WorldRenderer(object):
 
         # In order to avoid having to look up the cache file names in
         # ChunkRenderer, get them all and store them here
+        # TODO change how caching works
         for root, dirnames, filenames in os.walk(cachedir):
             for filename in filenames:
                 if not filename.endswith('.png') or not filename.startswith("img."):
@@ -130,7 +143,6 @@ class WorldRenderer(object):
                 bits = '.'.join((x, z, cave))
                 cached[dir][bits] = os.path.join(root, filename)
 
-
         # stores Points Of Interest to be mapped with markers
         # a list of dictionaries, see below for an example
         self.POI = []
@@ -155,6 +167,7 @@ class WorldRenderer(object):
         if not self.chunklist:
             return None
 
+        raise Exception("not yet working") ## TODO correctly reimplement this for mcregion
         # Get a list of the (chunkx, chunky, filename) from the passed in list
         # of filenames
         chunklist = []
@@ -182,15 +195,12 @@ class WorldRenderer(object):
 
         return inclusion_set
 
-    def get_chunk_path(self, chunkX, chunkY):
-        """Returns the path to the chunk file at (chunkX, chunkY), if
-        it exists."""
-
-        chunkFile = "%s/%s/c.%s.%s.dat" % (base36encode(chunkX % 64),
-                                           base36encode(chunkY % 64),
-                                           base36encode(chunkX),
-                                           base36encode(chunkY))
+    def get_region_path(self, chunkX, chunkY):
+        """Returns the path to the region that contains chunk (chunkX, chunkY)
+        """
 
+        chunkFile = "region/r.%s.%s.mcr" % (chunkX//32, chunkY//32)
 
         return os.path.join(self.worlddir, chunkFile)
 
     def findTrueSpawn(self):
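For contrast, the same chunk under both layouts (coordinates arbitrary; the old form assumes base36encode renders negatives with a leading '-'):

    chunkX, chunkY = 70, -3
    # old Alpha layout, one file per chunk in nested base36 directories:
    #   "%s/%s/c.%s.%s.dat"  ->  "6/1p/c.1y.-3.dat"
    # new McRegion layout, 1024 chunks per file:
    print "region/r.%s.%s.mcr" % (chunkX//32, chunkY//32)   # region/r.2.-1.mcr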
@@ -209,9 +219,9 @@ class WorldRenderer(object):
         chunkY = spawnZ/16
 
         ## The filename of this chunk
-        chunkFile = self.get_chunk_path(chunkX, chunkY)
+        chunkFile = self.get_region_path(chunkX, chunkY)
 
-        data=nbt.load(chunkFile)[1]
+        data=nbt.load_from_region(chunkFile, chunkX, chunkY)[1]
         level = data['Level']
         blockArray = numpy.frombuffer(level['Blocks'], dtype=numpy.uint8).reshape((16,16,128))
 
@@ -232,14 +242,17 @@ class WorldRenderer(object):
         """Starts the render. This returns when it is finished"""
 
         logging.info("Scanning chunks")
-        raw_chunks = self._find_chunkfiles()
+        raw_chunks = self._get_chunklist()
         logging.debug("Done scanning chunks")
 
         # Translate chunks to our diagonal coordinate system
+        # TODO
        mincol, maxcol, minrow, maxrow, chunks = _convert_coords(raw_chunks)
         del raw_chunks # Free some memory
 
         self.chunkmap = self._render_chunks_async(chunks, procs)
+        logging.debug("world chunkmap has len %d", len(self.chunkmap))
+
 
         self.mincol = mincol
         self.maxcol = maxcol
@@ -248,35 +261,59 @@ class WorldRenderer(object):
 
         self.findTrueSpawn()
 
-    def _find_chunkfiles(self):
-        """Returns a list of all the chunk file locations, and the file they
-        correspond to.
-
-        Returns a list of (chunkx, chunky, filename) where chunkx and chunky are
-        given in chunk coordinates. Use convert_coords() to turn the resulting list
-        into an oblique coordinate system.
-
-        Usually this scans the given worlddir, but will use the chunk list
-        given to the constructor if one was provided."""
+    def _find_regionfiles(self):
+        """Returns a list of all of the region files, along with their
+        coordinates
+
+        Returns (regionx, regiony, filename)"""
         all_chunks = []
 
-        for dirpath, dirnames, filenames in os.walk(self.worlddir):
+        for dirpath, dirnames, filenames in os.walk(os.path.join(self.worlddir, 'region')):
             if not dirnames and filenames and "DIM-1" not in dirpath:
                 for f in filenames:
-                    if f.startswith("c.") and f.endswith(".dat"):
+                    if f.startswith("r.") and f.endswith(".mcr"):
                         p = f.split(".")
-                        all_chunks.append((base36decode(p[1]), base36decode(p[2]),
+                        all_chunks.append((int(p[1]), int(p[2]),
                             os.path.join(dirpath, f)))
+        return all_chunks
+
+    def _get_chunklist(self):
+        """Returns a list of all possible chunk coordinates, based on the
+        available region files. Note that not all chunk coordinates will
+        exist. The chunk renderer will know how to ignore non-existent chunks
+
+        returns a list of (chunkx, chunky, regionfile) where regionfile is
+        the region file that contains this chunk
+
+        TODO: a --cachedir implementation should involve this method
+
+        """
+
+        all_chunks = []
+
+        regions = self._find_regionfiles()
+        logging.debug("Found %d regions",len(regions))
+        for region in regions:
+            these_chunks = list(itertools.product(
+                range(region[0]*32,region[0]*32 + 32),
+                range(region[1]*32,region[1]*32 + 32)
+                ))
+            these_chunks = map(lambda x: (x[0], x[1], region[2]), these_chunks)
+            assert(len(these_chunks) == 1024)
+            all_chunks += these_chunks
+
         if not all_chunks:
             logging.error("Error: No chunks found!")
             sys.exit(1)
+        logging.debug("Total possible chunks: %d", len(all_chunks))
         return all_chunks
 
     def _render_chunks_async(self, chunks, processes):
         """Starts up a process pool and renders all the chunks asynchronously.
 
-        chunks is a list of (col, row, chunkfile)
+        chunks is a list of (col, row, (chunkX, chunkY)). Use chunkX,chunkY
+        to find the chunk data in a region file
 
         Returns a dictionary mapping (col, row) to the file where that
         chunk is rendered as an image
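_get_chunklist enumerates all 1024 candidate chunk coordinates for every region file, whether or not each chunk was ever generated; the misses surface later as NoSuchChunk and render as None. The product in miniature:

    import itertools

    rx, ry = 2, -1                       # hypothetical region r.2.-1.mcr
    coords = list(itertools.product(
        range(rx * 32, rx * 32 + 32),
        range(ry * 32, ry * 32 + 32)))
    assert len(coords) == 1024
    assert coords[0] == (64, -32) and coords[-1] == (95, -1)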
@@ -294,21 +331,24 @@ class WorldRenderer(object):
         if processes == 1:
             # Skip the multiprocessing stuff
             logging.debug("Rendering chunks synchronously since you requested 1 process")
-            for i, (col, row, chunkfile) in enumerate(chunks):
-                if inclusion_set and (col, row) not in inclusion_set:
-                    # Skip rendering, just find where the existing image is
-                    _, imgpath = chunk.find_oldimage(chunkfile, cached, self.caves)
-                    if imgpath:
-                        results[(col, row)] = imgpath
-                        continue
+            for i, (col, row, chunkXY) in enumerate(chunks):
+                ##TODO##/if inclusion_set and (col, row) not in inclusion_set:
+                ##TODO##/    # Skip rendering, just find where the existing image is
+                ##TODO##/    _, imgpath = chunk.find_oldimage(chunkfile, cached, self.caves)
+                ##TODO##/    if imgpath:
+                ##TODO##/        results[(col, row)] = imgpath
+                ##TODO##/        continue
 
-                oldimg = chunk.find_oldimage(chunkfile, cached, self.caves)
-                if chunk.check_cache(chunkfile, oldimg):
+                oldimg = chunk.find_oldimage(chunkXY, cached, self.caves)
+                # TODO remove this shortcircuit
+                if chunk.check_cache(self, chunkXY, oldimg):
                     result = oldimg[1]
                 else:
-                    result = chunk.render_and_save(chunkfile, self.cachedir, self, oldimg, queue=q)
-                results[(col, row)] = result
+                    #logging.debug("check cache failed, need to render (could be ghost chunk)")
+                    result = chunk.render_and_save(chunkXY, self.cachedir, self, oldimg, queue=q)
+                if result:
+                    results[(col, row)] = result
                 if i > 0:
                     try:
                         item = q.get(block=False)
@@ -324,20 +364,20 @@ class WorldRenderer(object):
             logging.debug("Rendering chunks in {0} processes".format(processes))
             pool = multiprocessing.Pool(processes=processes)
             asyncresults = []
-            for col, row, chunkfile in chunks:
-                if inclusion_set and (col, row) not in inclusion_set:
-                    # Skip rendering, just find where the existing image is
-                    _, imgpath = chunk.find_oldimage(chunkfile, cached, self.caves)
-                    if imgpath:
-                        results[(col, row)] = imgpath
-                        continue
+            for col, row, chunkXY in chunks:
+                ##TODO/if inclusion_set and (col, row) not in inclusion_set:
+                ##TODO/    # Skip rendering, just find where the existing image is
+                ##TODO/    _, imgpath = chunk.find_oldimage(chunkfile, cached, self.caves)
+                ##TODO/    if imgpath:
+                ##TODO/        results[(col, row)] = imgpath
+                ##TODO/        continue
 
-                oldimg = chunk.find_oldimage(chunkfile, cached, self.caves)
-                if chunk.check_cache(chunkfile, oldimg):
+                oldimg = chunk.find_oldimage(chunkXY, cached, self.caves)
+                if chunk.check_cache(self, chunkXY, oldimg):
                     result = FakeAsyncResult(oldimg[1])
                 else:
                     result = pool.apply_async(chunk.render_and_save,
-                            args=(chunkfile,self.cachedir,self, oldimg),
+                            args=(chunkXY,self.cachedir,self, oldimg),
                             kwds=dict(cave=self.caves, queue=q))
                 asyncresults.append((col, row, result))
 
@@ -384,7 +424,7 @@ def get_save_dir():
     return path
 
 def get_worlds():
-    "Returns {world # : level.dat information}"
+    "Returns {world # or name : level.dat information}"
     ret = {}
     save_dir = get_save_dir()
 
@@ -393,12 +433,17 @@ def get_worlds():
        return None
 
     for dir in os.listdir(save_dir):
+        world_dat = os.path.join(save_dir, dir, "level.dat")
+        if not os.path.exists(world_dat): continue
+        info = nbt.load(world_dat)[1]
+        info['Data']['path'] = os.path.join(save_dir, dir)
         if dir.startswith("World") and len(dir) == 6:
-            world_n = int(dir[-1])
-            world_dat = os.path.join(save_dir, dir, "level.dat")
-            if not os.path.exists(world_dat): continue
-            info = nbt.load(world_dat)[1]
-            info['Data']['path'] = os.path.join(save_dir, dir)
-            ret[world_n] = info['Data']
+            try:
+                world_n = int(dir[-1])
+                ret[world_n] = info['Data']
+            except ValueError:
+                pass
+        if 'LevelName' in info['Data'].keys():
+            ret[info['Data']['LevelName']] = info['Data']
 
     return ret
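Numbered saves now land in the returned dict twice, once under their integer and once under their LevelName, which is what lets gmap.py accept either form and what list_worlds() de-duplicates. A hypothetical result shape (paths and names invented for illustration):

    info = {'LevelName': 'World1', 'path': '/home/user/.minecraft/saves/World1'}
    worlds = {
        1: info,          # by number
        'World1': info,   # the same dict again, by name
    }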