Working rendering with the new McRegion format.
Notes:

* Currently only works with -p 1
* Caching is mostly compatible with existing caches, but not completely. This needs more testing and more code review.
* There are probably many code paths that will throw exceptions.
* Not ready for general use yet, but it is OK for testing.
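For context on the path arithmetic used throughout this diff: a McRegion file (r.X.Z.mcr) packs a 32x32 square of chunks, so a chunk's containing region is found by floor-dividing its chunk coordinates by 32, which is exactly what the new ChunkRenderer and get_region_path code below compute. A minimal sketch; the helper name region_path_for_chunk is illustrative and not part of this change:

```python
import os

def region_path_for_chunk(worlddir, chunk_x, chunk_y):
    """Path of the McRegion file that would hold chunk (chunk_x, chunk_y)."""
    # Each region file covers chunks [32*rx, 32*rx + 31] x [32*ry, 32*ry + 31].
    return os.path.join(worlddir, "region",
                        "r.%d.%d.mcr" % (chunk_x // 32, chunk_y // 32))
```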
chunk.py (109 changed lines)
@@ -20,6 +20,7 @@ import hashlib
 import logging
 import time
 import math
 import sys

 import nbt
 import textures
@@ -45,10 +46,13 @@ image
 # alpha_over extension, BUT this extension may fall back to PIL's
 # paste(), which DOES need the workaround.)

-def get_lvldata(filename):
-    """Takes a filename and returns the Level struct, which contains all the
+def get_lvldata(filename, x, y):
+    """Takes a filename and chunkcoords and returns the Level struct, which contains all the
     level info"""
-    return nbt.load(filename)[1]['Level']
+    d = nbt.load_from_region(filename, x, y)
+    if not d: raise NoSuchChunk(x,y)
+    return d[1]['Level']

 def get_blockarray(level):
     """Takes the level struct as returned from get_lvldata, and returns the
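A sketch of how a caller might use the new signature, where the chunk coordinates now select a chunk inside a region file instead of naming a per-chunk .dat file. The wrapper name and the region_path argument are illustrative assumptions, not code from this commit:

```python
import chunk  # the module modified above; provides get_lvldata and NoSuchChunk

def load_chunk_level(region_path, chunk_x, chunk_y):
    """Return the Level tag for one chunk, or None if the region has no such chunk."""
    try:
        return chunk.get_lvldata(region_path, chunk_x, chunk_y)
    except chunk.NoSuchChunk:
        # A region file has 32x32 chunk slots; many of them are simply empty.
        return None
```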
@@ -124,14 +128,13 @@ fluid_blocks = set([8,9,10,11])
 # (glass, half blocks)
 nospawn_blocks = set([20,44])

-def find_oldimage(chunkfile, cached, cave):
-    destdir, filename = os.path.split(chunkfile)
-    filename_split = filename.split(".")
-    blockid = ".".join(filename_split[1:3])
+def find_oldimage(chunkXY, cached, cave):
+    # TODO update this
+    blockid = "%d.%d" % chunkXY

     # Get the name of the existing image.
-    moredirs, dir2 = os.path.split(destdir)
-    dir1 = os.path.basename(moredirs)
+    dir1 = world.base36encode(chunkXY[0]%64)
+    dir2 = world.base36encode(chunkXY[1]%64)
     cachename = '/'.join((dir1, dir2))

     oldimg = oldimg_path = None
@@ -150,12 +153,16 @@ def check_cache(chunkfile, oldimg):
     except OSError:
         return False

-def render_and_save(chunkfile, cachedir, worldobj, oldimg, cave=False, queue=None):
+# chunkcoords should be the coordinates of a possible chunk. it may not exist
+def render_and_save(chunkcoords, cachedir, worldobj, oldimg, cave=False, queue=None):
     """Used as the entry point for the multiprocessing workers (since processes
     can't target bound methods) or to easily render and save one chunk

-    Returns the image file location"""
-    a = ChunkRenderer(chunkfile, cachedir, worldobj, oldimg, queue)
+    chunkcoords is a tuple: (chunkX, chunkY)
+
+    If the chunk doesn't exist, return None.
+    Else, returns the image file location"""
+    a = ChunkRenderer(chunkcoords, cachedir, worldobj, oldimg, queue)
     try:
         return a.render_and_save(cave)
     except ChunkCorrupt:
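Because render_and_save can now return None for coordinates whose chunk was never generated, callers have to filter those results out (the world.py hunk further down does this with `if result:`). A minimal sketch of that calling pattern; coords_iter, cached, and the other arguments are assumed placeholders:

```python
import chunk

def render_some(coords_iter, cachedir, worldobj, cached, caves=False):
    """Render a batch of possibly-absent chunks, skipping the ones that don't exist."""
    rendered = {}
    for (chunk_x, chunk_y) in coords_iter:
        oldimg = chunk.find_oldimage((chunk_x, chunk_y), cached, caves)
        result = chunk.render_and_save((chunk_x, chunk_y), cachedir, worldobj, oldimg)
        if result:  # None means the region file holds no chunk at these coordinates
            rendered[(chunk_x, chunk_y)] = result
    return rendered
```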
@@ -177,36 +184,53 @@ def render_and_save(chunkfile, cachedir, worldobj, oldimg, cave=False, queue=Non
 class ChunkCorrupt(Exception):
     pass

+class NoSuchChunk(Exception):
+    pass
+
 class ChunkRenderer(object):
-    def __init__(self, chunkfile, cachedir, worldobj, oldimg, queue):
-        """Make a new chunk renderer for the given chunkfile.
-        chunkfile should be a full path to the .dat file to process
+    def __init__(self, chunkcoords, cachedir, worldobj, oldimg, queue):
+        """Make a new chunk renderer for the given chunk coordinates.
+        chunkcoords should be a tuple: (chunkX, chunkY)

         cachedir is a directory to save the resulting chunk images to
         """
         self.queue = queue
+        # derive based on worlddir and chunkcoords
+        self.regionfile = os.path.join(worldobj.worlddir, "region",
+                "r.%d.%d.mcr" % (chunkcoords[0] // 32, chunkcoords[1]//32))

-        if not os.path.exists(chunkfile):
-            raise ValueError("Could not find chunkfile")
-        self.chunkfile = chunkfile
-        destdir, filename = os.path.split(self.chunkfile)
-        filename_split = filename.split(".")
-        chunkcoords = filename_split[1:3]
+        if not os.path.exists(self.regionfile):
+            raise ValueError("Could not find regionfile: %s" % self.regionfile)

-        self.coords = map(world.base36decode, chunkcoords)
-        self.blockid = ".".join(chunkcoords)
+        ## TODO TODO all of this class
+
+        #destdir, filename = os.path.split(self.chunkfile)
+        #filename_split = filename.split(".")
+        #chunkcoords = filename_split[1:3]
+
+        #self.coords = map(world.base36decode, chunkcoords)
+        self.blockid = "%d.%d" % chunkcoords

         # chunk coordinates (useful to converting local block coords to
         # global block coords)
-        self.chunkX = int(filename_split[1], base=36)
-        self.chunkY = int(filename_split[2], base=36)
+        self.chunkX = chunkcoords[0]
+        self.chunkY = chunkcoords[1]



         self.world = worldobj


         # Cachedir here is the base directory of the caches. We need to go 2
         # levels deeper according to the chunk file. Get the last 2 components
         # of destdir and use that
-        moredirs, dir2 = os.path.split(destdir)
-        _, dir1 = os.path.split(moredirs)
-        self.cachedir = os.path.join(cachedir, dir1, dir2)
+        ##moredirs, dir2 = os.path.split(destdir)
+        ##_, dir1 = os.path.split(moredirs)
+        self.cachedir = os.path.join(cachedir,
+                world.base36encode(self.chunkX%64),
+                world.base36encode(self.chunkY%64))

         #logging.debug("cache location for this chunk: %s", self.cachedir)
         self.oldimg, self.oldimg_path = oldimg
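The cache layout keeps the old two-level directory scheme but now derives it from the coordinates themselves: base36(chunkX % 64) / base36(chunkY % 64). A rough sketch of that derivation; b36 below is a simplified stand-in for world.base36encode (the real helper is not shown in this diff and may handle negative numbers differently):

```python
import os

_B36_DIGITS = "0123456789abcdefghijklmnopqrstuvwxyz"

def b36(number):
    """Simplified stand-in for world.base36encode (illustration only).

    Inputs here are already in 0..63 because Python's % 64 maps negative
    chunk coordinates into that range as well.
    """
    out = ""
    while True:
        number, rem = divmod(number, 36)
        out = _B36_DIGITS[rem] + out
        if number == 0:
            return out

def cache_dir_for(cachedir, chunk_x, chunk_y):
    # Mirrors the new ChunkRenderer cachedir computation: two base36 path parts.
    return os.path.join(cachedir, b36(chunk_x % 64), b36(chunk_y % 64))

# e.g. chunk (-5, 70): -5 % 64 == 59 -> "1n", and 70 % 64 == 6 -> "6"
```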
@@ -229,9 +253,12 @@ class ChunkRenderer(object):
         """Loads and returns the level structure"""
         if not hasattr(self, "_level"):
             try:
-                self._level = get_lvldata(self.chunkfile)
+                self._level = get_lvldata(self.regionfile, self.chunkX, self.chunkY)
+            except NoSuchChunk, e:
+                #logging.debug("Skipping non-existent chunk")
+                raise
             except Exception, e:
-                logging.warning("Error opening chunk file %s. It may be corrupt. %s", self.chunkfile, e)
+                logging.warning("Error opening chunk file %s. It may be corrupt. %s", self.regionfile, e)
                 raise ChunkCorrupt(str(e))
         return self._level
     level = property(_load_level)
@@ -259,13 +286,13 @@ class ChunkRenderer(object):

     def _load_left(self):
         """Loads and sets data from lower-left chunk"""
-        chunk_path = self.world.get_chunk_path(self.coords[0] - 1, self.coords[1])
+        chunk_path = self.world.get_region_path(self.chunkX - 1, self.chunkY)
         try:
-            chunk_data = get_lvldata(chunk_path)
+            chunk_data = get_lvldata(chunk_path, self.chunkX-1, self.chunkY)
             self._left_skylight = get_skylight_array(chunk_data)
             self._left_blocklight = get_blocklight_array(chunk_data)
             self._left_blocks = get_blockarray(chunk_data)
-        except IOError:
+        except NoSuchChunk:
             self._left_skylight = None
             self._left_blocklight = None
             self._left_blocks = None
@@ -293,13 +320,13 @@ class ChunkRenderer(object):

     def _load_right(self):
         """Loads and sets data from lower-right chunk"""
-        chunk_path = self.world.get_chunk_path(self.coords[0], self.coords[1] + 1)
+        chunk_path = self.world.get_region_path(self.chunkX, self.chunkY + 1)
         try:
-            chunk_data = get_lvldata(chunk_path)
+            chunk_data = get_lvldata(chunk_path, self.chunkX, self.chunkY+1)
             self._right_skylight = get_skylight_array(chunk_data)
             self._right_blocklight = get_blocklight_array(chunk_data)
             self._right_blocks = get_blockarray(chunk_data)
-        except IOError:
+        except NoSuchChunk:
             self._right_skylight = None
             self._right_blocklight = None
             self._right_blocks = None
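One detail these neighbour loaders now handle implicitly: the adjacent chunk can live in a different region file, so the path is recomputed per neighbour instead of being reused. A quick arithmetic check (not code from the diff):

```python
# Chunk X=32 sits in region 32 // 32 == 1, while its left neighbour X=31 sits
# in region 31 // 32 == 0, i.e. a different .mcr file entirely.
assert 32 // 32 == 1
assert 31 // 32 == 0
```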
@@ -449,6 +476,7 @@ class ChunkRenderer(object):
         """
         blockid = self.blockid


         # Reasons for the code to get to this point:
         # 1) An old image doesn't exist
         # 2) An old image exists, but the chunk was more recently modified (the
@@ -457,13 +485,18 @@ class ChunkRenderer(object):
         # the image was invalid and deleted (sort of the same as (1))

         # What /should/ the image be named, go ahead and hash the block array
-        dest_filename = "img.{0}.{1}.{2}.png".format(
+        try:
+            dest_filename = "img.{0}.{1}.{2}.png".format(
                 blockid,
                 "cave" if cave else "nocave",
                 self._hash_blockarray(),
                 )
+        except NoSuchChunk, e:
+            return None


         dest_path = os.path.join(self.cachedir, dest_filename)
         #logging.debug("cache filename: %s", dest_path)

         if self.oldimg:
             if dest_filename == self.oldimg:
@@ -479,6 +512,7 @@ class ChunkRenderer(object):
             # either corrupt or out of date
             os.unlink(self.oldimg_path)


         # Render the chunk
         img = self.chunk_render(cave=cave)
         # Save it
@@ -488,6 +522,7 @@ class ChunkRenderer(object):
             os.unlink(dest_path)
             raise
         # Return its location
+        #raise Exception("early exit")
         return dest_path

     def calculate_darkness(self, skylight, blocklight):
nbt.py (3 changed lines)
@@ -34,7 +34,8 @@ def load(fileobj):
 def load_from_region(fileobj, x, y):
     nbt = MCRFileReader(fileobj).load_chunk(x, y)
     if not nbt:
-        raise IOError("No such chunk in region: (%i, %i)" % (x, y))
+        return None ## return None. I think this is how we should indicate missing chunks
+        #raise IOError("No such chunk in region: (%i, %i)" % (x, y))
     return nbt.read_all()

 class NBTFileReader(object):
quadtree.py

@@ -27,7 +27,7 @@ import logging
 import util
 import cPickle
 import stat
-from time import gmtime, strftime
+from time import gmtime, strftime, sleep

 from PIL import Image
@@ -297,10 +297,12 @@ class QuadtreeGen(object):

         # This image is rendered at:
         dest = os.path.join(self.destdir, "tiles", *(str(x) for x in path))
+        #logging.debug("this is rendered at %s", dest)

         # And uses these chunks
         tilechunks = self._get_chunks_in_range(colstart, colend, rowstart,
                 rowend)
+        #logging.debug(" tilechunks: %r", tilechunks)

         # Put this in the pool
         # (even if tilechunks is empty, render_worldtile will delete
world.py (101 changed lines)
@@ -22,6 +22,7 @@ import sys
 import logging
 import cPickle
 import collections
+import itertools

 import numpy
@@ -45,7 +46,10 @@ def _convert_coords(chunks):
     the image each one should be.

     returns mincol, maxcol, minrow, maxrow, chunks_translated
-    chunks_translated is a list of (col, row, filename)
+    chunks_translated is a list of (col, row, (chunkX, chunkY))
+
+    The (chunkX, chunkY) tuple is the chunkCoords, used to identify the
+    chunk file
     """
     chunks_translated = []
     # columns are determined by the sum of the chunk coords, rows are the
@@ -60,7 +64,7 @@ def _convert_coords(chunks):
         row = c[1] - c[0]
         minrow = min(minrow, row)
         maxrow = max(maxrow, row)
-        chunks_translated.append((col, row, c[2]))
+        chunks_translated.append((col, row, (c[0],c[1])))

     return mincol, maxcol, minrow, maxrow, chunks_translated
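For reference, the oblique mapping itself is unchanged by this commit: a chunk's column is the sum of its chunk coordinates and its row is their difference, which is what lays the chunks out diagonally. A tiny sketch of the transform now applied to (chunkX, chunkY) tuples; the function name is illustrative:

```python
def to_oblique(chunk_x, chunk_y):
    """Map chunk coordinates to the renderer's diagonal (col, row) system."""
    col = chunk_x + chunk_y   # columns are the sum of the chunk coords
    row = chunk_y - chunk_x   # rows match row = c[1] - c[0] above
    return col, row

# e.g. chunk (3, 5) -> col 8, row 2
```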
@@ -118,7 +122,7 @@ class WorldRenderer(object):
         #print data
         if 'version' in data and data['version'] == 19132:
             logging.error("Sorry, Minecraft-Overviewer doesn't yet know how to read McRegion chunks")
-            sys.exit(1)
+            #sys.exit(1)

         if self.useBiomeData:
             textures.prepareBiomeData(worlddir)
@@ -127,6 +131,7 @@ class WorldRenderer(object):

         # In order to avoid having to look up the cache file names in
         # ChunkRenderer, get them all and store them here
+        # TODO change how caching works
         for root, dirnames, filenames in os.walk(cachedir):
             for filename in filenames:
                 if not filename.endswith('.png') or not filename.startswith("img."):
@@ -138,7 +143,6 @@ class WorldRenderer(object):
                 bits = '.'.join((x, z, cave))
                 cached[dir][bits] = os.path.join(root, filename)


         # stores Points Of Interest to be mapped with markers
         # a list of dictionaries, see below for an example
         self.POI = []
@@ -163,6 +167,7 @@ class WorldRenderer(object):
         if not self.chunklist:
             return None

+        raise Exception("not yet working") ## TODO correctly reimplement this for mcregion
         # Get a list of the (chunks, chunky, filename) from the passed in list
         # of filenames
         chunklist = []
@@ -190,14 +195,11 @@ class WorldRenderer(object):

         return inclusion_set

-    def get_chunk_path(self, chunkX, chunkY):
-        """Returns the path to the chunk file at (chunkX, chunkY), if
-        it exists."""
+    def get_region_path(self, chunkX, chunkY):
+        """Returns the path to the region that contains chunk (chunkX, chunkY)
+        """

-        chunkFile = "%s/%s/c.%s.%s.dat" % (base36encode(chunkX % 64),
-            base36encode(chunkY % 64),
-            base36encode(chunkX),
-            base36encode(chunkY))
+        chunkFile = "region/r.%s.%s.mcr" % (chunkX//32, chunkY//32)

         return os.path.join(self.worlddir, chunkFile)
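A quick worked example of the new path formula, mostly to note that Python's floor division keeps negative chunk coordinates in the correct region (the coordinates are hypothetical):

```python
# get_region_path(70, -3) formats "region/r.%s.%s.mcr" % (70 // 32, -3 // 32);
# 70 // 32 == 2 and -3 // 32 == -1, so the path ends with "region/r.2.-1.mcr".
assert 70 // 32 == 2
assert -3 // 32 == -1
```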
@@ -217,9 +219,9 @@ class WorldRenderer(object):
         chunkY = spawnZ/16

         ## The filename of this chunk
-        chunkFile = self.get_chunk_path(chunkX, chunkY)
+        chunkFile = self.get_region_path(chunkX, chunkY)

-        data=nbt.load(chunkFile)[1]
+        data=nbt.load_from_region(chunkFile, chunkX, chunkY)[1]
         level = data['Level']
         blockArray = numpy.frombuffer(level['Blocks'], dtype=numpy.uint8).reshape((16,16,128))
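Tracing the spawn lookup through the new region math with hypothetical spawn coordinates:

```python
# Spawn at block (spawnX, spawnZ) = (100, -200):
#   chunkX = 100 / 16  -> 6    (integer division floors in Python 2)
#   chunkY = -200 / 16 -> -13
# get_region_path(6, -13) then points at region/r.0.-1.mcr, and
# nbt.load_from_region(..., 6, -13) pulls that single chunk out of it.
assert 100 // 16 == 6
assert -200 // 16 == -13
assert 6 // 32 == 0 and -13 // 32 == -1
```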
@@ -240,14 +242,17 @@ class WorldRenderer(object):
         """Starts the render. This returns when it is finished"""

         logging.info("Scanning chunks")
-        raw_chunks = self._find_chunkfiles()
+        raw_chunks = self._get_chunklist()
         logging.debug("Done scanning chunks")

         # Translate chunks to our diagonal coordinate system
+        # TODO
         mincol, maxcol, minrow, maxrow, chunks = _convert_coords(raw_chunks)
         del raw_chunks # Free some memory

         self.chunkmap = self._render_chunks_async(chunks, procs)
+        logging.debug("world chunkmap has len %d", len(self.chunkmap))


         self.mincol = mincol
         self.maxcol = maxcol
@@ -272,35 +277,47 @@ class WorldRenderer(object):
                         os.path.join(dirpath, f)))
         return all_chunks

-    def _find_chunkfiles(self):
-        """Returns a list of all the chunk file locations, and the file they
-        correspond to.
+    def _get_chunklist(self):
+        """Returns a list of all possible chunk coordinates, based on the
+        available region files. Note that not all chunk coordinates will
+        exist. The chunk renderer will know how to ignore non-existent chunks

-        Returns a list of (chunkx, chunky, filename) where chunkx and chunky are
-        given in chunk coordinates. Use convert_coords() to turn the resulting list
-        into an oblique coordinate system.
+        returns a list of (chunkx, chunky, regionfile) where regionfile is
+        the region file that contains this chunk

-        Usually this scans the given worlddir, but will use the chunk list
-        given to the constructor if one was provided."""
+        TODO, a --cachedir implementation should involve this method
+        """
         all_chunks = []

-        for dirpath, dirnames, filenames in os.walk(self.worlddir):
-            if not dirnames and filenames and "DIM-1" not in dirpath:
-                for f in filenames:
-                    if f.startswith("c.") and f.endswith(".dat"):
-                        p = f.split(".")
-                        all_chunks.append((base36decode(p[1]), base36decode(p[2]),
-                            os.path.join(dirpath, f)))
+        regions = self._find_regionfiles()
+        print "found %d regions" % len(regions)
+        for region in regions:
+            print "region %d, %d --> %s" % region
+            these_chunks = list(itertools.product(
+                range(region[0]*32,region[0]*32 + 32),
+                range(region[1]*32,region[1]*32 + 32)
+                ))
+            print "region %d,%d will go from:"
+            print " %r" % range(region[0]*32,region[0]*32 + 32)
+            print " %r" % range(region[1]*32,region[1]*32 + 32)
+            these_chunks = map(lambda x: (x[0], x[1], region[2]), these_chunks)
+            assert(len(these_chunks) == 1024)
+            all_chunks += these_chunks

         if not all_chunks:
             logging.error("Error: No chunks found!")
             sys.exit(1)

+        logging.debug("Total possible chunks: %d", len(all_chunks))
         return all_chunks
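The new _get_chunklist expands every region into its full 32 x 32 = 1024 candidate chunk coordinates and leaves it to the per-chunk code to discard the ones that don't actually exist. A condensed sketch of that expansion, without the debug prints; it assumes region tuples of (regionX, regionY, regionfile), which is what the loop over self._find_regionfiles() implies:

```python
import itertools

def chunks_for_region(region_x, region_y, regionfile):
    """All 1024 chunk coordinates that region (region_x, region_y) could contain."""
    coords = itertools.product(
        range(region_x * 32, region_x * 32 + 32),
        range(region_y * 32, region_y * 32 + 32))
    return [(cx, cy, regionfile) for (cx, cy) in coords]

# len(chunks_for_region(0, 0, "region/r.0.0.mcr")) == 1024, matching the assert above
```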

     def _render_chunks_async(self, chunks, processes):
         """Starts up a process pool and renders all the chunks asynchronously.

-        chunks is a list of (col, row, chunkfile)
+        chunks is a list of (col, row, (chunkX, chunkY)). Use chunkX,chunkY
+        to find the chunk data in a region file

         Returns a dictionary mapping (col, row) to the file where that
         chunk is rendered as an image
@@ -318,21 +335,23 @@ class WorldRenderer(object):
         if processes == 1:
             # Skip the multiprocessing stuff
             logging.debug("Rendering chunks synchronously since you requested 1 process")
-            for i, (col, row, chunkfile) in enumerate(chunks):
-                if inclusion_set and (col, row) not in inclusion_set:
-                    # Skip rendering, just find where the existing image is
-                    _, imgpath = chunk.find_oldimage(chunkfile, cached, self.caves)
-                    if imgpath:
-                        results[(col, row)] = imgpath
-                    continue
+            for i, (col, row, chunkXY) in enumerate(chunks):
+                ##TODO##/if inclusion_set and (col, row) not in inclusion_set:
+                ##TODO##/    # Skip rendering, just find where the existing image is
+                ##TODO##/    _, imgpath = chunk.find_oldimage(chunkfile, cached, self.caves)
+                ##TODO##/    if imgpath:
+                ##TODO##/        results[(col, row)] = imgpath
+                ##TODO##/    continue

-                oldimg = chunk.find_oldimage(chunkfile, cached, self.caves)
-                if chunk.check_cache(chunkfile, oldimg):
+                oldimg = chunk.find_oldimage(chunkXY, cached, self.caves)
+                # TODO remove this shortcircuit
+                if oldimg[1]:## or chunk.check_cache(chunkfile, oldimg):
                     result = oldimg[1]
                 else:
-                    result = chunk.render_and_save(chunkfile, self.cachedir, self, oldimg, queue=q)
+                    result = chunk.render_and_save(chunkXY, self.cachedir, self, oldimg, queue=q)

-                results[(col, row)] = result
+                if result:
+                    results[(col, row)] = result
                 if i > 0:
                     try:
                         item = q.get(block=False)