diff --git a/overviewer_core/chunk.py b/overviewer_core/chunk.py
deleted file mode 100644
index 09c8d96..0000000
--- a/overviewer_core/chunk.py
+++ /dev/null
@@ -1,415 +0,0 @@
-# This file is part of the Minecraft Overviewer.
-#
-# Minecraft Overviewer is free software: you can redistribute it and/or
-# modify it under the terms of the GNU General Public License as published
-# by the Free Software Foundation, either version 3 of the License, or (at
-# your option) any later version.
-#
-# Minecraft Overviewer is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
-# Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with the Overviewer.  If not, see <http://www.gnu.org/licenses/>.
-
-import numpy
-from PIL import Image, ImageDraw, ImageEnhance, ImageOps
-import os.path
-import logging
-import time
-import math
-import sys
-
-import nbt
-import textures
-import world
-import composite
-import c_overviewer
-
-"""
-This module has routines related to rendering one particular chunk into an
-image
-
-"""
-
-# General note about pasting transparent image objects onto an image with an
-# alpha channel:
-# If you use the image as its own mask, it will work fine only if the alpha
-# channel is binary. If there are any translucent parts, the destination
-# image's alpha channel gets modified as well. To prevent this: first use
-# im.split() and take band index 3, which is the alpha channel, and use that
-# as the mask. Then use im.convert("RGB") to strip the source image of its
-# alpha channel, and use that as the source to alpha_over()
-
-# (note that this workaround is NOT technically needed when using the
-# alpha_over extension, BUT this extension may fall back to PIL's
-# paste(), which DOES need the workaround.)
-
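-# A minimal sketch of that workaround, assuming two hypothetical RGBA images
-# `base` and `overlay` (illustration only; alpha_over() takes the same args):
-#
-#     alpha = overlay.split()[3]              # band 3 is the alpha channel
-#     base.paste(overlay.convert("RGB"),      # source stripped of its alpha
-#                (0, 0), alpha)               # alpha band used as the mask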
-def get_lvldata(region, filename, x, y, retries=2):
- """Takes a filename and chunkcoords and returns the Level struct, which contains all the
- level info"""
-
-    # a non-existent region file doesn't mean a corrupt chunk.
- if filename == None:
- raise NoSuchChunk
-
- try:
- d = region.get_chunk(x, y)
- except Exception, e:
- if retries > 0:
- # wait a little bit, and try again (up to `retries` times)
- time.sleep(1)
- #make sure we reload region info
- world.reload_region(filename)
-            return get_lvldata(region, filename, x, y, retries=retries-1)
- else:
- logging.warning("Error opening chunk (%i, %i) in %s. It may be corrupt. %s", x, y, filename, e)
- raise ChunkCorrupt(str(e))
-
- if not d: raise NoSuchChunk(x,y)
- return d
-
-def get_blockarray(level):
- """Takes the level struct as returned from get_lvldata, and returns the
- Block array, which just contains all the block ids"""
- return level['Blocks']
-
-def get_skylight_array(level):
- """Returns the skylight array. This is 4 bits per block, but it is
- expanded for you so you may index it normally."""
- skylight = level['SkyLight']
- return skylight
-
-def get_blocklight_array(level):
- """Returns the blocklight array. This is 4 bits per block, but it
- is expanded for you so you may index it normally."""
-    return level['BlockLight']
-
-def get_blockdata_array(level):
- """Returns the ancillary data from the 'Data' byte array. Data is packed
- in a similar manner to skylight data"""
- return level['Data']
-
-def get_tileentity_data(level):
- """Returns the TileEntities TAG_List from chunk dat file"""
- data = level['TileEntities']
- return data
-
-class ChunkCorrupt(Exception):
- pass
-
-class NoSuchChunk(Exception):
- pass
-
-class ChunkRenderer(object):
- def __init__(self, chunkcoords, regionobj, rendermode, queue):
- """Make a new chunk renderer for the given chunk coordinates.
-        chunkcoords should be a tuple: (chunkX, chunkY)
-
-        queue, if given, receives points of interest found while rendering
- """
- self.queue = queue
- self.region = regionobj
-
- self.regionfile = regionobj.get_region_path(*chunkcoords)
- #if not os.path.exists(self.regionfile):
- # raise ValueError("Could not find regionfile: %s" % self.regionfile)
-
- ## TODO TODO all of this class
-
- #destdir, filename = os.path.split(self.chunkfile)
- #filename_split = filename.split(".")
- #chunkcoords = filename_split[1:3]
-
- #self.coords = map(world.base36decode, chunkcoords)
- #self.blockid = "%d.%d" % chunkcoords
-
- # chunk coordinates (useful to converting local block coords to
- # global block coords)
- self.chunkX = chunkcoords[0]
- self.chunkY = chunkcoords[1]
-
- self.rendermode = rendermode
-
- def _load_level(self):
- """Loads and returns the level structure"""
- if not hasattr(self, "_level"):
- try:
- self._level = get_lvldata(self.region,self.regionfile, self.chunkX, self.chunkY)
- except NoSuchChunk, e:
- logging.debug("Skipping non-existant chunk")
- raise
- return self._level
- level = property(_load_level)
-
- def _load_blocks(self):
- """Loads and returns the block array"""
- if not hasattr(self, "_blocks"):
- self._blocks = get_blockarray(self._load_level())
- return self._blocks
- blocks = property(_load_blocks)
-
- def _load_skylight(self):
- """Loads and returns skylight array"""
- if not hasattr(self, "_skylight"):
- self._skylight = get_skylight_array(self.level)
- return self._skylight
- skylight = property(_load_skylight)
-
- def _load_blocklight(self):
- """Loads and returns blocklight array"""
- if not hasattr(self, "_blocklight"):
- self._blocklight = get_blocklight_array(self.level)
- return self._blocklight
- blocklight = property(_load_blocklight)
-
- def _load_left(self):
- """Loads and sets data from lower-left chunk"""
-        chunk_path = self.region.get_region_path(self.chunkX - 1, self.chunkY)
-        try:
-            chunk_data = get_lvldata(self.region, chunk_path, self.chunkX - 1, self.chunkY)
- self._left_skylight = get_skylight_array(chunk_data)
- self._left_blocklight = get_blocklight_array(chunk_data)
- self._left_blocks = get_blockarray(chunk_data)
- except NoSuchChunk:
- self._left_skylight = None
- self._left_blocklight = None
- self._left_blocks = None
-
- def _load_left_blocks(self):
- """Loads and returns lower-left block array"""
- if not hasattr(self, "_left_blocks"):
- self._load_left()
- return self._left_blocks
- left_blocks = property(_load_left_blocks)
-
- def _load_left_skylight(self):
- """Loads and returns lower-left skylight array"""
- if not hasattr(self, "_left_skylight"):
- self._load_left()
- return self._left_skylight
- left_skylight = property(_load_left_skylight)
-
- def _load_left_blocklight(self):
- """Loads and returns lower-left blocklight array"""
- if not hasattr(self, "_left_blocklight"):
- self._load_left()
- return self._left_blocklight
- left_blocklight = property(_load_left_blocklight)
-
- def _load_right(self):
- """Loads and sets data from lower-right chunk"""
-        chunk_path = self.region.get_region_path(self.chunkX, self.chunkY + 1)
-        try:
-            chunk_data = get_lvldata(self.region, chunk_path, self.chunkX, self.chunkY + 1)
- self._right_skylight = get_skylight_array(chunk_data)
- self._right_blocklight = get_blocklight_array(chunk_data)
- self._right_blocks = get_blockarray(chunk_data)
- except NoSuchChunk:
- self._right_skylight = None
- self._right_blocklight = None
- self._right_blocks = None
-
- def _load_right_blocks(self):
- """Loads and returns lower-right block array"""
- if not hasattr(self, "_right_blocks"):
- self._load_right()
- return self._right_blocks
- right_blocks = property(_load_right_blocks)
-
- def _load_right_skylight(self):
- """Loads and returns lower-right skylight array"""
- if not hasattr(self, "_right_skylight"):
- self._load_right()
- return self._right_skylight
- right_skylight = property(_load_right_skylight)
-
- def _load_right_blocklight(self):
- """Loads and returns lower-right blocklight array"""
- if not hasattr(self, "_right_blocklight"):
- self._load_right()
- return self._right_blocklight
- right_blocklight = property(_load_right_blocklight)
-
- def _load_up_right(self):
- """Loads and sets data from upper-right chunk"""
-        chunk_path = self.region.get_region_path(self.chunkX + 1, self.chunkY)
-        try:
-            chunk_data = get_lvldata(self.region, chunk_path, self.chunkX + 1, self.chunkY)
- self._up_right_skylight = get_skylight_array(chunk_data)
- self._up_right_blocklight = get_blocklight_array(chunk_data)
- self._up_right_blocks = get_blockarray(chunk_data)
- except NoSuchChunk:
- self._up_right_skylight = None
- self._up_right_blocklight = None
- self._up_right_blocks = None
-
- def _load_up_right_blocks(self):
- """Loads and returns upper-right block array"""
- if not hasattr(self, "_up_right_blocks"):
- self._load_up_right()
- return self._up_right_blocks
- up_right_blocks = property(_load_up_right_blocks)
-
- def _load_up_right_skylight(self):
- """Loads and returns lower-right skylight array"""
- if not hasattr(self, "_up_right_skylight"):
- self._load_up_right()
- return self._up_right_skylight
- up_right_skylight = property(_load_up_right_skylight)
-
- def _load_up_right_blocklight(self):
- """Loads and returns lower-right blocklight array"""
- if not hasattr(self, "_up_right_blocklight"):
- self._load_up_right()
- return self._up_right_blocklight
- up_right_blocklight = property(_load_up_right_blocklight)
-
- def _load_up_left(self):
- """Loads and sets data from upper-left chunk"""
-        chunk_path = self.region.get_region_path(self.chunkX, self.chunkY - 1)
-        try:
-            chunk_data = get_lvldata(self.region, chunk_path, self.chunkX, self.chunkY - 1)
- self._up_left_skylight = get_skylight_array(chunk_data)
- self._up_left_blocklight = get_blocklight_array(chunk_data)
- self._up_left_blocks = get_blockarray(chunk_data)
- except NoSuchChunk:
- self._up_left_skylight = None
- self._up_left_blocklight = None
- self._up_left_blocks = None
-
- def _load_up_left_blocks(self):
- """Loads and returns lower-left block array"""
- if not hasattr(self, "_up_left_blocks"):
- self._load_up_left()
- return self._up_left_blocks
- up_left_blocks = property(_load_up_left_blocks)
-
- def _load_up_left_skylight(self):
- """Loads and returns lower-right skylight array"""
- if not hasattr(self, "_up_left_skylight"):
- self._load_up_left()
- return self._up_left_skylight
- up_left_skylight = property(_load_up_left_skylight)
-
- def _load_up_left_blocklight(self):
- """Loads and returns lower-left blocklight array"""
- if not hasattr(self, "_up_left_blocklight"):
- self._load_up_left()
- return self._up_left_blocklight
- up_left_blocklight = property(_load_up_left_blocklight)
-
- def chunk_render(self, img=None, xoff=0, yoff=0, cave=False):
- """Renders a chunk with the given parameters, and returns the image.
- If img is given, the chunk is rendered to that image object. Otherwise,
- a new one is created. xoff and yoff are offsets in the image.
-
- For cave mode, all blocks that have any direct sunlight are not
- rendered, and blocks are drawn with a color tint depending on their
- depth."""
-
- blockData = get_blockdata_array(self.level)
- blockData_expanded = numpy.empty((16,16,128), dtype=numpy.uint8)
- # Even elements get the lower 4 bits
- blockData_expanded[:,:,::2] = blockData & 0x0F
- # Odd elements get the upper 4 bits
- blockData_expanded[:,:,1::2] = blockData >> 4
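-        # e.g. a packed byte 0xAB holds data for two vertically adjacent
-        # blocks: 0x0B for the even (lower) index, 0x0A for the odd one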
-
-
- # Each block is 24x24
- # The next block on the X axis adds 12px to x and subtracts 6px from y in the image
- # The next block on the Y axis adds 12px to x and adds 6px to y in the image
- # The next block up on the Z axis subtracts 12 from y axis in the image
-
- # Since there are 16x16x128 blocks in a chunk, the image will be 384x1728
- # (height is 128*12 high, plus the size of the horizontal plane: 16*12)
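-        # Worked out: width = 16*12 + 16*12 = 384, height = 128*12 + 16*12 = 1728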
- if not img:
- img = Image.new("RGBA", (384, 1728), (38,92,255,0))
-
- c_overviewer.render_loop(self, img, xoff, yoff, blockData_expanded)
-
- tileEntities = get_tileentity_data(self.level)
- for entity in tileEntities:
- if entity['id'] == 'Sign':
- msg=' \n'.join([entity['Text1'], entity['Text2'], entity['Text3'], entity['Text4']])
- if msg.strip():
- # convert the blockID coordinates from local chunk
- # coordinates to global world coordinates
- newPOI = dict(type="sign",
- x= entity['x'],
- y= entity['y'],
- z= entity['z'],
- msg=msg,
- chunk= (self.chunkX, self.chunkY),
- )
- if self.queue:
- self.queue.put(["newpoi", newPOI])
-
-
- # check to see if there are any signs in the persistentData list that are from this chunk.
-        # if so, remove them from the persistentData list (since they have
-        # been added to the world.POI list above).
- if self.queue:
- self.queue.put(['removePOI', (self.chunkX, self.chunkY)])
-
- return img
-
-# Render 3 blending masks for lighting
-# first is top (+Z), second is left (-X), third is right (+Y)
-def generate_facemasks():
- white = Image.new("L", (24,24), 255)
-
- top = Image.new("L", (24,24), 0)
- left = Image.new("L", (24,24), 0)
- whole = Image.new("L", (24,24), 0)
-
- tex = textures.Textures()
- toppart = tex.transform_image_top(white)
- leftpart = tex.transform_image_side(white)
-
- # using the real PIL paste here (not alpha_over) because there is
- # no alpha channel (and it's mode "L")
- top.paste(toppart, (0,0))
- left.paste(leftpart, (0,6))
- right = left.transpose(Image.FLIP_LEFT_RIGHT)
-
- # Manually touch up 6 pixels that leave a gap, like in
- # textures._build_block()
- for x,y in [(13,23), (17,21), (21,19)]:
- right.putpixel((x,y), 255)
- for x,y in [(3,4), (7,2), (11,0)]:
- top.putpixel((x,y), 255)
-
- # special fix for chunk boundary stipple
- for x,y in [(13,11), (17,9), (21,7)]:
- right.putpixel((x,y), 0)
-
- return (top, left, right)
-facemasks = generate_facemasks()
-black_color = Image.new("RGB", (24,24), (0,0,0))
-white_color = Image.new("RGB", (24,24), (255,255,255))
-
-# Render 128 different color images for color coded depth blending in cave mode
-def generate_depthcolors():
- depth_colors = []
- r = 255
- g = 0
- b = 0
- for z in range(128):
- depth_colors.append(r)
- depth_colors.append(g)
- depth_colors.append(b)
-
- if z < 32:
- g += 7
- elif z < 64:
- r -= 7
- elif z < 96:
- b += 7
- else:
- g -= 7
-
- return depth_colors
-depth_colors = generate_depthcolors()
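-# depth_colors is a flat [r, g, b, r, g, b, ...] list with one RGB triple per
-# height level, 128*3 = 384 integers in total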
diff --git a/overviewer_core/quadtree.py b/overviewer_core/quadtree.py
deleted file mode 100644
index b42b606..0000000
--- a/overviewer_core/quadtree.py
+++ /dev/null
@@ -1,942 +0,0 @@
-# This file is part of the Minecraft Overviewer.
-#
-# Minecraft Overviewer is free software: you can redistribute it and/or
-# modify it under the terms of the GNU General Public License as published
-# by the Free Software Foundation, either version 3 of the License, or (at
-# your option) any later version.
-#
-# Minecraft Overviewer is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
-# Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with the Overviewer.  If not, see <http://www.gnu.org/licenses/>.
-
-import itertools
-import os
-import os.path
-import functools
-import re
-import shutil
-import logging
-import stat
-import errno
-import time
-import random
-
-from PIL import Image
-
-from . import chunk
-from .optimizeimages import optimize_image
-from c_overviewer import get_render_mode_inheritance
-import util
-
-
-"""
-This module has routines related to generating a quadtree of tiles
-
-"""
-
-def iterate_base4(d):
- """Iterates over a base 4 number with d digits"""
- return itertools.product(xrange(4), repeat=d)
-
-class QuadtreeGen(object):
- def __init__(self, regionobj, destdir, bgcolor="#1A1A1A", depth=None, tiledir=None, forcerender=False, imgformat='png', imgquality=95, optimizeimg=None, rendermode="normal", rerender_prob=0.0):
- """Generates a quadtree from the world given into the
- given dest directory
-
- worldobj is a world.WorldRenderer object that has already been processed
-
- If depth is given, it overrides the calculated value. Otherwise, the
- minimum depth that contains all chunks is calculated and used.
-
- """
- self.forcerender = forcerender
- self.rerender_probability = rerender_prob
- self.imgformat = imgformat
- self.imgquality = imgquality
- self.optimizeimg = optimizeimg
- self.bgcolor = bgcolor
- self.rendermode = rendermode
- self.regionobj = regionobj
-
- # force png renderformat if we're using an overlay mode
- if 'overlay' in get_render_mode_inheritance(rendermode):
- self.imgformat = "png"
-
- # Make the destination dir
- if not os.path.exists(destdir):
- os.makedirs(os.path.abspath(destdir))
- if tiledir is None:
- tiledir = rendermode
- self.tiledir = tiledir
-
- if depth is None:
- # Determine quadtree depth (midpoint is always 0,0)
- for p in xrange(33):
- # Will 2^p tiles wide and high suffice?
-
- # X has twice as many chunks as tiles, then halved since this is a
- # radius
- xradius = 2**p
- # Y has 4 times as many chunks as tiles, then halved since this is
- # a radius
- yradius = 2*2**p
- if xradius >= self.regionobj.maxcol and -xradius <= self.regionobj.mincol and \
- yradius >= self.regionobj.maxrow and -yradius <= self.regionobj.minrow:
- break
-
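-            # Illustration (assumed bounds): with maxcol=40, mincol=-40,
-            # maxrow=100, minrow=-100 the loop stops at p=6, the first p where
-            # 2**p=64 and 2*2**p=128 cover both radii.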
- if p < 15:
- self.p = p
- else:
- raise ValueError("Your map is waaaay too big! Use the 'zoom' option in 'settings.py'. Overviewer is estimating %i zoom levels, but you probably want less." % (p,))
-
- else:
- self.p = depth
- xradius = 2**depth
- yradius = 2*2**depth
-
- print "depth:", self.p
-
- # Make new row and column ranges
- self.mincol = -xradius
- self.maxcol = xradius
- self.minrow = -yradius
- self.maxrow = yradius
-
- self.destdir = destdir
- self.full_tiledir = os.path.join(destdir, tiledir)
-
- # Check now if full_tiledir doesn't exist. If not, we can trigger
- # --fullrender, which skips some mtime checks to speed things up
- if not os.path.exists(self.full_tiledir):
- logging.debug("%s doesn't exist, doing a full render", self.full_tiledir)
- self.forcerender = True
-
- def __repr__(self):
- return "" % self.rendermode
-
- def _get_cur_depth(self):
- """How deep is the quadtree currently in the destdir? This glances in
-        overviewerConfig.js to see what zoomLevels is set to.
-        Returns -1 if it couldn't be detected: file not found, or nothing in
-        overviewerConfig.js matched
- """
- indexfile = os.path.join(self.destdir, "overviewerConfig.js")
- if not os.path.exists(indexfile):
- return -1
- matcher = re.compile(r"zoomLevels(?:\'|\")\s*:\s*(\d+)")
- p = -1
- for line in open(indexfile, "r"):
- res = matcher.search(line)
- if res:
- p = int(res.group(1))
- break
- return p
-
- def _increase_depth(self):
- """Moves existing tiles into place for a larger tree"""
- getpath = functools.partial(os.path.join, self.full_tiledir)
-
- # At top level of the tree:
- # quadrant 0 is now 0/3
- # 1 is now 1/2
- # 2 is now 2/1
- # 3 is now 3/0
- # then all that needs to be done is to regenerate the new top level
- for dirnum in range(4):
- newnum = (3,2,1,0)[dirnum]
-
- newdir = "new" + str(dirnum)
- newdirpath = getpath(newdir)
-
- files = [str(dirnum)+"."+self.imgformat, str(dirnum)]
- newfiles = [str(newnum)+"."+self.imgformat, str(newnum)]
-
- os.mkdir(newdirpath)
- for f, newf in zip(files, newfiles):
- p = getpath(f)
- if os.path.exists(p):
- os.rename(p, getpath(newdir, newf))
- os.rename(newdirpath, getpath(str(dirnum)))
-
- def _decrease_depth(self):
- """If the map size decreases, or perhaps the user has a depth override
- in effect, re-arrange existing tiles for a smaller tree"""
- getpath = functools.partial(os.path.join, self.full_tiledir)
-
- # quadrant 0/3 goes to 0
- # 1/2 goes to 1
- # 2/1 goes to 2
- # 3/0 goes to 3
- # Just worry about the directories here, the files at the top two
- # levels are cheap enough to replace
- if os.path.exists(getpath("0", "3")):
- os.rename(getpath("0", "3"), getpath("new0"))
- shutil.rmtree(getpath("0"))
- os.rename(getpath("new0"), getpath("0"))
-
- if os.path.exists(getpath("1", "2")):
- os.rename(getpath("1", "2"), getpath("new1"))
- shutil.rmtree(getpath("1"))
- os.rename(getpath("new1"), getpath("1"))
-
- if os.path.exists(getpath("2", "1")):
- os.rename(getpath("2", "1"), getpath("new2"))
- shutil.rmtree(getpath("2"))
- os.rename(getpath("new2"), getpath("2"))
-
- if os.path.exists(getpath("3", "0")):
- os.rename(getpath("3", "0"), getpath("new3"))
- shutil.rmtree(getpath("3"))
- os.rename(getpath("new3"), getpath("3"))
-
- # Delete the files in the top directory to make sure they get re-created.
- files = [str(num)+"."+self.imgformat for num in xrange(4)] + ["base." + self.imgformat]
- for f in files:
- try:
- os.unlink(getpath(f))
- except OSError, e:
-                pass # file doesn't exist; nothing to delete
-
- def check_depth(self):
- """Ensure the current quadtree is the correct depth. If it's not,
- employ some simple re-arranging of tiles to save on computation.
-
- """
-
- # If the tile directory has been deleted somehow, then don't bother
- # trying to rearrange things. It wouldn't do any good and would error
- # out anyways.
- if not os.path.exists(self.full_tiledir):
- return
-
- curdepth = self._get_cur_depth()
- if curdepth != -1:
- if self.p > curdepth:
- logging.warning("Your map seems to have expanded beyond its previous bounds.")
- logging.warning( "Doing some tile re-arrangements... just a sec...")
- for _ in xrange(self.p-curdepth):
- self._increase_depth()
- elif self.p < curdepth:
- logging.warning("Your map seems to have shrunk. Re-arranging tiles, just a sec...")
- for _ in xrange(curdepth - self.p):
- self._decrease_depth()
-
-
- def get_chunks_for_tile(self, tile):
- """Get chunks that are relevant to the given tile
-
- Returns a list of chunks where each item is
- (col, row, chunkx, chunky, regionobj)
- """
-
- chunklist = []
-
- unconvert_coords = util.unconvert_coords
- get_region = self.regionobj.regionfiles.get
-
- # Cached region object for consecutive iterations
- regionx = None
- regiony = None
- c = None
- mcr = None
-
- rowstart = tile.row
- rowend = rowstart+4
- colstart = tile.col
- colend = colstart+2
-
- # Start 16 rows up from the actual tile's row, since chunks are that tall.
-        # Also, every other (col, row) cell is empty due to how chunks are arranged. See
- # http://docs.overviewer.org/en/latest/design/designdoc/#chunk-addressing
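-        # That gives 21 rows x 3 columns = 63 candidate cells; roughly half are
-        # skipped by the parity check below, so a tile touches at most ~32 chunks.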
- for row, col in itertools.product(
- xrange(rowstart-16, rowend+1),
- xrange(colstart, colend+1)
- ):
- if row % 2 != col % 2:
- continue
-
- chunkx, chunky = unconvert_coords(col, row)
-
- regionx_ = chunkx//32
- regiony_ = chunky//32
- if regionx_ != regionx or regiony_ != regiony:
- regionx = regionx_
- regiony = regiony_
- _, _, fname, mcr = get_region((regionx, regiony),(None,None,None,None))
-
- if fname is not None and self.regionobj.chunk_exists(chunkx,chunky):
- chunklist.append((col, row, chunkx, chunky, mcr))
-
- return chunklist
-
- def get_compositetiles(self,zoom):
- """Returns the inner tiles at the given zoom level that need to be rendered
-
- """
- for path in iterate_base4(zoom):
-            # This image is rendered at (relative to the worker's destdir):
- tilepath = [str(x) for x in path[:-1]]
- tilepath = os.sep.join(tilepath)
- name = str(path[-1])
-
- yield [self,tilepath, name]
-
- def render_compositetile(self, dest, name):
- """
- Renders a tile at os.path.join(dest, name)+".ext" by taking tiles from
- os.path.join(dest, name, "{0,1,2,3}.png")
- """
- imgformat = self.imgformat
- imgpath = os.path.join(dest, name) + "." + imgformat
-
- if name == "base":
- # Special case for the base tile. Its children are in the same
- # directory instead of in a sub-directory
- quadPath = [
- ((0,0),os.path.join(dest, "0." + imgformat)),
- ((192,0),os.path.join(dest, "1." + imgformat)),
- ((0, 192),os.path.join(dest, "2." + imgformat)),
- ((192,192),os.path.join(dest, "3." + imgformat)),
- ]
- else:
- quadPath = [
- ((0,0),os.path.join(dest, name, "0." + imgformat)),
- ((192,0),os.path.join(dest, name, "1." + imgformat)),
- ((0, 192),os.path.join(dest, name, "2." + imgformat)),
- ((192,192),os.path.join(dest, name, "3." + imgformat)),
- ]
-
- #stat the tile, we need to know if it exists and its mtime
- try:
- tile_mtime = os.stat(imgpath)[stat.ST_MTIME]
- except OSError, e:
- if e.errno != errno.ENOENT:
- raise
- tile_mtime = None
-
- #check mtimes on each part of the quad, this also checks if they exist
- max_mtime = 0
- needs_rerender = (tile_mtime is None) or self.forcerender
- quadPath_filtered = []
- for path in quadPath:
- try:
- quad_mtime = os.stat(path[1])[stat.ST_MTIME]
- quadPath_filtered.append(path)
- if quad_mtime > tile_mtime:
- needs_rerender = True
- max_mtime = max(max_mtime, quad_mtime)
- except OSError:
- # We need to stat all the quad files, so keep looping
- pass
- # do they all not exist?
- if not quadPath_filtered:
- if tile_mtime is not None:
- os.unlink(imgpath)
- return
- # quit now if we don't need rerender
- if not needs_rerender:
- return
- #logging.debug("writing out compositetile {0}".format(imgpath))
-
- # Create the actual image now
- img = Image.new("RGBA", (384, 384), self.bgcolor)
-
- # we'll use paste (NOT alpha_over) for quadtree generation because
- # this is just straight image stitching, not alpha blending
-
- for path in quadPath_filtered:
- try:
- quad = Image.open(path[1]).resize((192,192), Image.ANTIALIAS)
- img.paste(quad, path[0])
- except Exception, e:
- logging.warning("Couldn't open %s. It may be corrupt. Error was '%s'", path[1], e)
- logging.warning("I'm going to try and delete it. You will need to run the render again")
- try:
- os.unlink(path[1])
- except Exception, e:
- logging.error("I couldn't delete it. You will need to delete it yourself. Error was '%s'", e)
-
- # Save it
- if self.imgformat == 'jpg':
- img.save(imgpath, quality=self.imgquality, subsampling=0)
- else: # png
- img.save(imgpath)
-
- if self.optimizeimg:
- optimize_image(imgpath, self.imgformat, self.optimizeimg)
-
- os.utime(imgpath, (max_mtime, max_mtime))
-
- def render_worldtile(self, tile, check_tile=False):
- """Renders the given tile. All the other relevant information is
- already stored in this quadtree object or in self.world.
-
- This function is typically called in the child process. The tile is
- assumed to need rendering unless the check_tile flag is given.
-
- If check_tile is true, the mtimes of the chunk are compared with the
- mtime of this tile and the tile is conditionally rendered.
-
- The image is rendered and saved to disk in the place this quadtree is
- configured to store images.
-
- If there are no chunks, this tile is not saved. If this is the case but
- the tile exists, it is deleted
-
- There is no return value
- """
-
- # The poi_q (point of interest queue) is a multiprocessing Queue
-        # object, and it gets stashed in the region object by the constructor to
-        # RenderNode so we can find it right here.
- poi_queue = self.regionobj.poi_q
-
- imgpath = tile.get_filepath(self.full_tiledir, self.imgformat)
-
- # Calculate which chunks are relevant to this tile
- chunks = self.get_chunks_for_tile(tile)
-
- region = self.regionobj
-
- tile_mtime = None
- if check_tile:
- # stat the file, we need to know if it exists and its mtime
- try:
- tile_mtime = os.stat(imgpath)[stat.ST_MTIME]
- except OSError, e:
- # ignore only if the error was "file not found"
- if e.errno != errno.ENOENT:
- raise
-
- if not chunks:
- # No chunks were found in this tile
- if not check_tile:
- logging.warning("%s was requested for render, but no chunks found! This may be a bug", tile)
- try:
- os.unlink(imgpath)
- except OSError, e:
- # ignore only if the error was "file not found"
- if e.errno != errno.ENOENT:
- raise
- else:
- logging.debug("%s deleted", tile)
- return
-
-        # Create the directory if it doesn't exist
- dirdest = os.path.dirname(imgpath)
- if not os.path.exists(dirdest):
- try:
- os.makedirs(dirdest)
- except OSError, e:
- # Ignore errno EEXIST: file exists. Due to a race condition,
- # two processes could conceivably try and create the same
- # directory at the same time
- if e.errno != errno.EEXIST:
- raise
-
- # Compute the maximum mtime of all the chunks that go into this tile.
- # At the end, we'll set the tile's mtime to this value.
- max_chunk_mtime = 0
- for col,row,chunkx,chunky,region in chunks:
- max_chunk_mtime = max(
- max_chunk_mtime,
- region.get_chunk_timestamp(chunkx, chunky)
- )
-
- if check_tile:
- # Look at all the chunks that touch this tile and their mtimes to
- # determine if this tile actually needs rendering
- try:
- needs_rerender = False
-
- for col, row, chunkx, chunky, region in chunks:
-
- # don't even check if it's not in the regionlist
-                    if self.regionobj.regionlist and os.path.abspath(region._filename) not in self.regionobj.regionlist:
- continue
-
- # bail early if forcerender is set
- if self.forcerender:
- needs_rerender = True
- break
-
- # checking chunk mtime
- if region.get_chunk_timestamp(chunkx, chunky) > tile_mtime:
- needs_rerender = True
- break
-
- # stochastic render check
- if not needs_rerender and self.rerender_probability > 0.0 and random.random() < self.rerender_probability:
- needs_rerender = True
-
- # if after all that, we don't need a rerender, return
- if not needs_rerender:
- return
- except OSError:
-                # couldn't get tile mtime; skip the check and assume it needs rendering
- pass
-
- # We have all the necessary info and this tile has passed the checks
- # and should be rendered. So do it!
-
- #logging.debug("writing out worldtile {0}".format(imgpath))
-
- # Compile this image
- tileimg = Image.new("RGBA", (384, 384), self.bgcolor)
-
- rendermode = self.rendermode
- colstart = tile.col
- rowstart = tile.row
- # col colstart will get drawn on the image starting at x coordinates -(384/2)
- # row rowstart will get drawn on the image starting at y coordinates -(192/2)
- for col, row, chunkx, chunky, region in chunks:
- xpos = -192 + (col-colstart)*192
- ypos = -96 + (row-rowstart)*96
-
- # draw the chunk!
- try:
- a = chunk.ChunkRenderer((chunkx, chunky), self.regionobj, rendermode, poi_queue)
- a.chunk_render(tileimg, xpos, ypos, None)
- except chunk.ChunkCorrupt:
- # an error was already printed
- pass
-
- # Save them
- if self.imgformat == 'jpg':
- tileimg.save(imgpath, quality=self.imgquality, subsampling=0)
- else: # png
- tileimg.save(imgpath)
- #Add tile to list of rendered tiles
- poi_queue.put(['rendered',imgpath])
-
- if self.optimizeimg:
- optimize_image(imgpath, self.imgformat, self.optimizeimg)
-
- os.utime(imgpath, (max_chunk_mtime, max_chunk_mtime))
-
- def scan_chunks(self):
- """Scans the chunks of the world object and generates a dirty tree
- object holding the tiles that need to be rendered.
-
- Checks mtimes of tiles in the process, unless forcerender is set on the
- object, in which case all tiles that exist are marked as dirty.
-
- """
-
- depth = self.p
-
- dirty = DirtyTiles(depth)
-
- logging.debug(" Scanning chunks for tiles that need rendering...")
- chunkcount = 0
- stime = time.time()
-
- # For each chunk, do this:
- # For each tile that the chunk touches, do this:
- # Compare the last modified time of the chunk and tile. If the
- # tile is older, mark it in a DirtyTiles object as dirty.
- #
- # IDEA: check last render time against mtime of the region to short
- # circuit checking mtimes of all chunks in a region
- for chunkx, chunky, chunkmtime in self.regionobj.iterate_chunks():
- chunkcount += 1
- #if chunkcount % 10000 == 0:
- # logging.info(" %s chunks scanned", chunkcount)
-
- chunkcol, chunkrow = util.convert_coords(chunkx, chunky)
- logging.debug("Looking at chunk %s,%s", chunkcol, chunkrow)
-
- # find tile coordinates
- tilecol = chunkcol - chunkcol % 2
- tilerow = chunkrow - chunkrow % 4
-
- if chunkcol % 2 == 0:
- # This chunk is half-in one column and half-in another column.
- # tilecol is the right one, also do tilecol-2, the left one
- x_tiles = 2
- else:
- x_tiles = 1
-
- # The tile at tilecol,tilerow obviously contains chunk, but so do
- # the next 4 tiles down because chunks are very tall
- for i in xrange(x_tiles):
- for j in xrange(5):
-
- c = tilecol - 2*i
- r = tilerow + 4*j
- # Make sure the tile is in the range according to the given
-                    # depth. This only matters if the user has given -z to
-                    # render a smaller area than the whole map
- if (
- c < self.mincol or
- c >= self.maxcol or
- r < self.minrow or
- r >= self.maxrow
- ):
- continue
-
- tile = Tile.compute_path(c, r, depth)
-
- if self.forcerender:
- dirty.set_dirty(tile.path)
- continue
-
- # Stochastic check. Since we're scanning by chunks and not
- # by tiles, and the tiles get checked multiple times for
- # each chunk, this is only an approximation. The given
- # probability is for a particular tile that needs
- # rendering, but since a tile gets touched up to 32 times
- # (once for each chunk in it), divide the probability by
- # 32.
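-                    # e.g. rerender_prob=0.25 means each of the up-to-32 touches
-                    # tests against 0.25/32, roughly 0.78% per touch.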
- if self.rerender_probability and self.rerender_probability/32 > random.random():
- dirty.set_dirty(tile.path)
- continue
-
- # Check if this tile has already been marked dirty. If so,
- # no need to do any of the below.
- if dirty.query_path(tile.path):
- continue
-
- # Check mtimes and conditionally add tile to dirty set
- tile_path = tile.get_filepath(self.full_tiledir, self.imgformat)
- try:
- tile_mtime = os.stat(tile_path)[stat.ST_MTIME]
- except OSError, e:
- if e.errno != errno.ENOENT:
- raise
- tile_mtime = 0
- #logging.debug("tile %s(%s) vs chunk %s,%s (%s)",
- # tile, tile_mtime, chunkcol, chunkrow, chunkmtime)
- if tile_mtime < chunkmtime:
- dirty.set_dirty(tile.path)
- #logging.debug(" Setting tile as dirty. Will render.")
-
- t = int(time.time()-stime)
- logging.debug("Done with scan for '%s'. %s chunks scanned in %s second%s",
- self.rendermode, chunkcount, t,
- "s" if t != 1 else "")
-
- #if logging.getLogger().isEnabledFor(logging.DEBUG):
- # logging.debug(" Counting tiles that need rendering...")
- # tilecount = 0
- # stime = time.time()
- # for _ in dirty.iterate_dirty():
- # tilecount += 1
- # logging.debug(" Done. %s tiles need to be rendered. (count took %s seconds)",
- # tilecount, int(time.time()-stime))
-
- return dirty
-
-
-class DirtyTiles(object):
- """This tree holds which tiles need rendering.
- Each instance is a node, and the root of a subtree.
-
- Each node knows its "level", which corresponds to the zoom level where 0 is
- the inner-most (most zoomed in) tiles.
-
-    Instances hold the clean/dirty state of their children. Leaf nodes are
-    images and do not physically exist in the tree; level 1 nodes keep track of
-    leaf image state, level 2 nodes keep track of level 1 state, and so forth.
-
-    In an attempt to keep things memory efficient, subtrees that are completely
-    dirty are collapsed
-
- """
- __slots__ = ("depth", "children")
- def __init__(self, depth):
- """Initialize a new tree with the specified depth. This actually
- initializes a node, which is the root of a subtree, with `depth` levels
- beneath it.
-
- """
- # Stores the depth of the tree according to this node. This is not the
- # depth of this node, but rather the number of levels below this node
- # (including this node).
- self.depth = depth
-
- # the self.children array holds the 4 children of this node. This
- # follows the same quadtree convention as elsewhere: children 0, 1, 2,
- # 3 are the upper-left, upper-right, lower-left, and lower-right
- # respectively
- # Values are:
- # False
- # All children down this subtree are clean
- # True
- # All children down this subtree are dirty
- # A DirtyTiles instance
- # the instance defines which children down that subtree are
- # clean/dirty.
- # A node with depth=1 cannot have a DirtyTiles instance in its
- # children since its leaves are images, not more tree
- self.children = [False] * 4
-
- def set_dirty(self, path):
- """Marks the requested leaf node as "dirty".
-
- Path is an iterable of integers representing the path to the leaf node
- that is requested to be marked as dirty.
-
- """
- path = list(path)
- assert len(path) == self.depth
- path.reverse()
- self._set_dirty_helper(path)
-
- def _set_dirty_helper(self, path):
- """Recursive call for set_dirty()
-
- Expects path to be a list in reversed order
-
- If *all* the nodes below this one are dirty, this function returns
- true. Otherwise, returns None.
-
- """
-
- if self.depth == 1:
- # Base case
- self.children[path[0]] = True
-
- # Check to see if all children are dirty
- if all(self.children):
- return True
- else:
- # Recursive case
-
- childnum = path.pop()
- child = self.children[childnum]
-
- if child == False:
- # Create a new node
- child = self.__class__(self.depth-1)
- child._set_dirty_helper(path)
- self.children[childnum] = child
- elif child == True:
- # Every child is already dirty. Nothing to do.
- return
- else:
- # subtree is mixed clean/dirty. Recurse
- ret = child._set_dirty_helper(path)
- if ret:
- # Child says it's completely dirty, so we can purge the
- # subtree and mark it as dirty. The subtree will be garbage
- # collected when this method exits.
- self.children[childnum] = True
-
- # Since we've marked an entire sub-tree as dirty, we may be
- # able to signal to our parent
- if all(x is True for x in self.children):
- return True
-
- def iterate_dirty(self, level=None):
- """Returns an iterator over every dirty tile in this subtree. Each item
- yielded is a sequence of integers representing the quadtree path to the
- dirty tile. Yielded sequences are of length self.depth.
-
- If level is None, iterates over tiles of the highest level, i.e.
-        worldtiles. If level is a value between 1 and the depth of this tree,
- this method iterates over tiles at that level. Zoom level 0 is zoomed
- all the way out, zoom level `depth` is all the way in.
-
- In other words, specifying level causes the tree to be iterated as if
- it was only that depth.
-
- """
- if level is None:
- todepth = 1
- else:
- if not (level > 0 and level <= self.depth):
- raise ValueError("Level parameter must be between 1 and %s" % self.depth)
- todepth = self.depth - level + 1
-
- return (tuple(reversed(rpath)) for rpath in self._iterate_dirty_helper(todepth))
-
- def _iterate_dirty_helper(self, todepth):
- if self.depth == todepth:
- # Base case
- if self.children[0]: yield [0]
- if self.children[1]: yield [1]
- if self.children[2]: yield [2]
- if self.children[3]: yield [3]
-
- else:
- # Higher levels:
- for c, child in enumerate(self.children):
- if child == True:
- # All dirty down this subtree, iterate over every leaf
- for x in iterate_base4(self.depth-todepth):
- x = list(x)
- x.append(c)
- yield x
- elif child != False:
- # Mixed dirty/clean down this subtree, recurse
- for path in child._iterate_dirty_helper(todepth):
- path.append(c)
- yield path
-
- def query_path(self, path):
- """Queries for the state of the given tile in the tree.
-
- Returns False for "clean", True for "dirty"
-
- """
- # Traverse the tree down the given path. If the tree has been
- # collapsed, then just return what the subtree is. Otherwise, if we
-        # find the specific DirtyTiles node requested, return its state using the
- # __nonzero__ call.
- treenode = self
- for pathelement in path:
- treenode = treenode.children[pathelement]
- if not isinstance(treenode, DirtyTiles):
- return treenode
-
- # If the method has not returned at this point, treenode is the
- # requested node, but it is an inner node with possibly mixed state
- # subtrees. If any of the children are True return True. This call
- # relies on the __nonzero__ method
- return bool(treenode)
-
- def __nonzero__(self):
- """Returns the boolean context of this particular node. If any
- descendent of this node is True return True. Otherwise, False.
-
- """
-        # Any children that are True or are DirtyTiles that evaluate to True
- # IDEA: look at all children for True before recursing
- # Better idea: every node except the root /must/ have a dirty
- # descendent or it wouldn't exist. This assumption is only valid as
- # long as an unset_dirty() method or similar does not exist.
- return any(self.children)
-
- def count(self):
- """Returns the total number of dirty leaf nodes.
-
- """
- # TODO: Make this more efficient (although for even the largest trees,
- # this takes only seconds)
- c = 0
- for _ in self.iterate_dirty():
- c += 1
- return c
-
-class Tile(object):
- """A simple container class that represents a single render-tile.
-
- A render-tile is a tile that is rendered, not a tile composed of other
- tiles (composite-tile).
-
- """
- __slots__ = ("col", "row", "path")
- def __init__(self, col, row, path):
- """Initialize the tile obj with the given parameters. It's probably
- better to use one of the other constructors though
-
- """
- self.col = col
- self.row = row
- self.path = tuple(path)
-
- def __repr__(self):
- return "%s(%r,%r,%r)" % (self.__class__.__name__, self.col, self.row, self.path)
-
- def __eq__(self,other):
- return self.col == other.col and self.row == other.row and tuple(self.path) == tuple(other.path)
-
- def __ne__(self, other):
- return not self == other
-
- def get_filepath(self, tiledir, imgformat):
- """Returns the path to this file given the directory to the tiles
-
- """
- # os.path.join would be the proper way to do this path concatenation,
- # but it is surprisingly slow, probably because it checks each path
-        # element to see if it begins with a slash. Since we know these components
-        # are all relative, just concatenate with os.path.sep
- pathcomponents = [tiledir]
- pathcomponents.extend(str(x) for x in self.path)
- path = os.path.sep.join(pathcomponents)
- imgpath = ".".join((path, imgformat))
- return imgpath
-
- @classmethod
- def from_path(cls, path):
- """Constructor that takes a path and computes the col,row address of
- the tile and constructs a new tile object.
-
- """
- path = tuple(path)
-
- depth = len(path)
-
- # Radius of the world in chunk cols/rows
- # (Diameter in X is 2**depth, divided by 2 for a radius, multiplied by
- # 2 for 2 chunks per tile. Similarly for Y)
- xradius = 2**depth
- yradius = 2*2**depth
-
- col = -xradius
- row = -yradius
- xsize = xradius
- ysize = yradius
-
- for p in path:
- if p in (1,3):
- col += xsize
- if p in (2,3):
- row += ysize
- xsize //= 2
- ysize //= 2
-
- return cls(col, row, path)
-
- @classmethod
- def compute_path(cls, col, row, depth):
- """Constructor that takes a col,row of a tile and computes the path.
-
- """
- assert col % 2 == 0
- assert row % 4 == 0
-
- xradius = 2**depth
- yradius = 2*2**depth
-
- colbounds = [-xradius, xradius]
- rowbounds = [-yradius, yradius]
-
- path = []
-
- for level in xrange(depth):
- # Strategy: Find the midpoint of this level, and determine which
- # quadrant this row/col is in. Then set the bounds to that level
- # and repeat
-
- xmid = (colbounds[1] + colbounds[0]) // 2
- ymid = (rowbounds[1] + rowbounds[0]) // 2
-
- if col < xmid:
- if row < ymid:
- path.append(0)
- colbounds[1] = xmid
- rowbounds[1] = ymid
- else:
- path.append(2)
- colbounds[1] = xmid
- rowbounds[0] = ymid
- else:
- if row < ymid:
- path.append(1)
- colbounds[0] = xmid
- rowbounds[1] = ymid
- else:
- path.append(3)
- colbounds[0] = xmid
- rowbounds[0] = ymid
-
- return cls(col, row, path)
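-    # Round-trip illustration (hypothetical values): Tile.compute_path(0, 0, 1)
-    # yields path (3,), and Tile.from_path((3,)) recovers col=0, row=0.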
diff --git a/overviewer_core/rendernode.py b/overviewer_core/rendernode.py
deleted file mode 100644
index 646a599..0000000
--- a/overviewer_core/rendernode.py
+++ /dev/null
@@ -1,589 +0,0 @@
-# This file is part of the Minecraft Overviewer.
-#
-# Minecraft Overviewer is free software: you can redistribute it and/or
-# modify it under the terms of the GNU General Public License as published
-# by the Free Software Foundation, either version 3 of the License, or (at
-# your option) any later version.
-#
-# Minecraft Overviewer is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
-# Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with the Overviewer.  If not, see <http://www.gnu.org/licenses/>.
-
-from __future__ import division
-import multiprocessing
-import Queue
-import os
-import os.path
-import functools
-import collections
-import logging
-import time
-
-from . import textures
-from . import util
-from . import quadtree
-import c_overviewer
-
-"""
-This module has routines related to distributing the render job to multiple nodes
-
-"""
-
-def catch_keyboardinterrupt(func):
- """Decorator that catches a keyboardinterrupt and raises a real exception
- so that multiprocessing will propagate it properly"""
- @functools.wraps(func)
- def newfunc(*args, **kwargs):
- try:
- return func(*args, **kwargs)
- except KeyboardInterrupt:
- logging.error("Ctrl-C caught!")
- raise Exception("Exiting")
- except:
- import traceback
- traceback.print_exc()
- raise
- return newfunc
-
-child_rendernode = None
-def pool_initializer(rendernode):
- logging.debug("Child process {0}".format(os.getpid()))
-    # stash the rendernode object (which holds the quadtrees) in a global variable after fork() for windows compat.
- global child_rendernode
- child_rendernode = rendernode
-
- # make sure textures are generated for this process
- # and initialize c_overviewer
- textures.generate(path=rendernode.options.get('textures_path', None),
- north_direction=rendernode.options.get('north_direction', None))
- c_overviewer.init_chunk_render()
-
- # setup c_overviewer rendermode customs / options
- for mode in rendernode.builtin_custom_rendermodes:
- c_overviewer.add_custom_render_mode(mode, rendernode.builtin_custom_rendermodes[mode])
- for mode in rendernode.options.custom_rendermodes:
- c_overviewer.add_custom_render_mode(mode, rendernode.options.custom_rendermodes[mode])
- for mode in rendernode.options.rendermode_options:
- c_overviewer.set_render_mode_options(mode, rendernode.options.rendermode_options[mode])
-
- # load biome data in each process, if needed
- for qtree in rendernode.quadtrees:
- ## TODO biome stuffs
- pass
- #if qtree.world.useBiomeData:
- # # make sure we've at least *tried* to load the color arrays in this process...
- # textures.prepareBiomeData(qtree.world.worlddir)
- # if not textures.grasscolor or not textures.foliagecolor:
- # raise Exception("Can't find grasscolor.png or foliagecolor.png")
- # # only load biome data once
- # break
-
-
-class RenderNode(object):
- def __init__(self, quadtrees, options):
- """Distributes the rendering of a list of quadtrees.
-
- This class name is slightly misleading: it does not represent a worker
- process, it coordinates the rendering of the given quadtrees across
- many worker processes.
-
- This class tries not to make any assumptions on whether the given
- quadtrees share the same world or whether the given quadtrees share the
- same depth/structure. However, those assumptions have not been checked;
- quadtrees right now always share the same depth, structure, and
- associated world objects. Beware of mixing and matching quadtrees from
- different worlds!
-
- """
-
- if not len(quadtrees) > 0:
- raise ValueError("there must be at least one quadtree to work on")
-
- self.options = options
- # A list of quadtree.QuadTree objects representing each rendermode
- # requested
- self.quadtrees = quadtrees
- #List of changed tiles
- self.rendered_tiles = []
-
- #bind an index value to the quadtree so we can find it again
- #and figure out which worlds are where
- self.regionsets = []
- for i, q in enumerate(quadtrees):
- q._render_index = i
- if q.regionobj not in self.regionsets:
- self.regionsets.append(q.regionobj)
-
- # queue for receiving interesting events from the renderer
- # (like the discovery of signs!)
- # stash into the world object like we stash an index into the quadtree
- #
- # TODO: Managers spawn a sub-process to manage their objects. If p=1,
- # fall back to a non-managed queue (like Queue.Queue). (While the
- # management process won't do much processing, part of the point of p=1
- # is to ease debugging and profiling by keeping everything in one
- # process/thread)
- manager = multiprocessing.Manager()
- for world in self.regionsets:
- world.poi_q = manager.Queue()
-
- self._last_print_count = 0
- self._last_print_level = 0
- self._last_print_time = None
-
- def print_statusline(self, complete, total, level, unconditional=False):
- if unconditional:
- pass
- elif complete < 100:
- if not complete % 25 == 0:
- return
- elif complete < 1000:
- if not complete % 100 == 0:
- return
- else:
- if not complete % 1000 == 0:
- return
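-        # i.e. print every 25 tiles below 100 complete, every 100 below 1000,
-        # and every 1000 after that (or always, if unconditional)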
- logging.info("{0}/{1} ({4}%) tiles complete on level {2}/{3}".format(
- complete, total, level, self.max_p, '%.1f' % ( (100.0 * complete) / total) ))
-
- if logging.getLogger().isEnabledFor(logging.DEBUG):
- now = time.time()
- if self._last_print_level == level:
- deltacount = complete - self._last_print_count
- deltat = now - self._last_print_time
- if deltat > 0.03: # prevent very small numbers from producing weird averages. 0.03 chosen empirically
- avg = deltacount / deltat
- logging.debug("%i tiles rendered in %.1f seconds. Avg: %.1f tiles per sec",
- deltacount, deltat, avg)
-
- self._last_print_level = level
- self._last_print_count = complete
- self._last_print_time = now
- elif unconditional:
- self._last_print_level = level
- self._last_print_count = complete
- self._last_print_time = now
-
- def go(self, procs):
- """Renders all tiles"""
-
- # Signal to the quadtrees to scan the chunks and their respective tile
- # directories to find what needs to be rendered. We get from this the
- # total tiles that need to be rendered (at the highest level across all
- # quadtrees) as well as a list of [qtree, DirtyTiles object]
- total_worldtiles, dirty_list = self._get_dirty_tiles(procs)
-
- # Create a pool
- logging.debug("Parent process {0}".format(os.getpid()))
- if procs == 1:
- pool = FakePool()
- pool_initializer(self)
- else:
- pool_initializer(self)
- pool = multiprocessing.Pool(processes=procs,initializer=pool_initializer,initargs=(self,))
-
- #warm up the pool so it reports all the worker id's
- if logging.getLogger().level >= 10:
- pool.map(bool,xrange(multiprocessing.cpu_count()),1)
- else:
- pool.map_async(bool,xrange(multiprocessing.cpu_count()),1)
-
- # The list of quadtrees. There is 1 quadtree object per rendermode
- # requested
- quadtrees = self.quadtrees
-
- # Find the max zoom level (max_p). Even though each quadtree will
- # always have the same zoom level with the current implementation, this
- # bit of code does not make that assumption.
- # max_p is stored in the instance so self.print_statusline can see it
- max_p = 0
- for q in quadtrees:
- if q.p > max_p:
- max_p = q.p
- self.max_p = max_p
-
- # Set a reasonable batch size. Groups of tiles are sent to workers in
- # batches this large. It should be a multiple of the number of
- # quadtrees so that each worker gets corresponding tiles from each
- # quadtree in the typical case.
- batch_size = 4*len(quadtrees)
- while batch_size < 10:
- batch_size *= 2
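-        # e.g. one quadtree: 4 -> 8 -> 16; three quadtrees: 12 and the loop never runs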
- logging.debug("Will push tiles to worker processes in batches of %s", batch_size)
-
- # The next sections of code render the highest zoom level of tiles. The
-        # section after renders the other levels.
- logging.info("")
- logging.info("Rendering highest zoom level of tiles now.")
- logging.info("Rendering {0} rendermode{1}".format(len(quadtrees),'s' if len(quadtrees) > 1 else '' ))
- logging.info("Started {0} worker process{1}".format(
- procs, "es" if procs != 1 else ""))
- logging.info("There are {0} tiles to render at this level".format(total_worldtiles))
- logging.info("There are {0} total levels".format(self.max_p))
-
- # results is a queue of multiprocessing.AsyncResult objects. They are
- # appended to the end and held in the queue until they are pop'd and
- # the results collected.
- # complete holds the tally of the number of tiles rendered. Each
- # results object returns the number of tiles rendered and is
- # accumulated in complete
- results = collections.deque()
- complete = 0
-
- # Iterate over _apply_render_worldtiles(). That generator method
- # dispatches batches of tiles to the workers and yields results
- # objects. multiprocessing.AsyncResult objects are lazy objects that
- # are used to access the values returned by the worker's function,
- # which in this case, is render_worldtile_batch()
- timestamp = time.time()
-
- if total_worldtiles > 0:
- self.print_statusline(0, total_worldtiles, 1, True)
-
- for result in self._apply_render_worldtiles(dirty_list, pool, batch_size):
- results.append(result)
-
- # The results objects are lazy. The workers will process an item in
- # the pool when they get to it, and when we call result.get() it
-            # blocks until the result is ready. We don't want to add *all* the
-            # tiles to the pool because we'd have to hold every result object in
- # memory. So we add a few batches to the pool / result objects to
- # the results queue, then drain the results queue, and repeat.
-
- # every second drain some of the queue
- timestamp2 = time.time()
- if timestamp2 >= timestamp + 1:
- timestamp = timestamp2
- count_to_remove = (1000//batch_size)
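-                # e.g. with batch_size=16 this removes 1000//16 = 62 batches per drain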
-
-                # If there are more than count_to_remove items in the results
-                # queue, drain the point of interest queue and pop count_to_remove
-                # items off the results queue
- if count_to_remove < len(results):
- # Drain the point of interest queue for each world
-                    for world in self.regionsets:
- try:
- while (1):
- # an exception will break us out of this loop
- item = world.poi_q.get(block=False)
- if item[0] == "newpoi":
- if item[1] not in world.POI:
- #print "got an item from the queue!"
- world.POI.append(item[1])
- elif item[0] == "removePOI":
- world.persistentData['POI'] = filter(
- lambda x: x['chunk'] != item[1],
- world.persistentData['POI']
- )
-
- elif item[0] == "rendered":
- self.rendered_tiles.append(item[1])
-
- except Queue.Empty:
- pass
- # Now drain the results queue. results has more than
- # count_to_remove items in it (as checked above)
- while count_to_remove > 0:
- count_to_remove -= 1
- complete += results.popleft().get()
- self.print_statusline(complete, total_worldtiles, 1)
-
- # If the results queue is getting too big, drain all but
- # 500//batch_size items from it
- if len(results) > (10000//batch_size):
- # Empty the queue before adding any more, so that memory
- # required has an upper bound
- while len(results) > (500//batch_size):
- complete += results.popleft().get()
- self.print_statusline(complete, total_worldtiles, 1)
-
- # Loop back to the top, add more items to the queue, and repeat
-
- # Added all there is to add to the workers. Wait for the rest of the
- # results to come in before continuing
- while len(results) > 0:
- complete += results.popleft().get()
- self.print_statusline(complete, total_worldtiles, 1)
-
- # Now drain the point of interest queues for each world
-        for world in self.regionsets:
- try:
- while (1):
- # an exception will break us out of this loop
- item = world.poi_q.get(block=False)
- if item[0] == "newpoi":
- if item[1] not in world.POI:
- #print "got an item from the queue!"
- world.POI.append(item[1])
- elif item[0] == "removePOI":
- world.persistentData['POI'] = filter(lambda x: x['chunk'] != item[1], world.persistentData['POI'])
- elif item[0] == "rendered":
- self.rendered_tiles.append(item[1])
-
- except Queue.Empty:
- pass
-
- # Print the final status line almost unconditionally
- if total_worldtiles > 0:
- self.print_statusline(complete, total_worldtiles, 1, True)
-
- ##########################################
- # The highest zoom level has been rendered.
- # Now do the lower zoom levels, working our way down to level 1
- for zoom in xrange(self.max_p-1, 0, -1):
- # "level" counts up for the status output
- level = self.max_p - zoom + 1
-
- assert len(results) == 0
-
- # Reset these for this zoom level
- complete = 0
- total = 0
-
- # Count up the total tiles to render at this zoom level
- for q in quadtrees:
- if zoom <= q.p:
- total += 4**zoom
-
- logging.info("Starting level {0}".format(level))
- timestamp = time.time()
-
- self.print_statusline(0, total, level, True)
-
- # Same deal as above. _apply_render_compositetile adds tiles in batch
- # to the worker pool and yields result objects that return the
- # number of tiles rendered.
- #
- # XXX Some quadtrees may not have tiles at this zoom level if we're
- # not assuming they all have the same depth!!
- for result in self._apply_render_compositetile(pool, zoom,batch_size):
- results.append(result)
- # every second drain some of the queue
- timestamp2 = time.time()
- if timestamp2 >= timestamp + 1:
- timestamp = timestamp2
- count_to_remove = (1000//batch_size)
- if count_to_remove < len(results):
- while count_to_remove > 0:
- count_to_remove -= 1
- complete += results.popleft().get()
- self.print_statusline(complete, total, level)
- if len(results) > (10000//batch_size):
- while len(results) > (500//batch_size):
- complete += results.popleft().get()
- self.print_statusline(complete, total, level)
- # Empty the queue
- while len(results) > 0:
- complete += results.popleft().get()
- self.print_statusline(complete, total, level)
-
- self.print_statusline(complete, total, level, True)
-
- logging.info("Done")
-
- pool.close()
- pool.join()
-
- # Do the final one right here:
- for q in quadtrees:
- q.render_compositetile(os.path.join(q.destdir, q.tiledir), "base")
-
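# A quick worked example of the per-level totals computed above, assuming
# only what the loop itself does: a quadtree of depth p contributes 4**zoom
# composite tiles at any zoom level it reaches (zoom <= p).
def tiles_at_zoom(quadtree_depths, zoom):
    return sum(4 ** zoom for p in quadtree_depths if zoom <= p)

assert tiles_at_zoom([3, 5], 3) == 4 ** 3 + 4 ** 3   # both trees reach zoom 3
assert tiles_at_zoom([3, 5], 4) == 4 ** 4            # only the depth-5 tree does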
- def _get_dirty_tiles(self, procs):
- """Returns two items:
- 1) The total number of tiles needing rendering
- 2) A list of (qtree, DirtyTiles) objects holding which tiles in the
- respective quadtrees need to be rendered
-
- """
- all_dirty = []
- total = 0
- numqtrees = len(self.quadtrees)
- procs = min(procs, numqtrees)
-
- # Create a private pool to do the chunk scanning. I purposefully don't
- # use the same pool as the rendering. The process of chunk scanning
- # seems to take a lot of memory. Even though the final tree only takes
- # a few megabytes at most, I suspect memory fragmentation causes the
- # process to take much more memory than that during the scanning
- # process. Since we use a private pool just for this purpose, the trees
- # are piped back to the master process and the fragmented
- # memory-hogging processes exit, returning that extra memory to the OS.
- if procs == 1:
- pool = FakePool()
- else:
- pool = multiprocessing.Pool(processes=procs)
-
- logging.info("Scanning chunks and determining tiles to update for each rendermode requested.")
- logging.info("Doing %s scan%s in %s worker process%s",
- numqtrees, "s" if numqtrees != 1 else "",
- procs, "es" if procs != 1 else "",
- )
-
- # Push all scan jobs to the workers
- results = []
- for q in self.quadtrees:
- r = pool.apply_async(scan_quadtree_chunks, (q,))
- results.append(r)
- pool.close()
-
- # Wait for workers to finish
- for q, r in zip(self.quadtrees, results):
- dirty, numtiles = r.get()
- total += numtiles
- all_dirty.append((q, dirty))
- pool.join() # ought to be redundant
-
- logging.info("%s finished. %s %s to be rendered at the highest level",
- "All scans" if numqtrees != 1 else "Scan",
- total,
- # Probably won't happen, but just in case:
- "total tiles need" if total != 1 else "tile needs",
- )
- return total, all_dirty
-
- def _apply_render_worldtiles(self, tileset, pool, batch_size):
- """This generator method dispatches batches of tiles to the given
- worker pool with the function render_worldtile_batch(). It yields
- multiprocessing.AsyncResult objects; each result's get() returns the
- number of tiles rendered in that batch.
-
- tileset is a list of (QuadtreeGen object, DirtyTiles object)
-
- Returns an iterator over result objects. Each time a new result is
- requested, a new batch of tasks is added to the pool and a result
- object is returned.
- """
- # Make sure batch_size is a sane value
- if batch_size < len(self.quadtrees):
- batch_size = len(self.quadtrees)
-
- # tileset is a list of (quadtreegen object, dirtytiles tree object)
- # We want: a sequence of iterators that each iterate over
- # [qtree obj, tile obj] items
- def mktileiterable(qtree, dtiletree):
- return ([qtree, quadtree.Tile.from_path(tilepath)] for tilepath in dtiletree.iterate_dirty())
- iterables = []
- for qtree, dtiletree in tileset:
- tileiterable = mktileiterable(qtree, dtiletree)
- iterables.append(tileiterable)
-
- # batch is a list of (qtree index, Tile object). The list is filled
- # incrementally; once it reaches batch_size items, it is sent off to
- # the pool.
- batch = []
-
- # Round-robin tiles from each quadtree into the batch, so the jobs in a batch work on roughly the same chunks
- for job in util.roundrobin(iterables):
- # Fix up the job so the worker knows which quadtree this is. It's a
- # bit of a hack, but it avoids re-sending the qtree objects to the
- # workers with every job.
- job[0] = job[0]._render_index
- # Put this in the batch to be submitted to the pool
- batch.append(job)
- if len(batch) >= batch_size:
- yield pool.apply_async(func=render_worldtile_batch, args=[batch])
- batch = []
- if len(batch):
- yield pool.apply_async(func=render_worldtile_batch, args=[batch])
-
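# util.roundrobin itself is not part of this diff; an interleaver along these
# lines (essentially the classic itertools recipe, adapted to take a single
# list argument) would give the behaviour the comments above rely on: one job
# from each quadtree's iterable in turn, so a batch touches similar chunks
# across all rendermodes.
from itertools import cycle, islice

def roundrobin(iterables):
    """roundrobin([['A','B','C'], ['D'], ['E','F']]) yields A D E B F C."""
    iterators = cycle(iter(it) for it in iterables)
    pending = len(iterables)
    while pending:
        try:
            for it in iterators:
                yield next(it)
        except StopIteration:
            # Drop the exhausted iterator and keep cycling over the rest
            pending -= 1
            iterators = cycle(islice(iterators, pending))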
- def _apply_render_compositetile(self, pool, zoom, batch_size):
- """Same as _apply_render_worltiles but for the compositetile routine.
- Returns an iterator that yields result objects from tasks that have
- been applied to the pool.
- """
-
- if batch_size < len(self.quadtrees):
- batch_size = len(self.quadtrees)
- batch = []
- jobcount = 0
- # Round-robin tiles from each quadtree into the batch, so the jobs in a batch work on roughly the same chunks
- iterables = [q.get_compositetiles(zoom) for q in self.quadtrees if zoom <= q.p]
- for job in util.roundrobin(iterables):
- # fixup so the worker knows which quadtree this is
- job[0] = job[0]._render_index
- # Put this in the batch to be submitted to the pool
- batch.append(job)
- jobcount += 1
- if jobcount >= batch_size:
- jobcount = 0
- yield pool.apply_async(func=render_compositetile_batch, args=[batch])
- batch = []
-
- if jobcount > 0:
- yield pool.apply_async(func=render_compositetile_batch, args=[batch])
-
-
-########################################################################################
-# The following three functions are entry points for workers in the multiprocessing pool
-
-@catch_keyboardinterrupt
-def render_worldtile_batch(batch):
- """Main entry point for workers processing a render-tile (also called a
- world tile). Returns the number of tiles rendered, which is the length of
- the batch list passed in.
-
- batch should be a list of (qtree index, tile object)
-
- """
- # batch is a list of items to process. Each item is [quadtree_id, Tile object]
- global child_rendernode
- rendernode = child_rendernode
- count = 0
- #logging.debug("{0} working on batch of size {1}".format(os.getpid(),len(batch)))
- for job in batch:
- count += 1
- quadtree = rendernode.quadtrees[job[0]]
- tile = job[1]
-
- quadtree.render_worldtile(tile)
- return count
-
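# child_rendernode is a module-level global that each worker reads instead of
# having the RenderNode pickled and shipped with every batch. The initializer
# that sets it is not shown in this section; a sketch of the usual pattern,
# with illustrative names (pool_initializer and make_render_pool are
# assumptions, not code from this file):
import multiprocessing

child_rendernode = None

def pool_initializer(rendernode):
    global child_rendernode
    child_rendernode = rendernode          # set once in each worker process

def make_render_pool(rendernode, procs):
    return multiprocessing.Pool(processes=procs,
                                initializer=pool_initializer,
                                initargs=(rendernode,))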
-@catch_keyboardinterrupt
-def render_compositetile_batch(batch):
- global child_rendernode
- rendernode = child_rendernode
- count = 0
- #logging.debug("{0} working on batch of size {1}".format(os.getpid(),len(batch)))
- for job in batch:
- count += 1
- quadtree = rendernode.quadtrees[job[0]]
- dest = quadtree.full_tiledir+os.sep+job[1]
- quadtree.render_compositetile(dest=dest,name=job[2])
- return count
-
-@catch_keyboardinterrupt
-def scan_quadtree_chunks(qtree):
- """The entry point for workers when scanning chunks for tiles needing
- updating. Builds and returns a dirtytiles tree.
-
- Returns two things: the dirtytree from qtree.scan_chunks(), and the total
- from the tree.count() method
-
- """
- logging.debug("Scanning chunks for rendermode '%s'", qtree.rendermode)
- tree = qtree.scan_chunks()
- return tree, tree.count()
-
-class FakeResult(object):
- def __init__(self, res):
- self.res = res
- def get(self):
- return self.res
-class FakePool(object):
- """A fake pool used to render things in sync. Implements a subset of
- multiprocessing.Pool"""
- def apply_async(self, func, args=(), kwargs=None):
- if not kwargs:
- kwargs = {}
- result = func(*args, **kwargs)
- return FakeResult(result)
- def close(self):
- pass
- def join(self):
- pass
-
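# A quick illustration of how the FakePool above acts as a drop-in for
# multiprocessing.Pool when only one process is requested (assumes the
# FakeResult/FakePool classes defined above; pow is just an example task):
pool = FakePool()
result = pool.apply_async(pow, args=(3, 2))
assert result.get() == 9                   # pow(3, 2) ran inline, synchronously
pool.close()                               # no-ops, matching the real Pool API
pool.join()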
diff --git a/overviewer_core/world.py b/overviewer_core/world.py
index ffa6878..f84fe08 100644
--- a/overviewer_core/world.py
+++ b/overviewer_core/world.py
@@ -28,7 +28,6 @@ import time
import numpy
-import chunk
import nbt
import textures
import util
@@ -157,27 +156,23 @@ class World(object):
if spawnY > 127:
spawnY = 127
- try:
- ## The filename of this chunk
- chunkFile = self.get_region_path(chunkX, chunkY)
- if chunkFile is not None:
- data = nbt.load_from_region(chunkFile, chunkX, chunkY)
- if data is not None:
- level = data[1]['Level']
- blockArray = numpy.frombuffer(level['Blocks'], dtype=numpy.uint8).reshape((16,16,128))
-
- ## The block for spawn *within* the chunk
- inChunkX = spawnX - (chunkX*16)
- inChunkZ = spawnZ - (chunkY*16)
-
- ## find the first air block
- while (blockArray[inChunkX, inChunkZ, spawnY] != 0):
- spawnY += 1
- if spawnY == 128:
- break
- except chunk.ChunkCorrupt:
- #ignore corrupt spawn, and continue
- pass
+ ## The filename of this chunk
+ chunkFile = self.get_region_path(chunkX, chunkY)
+ if chunkFile is not None:
+ data = nbt.load_from_region(chunkFile, chunkX, chunkY)
+ if data is not None:
+ level = data[1]['Level']
+ blockArray = numpy.frombuffer(level['Blocks'], dtype=numpy.uint8).reshape((16,16,128))
+
+ ## The block for spawn *within* the chunk
+ inChunkX = spawnX - (chunkX*16)
+ inChunkZ = spawnZ - (chunkY*16)
+
+ ## find the first air block
+ while (blockArray[inChunkX, inChunkZ, spawnY] != 0):
+ spawnY += 1
+ if spawnY == 128:
+ break
self.POI.append( dict(x=disp_spawnX, y=spawnY, z=disp_spawnZ,
msg="Spawn", type="spawn", chunk=(chunkX, chunkY)))
self.spawn = (disp_spawnX, spawnY, disp_spawnZ)
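# A minimal sketch of the spawn-height scan in the hunk above, using a
# fabricated 16x16x128 column in place of real chunk data: the Blocks byte
# array is reshaped to (X, Z, Y) and Y is advanced past non-air blocks
# (block id 0 is air; 128 is the world height in this format).
import numpy

def first_air_above(block_array, in_chunk_x, in_chunk_z, start_y):
    y = start_y
    while y < 128 and block_array[in_chunk_x, in_chunk_z, y] != 0:
        y += 1
    return min(y, 127)

blocks = numpy.zeros((16, 16, 128), dtype=numpy.uint8)
blocks[4, 9, 0:70] = 1                     # pretend solid ground up to Y=69
assert first_air_above(blocks, 4, 9, 64) == 70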