0

Initial work on generating markers.js from signposts

Details:
 * A queue object is passed to all renderers, allowing each process to
   avoid using shared memory when recording signpost data.
 * New overviewer.dat file that stores persistent data between runs.
   Currently used to store information on signs.  markers.js is
   generated by merging the stored POI list with the newly generated POI
   list.
 * POIs are tagged with their type (e.g. "spawn" or "sign").  This
   should be useful if different types of POIs need to be
   handled/displayed differently

Known bugs:
 * If you delete the last sign in a chunk, it won't be removed from
   markers.js
This commit is contained in:
Andrew Chin
2010-10-20 22:11:34 -04:00
parent 32ba6c4424
commit cb363df3cd
3 changed files with 105 additions and 9 deletions

View File

@@ -81,6 +81,11 @@ def get_blockdata_array(level):
in a similar manner to skylight data"""
return numpy.frombuffer(level['Data'], dtype=numpy.uint8).reshape((16,16,64))
def get_tileentity_data(level):
    """Return the 'TileEntities' TAG_List stored in the given chunk level dict."""
    return level['TileEntities']
def iterate_chunkblocks(xoff,yoff):
"""Iterates over the 16x16x128 blocks of a chunk in rendering order.
Yields (x,y,z,imgx,imgy)
@@ -100,12 +105,12 @@ def iterate_chunkblocks(xoff,yoff):
# Block IDs the renderer treats as transparent when drawing chunk tiles.
transparent_blocks = {
    0, 6, 8, 9, 18, 20, 37, 38, 39, 40, 44, 50, 51, 52, 53,
    59, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76,
    77, 78, 79, 81, 83, 85,
}
def render_and_save(chunkfile, cachedir, worldobj, cave=False):
def render_and_save(chunkfile, cachedir, worldobj, cave=False, queue=None):
"""Used as the entry point for the multiprocessing workers (since processes
can't target bound methods) or to easily render and save one chunk
Returns the image file location"""
a = ChunkRenderer(chunkfile, cachedir, worldobj)
a = ChunkRenderer(chunkfile, cachedir, worldobj, queue)
try:
return a.render_and_save(cave)
except ChunkCorrupt:
@@ -128,21 +133,29 @@ class ChunkCorrupt(Exception):
pass
class ChunkRenderer(object):
def __init__(self, chunkfile, cachedir, worldobj):
def __init__(self, chunkfile, cachedir, worldobj, queue):
"""Make a new chunk renderer for the given chunkfile.
chunkfile should be a full path to the .dat file to process
cachedir is a directory to save the resulting chunk images to
"""
self.queue = queue
if not os.path.exists(chunkfile):
raise ValueError("Could not find chunkfile")
self.chunkfile = chunkfile
destdir, filename = os.path.split(self.chunkfile)
filename_split = filename.split(".")
chunkcoords = filename_split[1:3]
chunkcoords = filename.split(".")[1:3]
self.coords = map(world.base36decode, chunkcoords)
self.blockid = ".".join(chunkcoords)
self.world = worldobj
# chunk coordinates (useful to converting local block coords to
# global block coords)
self.chunkX = int(filename_split[1], base=36)
self.chunkY = int(filename_split[2], base=36)
self.world = worldobj
# Cachedir here is the base directory of the caches. We need to go 2
# levels deeper according to the chunk file. Get the last 2 components
# of destdir and use that
@@ -293,7 +306,7 @@ class ChunkRenderer(object):
is up to date, this method doesn't render anything.
"""
blockid = self.blockid
oldimg, oldimg_path = self.find_oldimage(cave)
if oldimg:
@@ -474,6 +487,8 @@ class ChunkRenderer(object):
# Odd elements get the upper 4 bits
blockData_expanded[:,:,1::2] = blockData >> 4
tileEntities = get_tileentity_data(self.level)
# Each block is 24x24
# The next block on the X axis adds 12px to x and subtracts 6px from y in the image
@@ -504,6 +519,33 @@ class ChunkRenderer(object):
else:
t = textures.blockmap[blockid]
# see if we want to do anything else with this chunk
if blockid in (63, 68): # signs
# find the sign text from the TileEntities list
print "Found a sign!"
for entity in tileEntities:
if entity['id'] == 'Sign':
print "adding to POI list"
# TODO assert that the x,y,z of this entity matches
# the x,y,z of this block
# convert the blockID coordinates from local chunk
# coordinates to global world coordinates
newPOI = dict(type="sign",
x= x+(self.chunkX*16),
y= z,
z= y+(self.chunkY*16),
msg="%s\n%s\n%s\n%s" %
(entity['Text1'], entity['Text2'], entity['Text3'], entity['Text4']),
chunk= (self.chunkX, self.chunkY),
)
print "new POI: %s" % newPOI
self.queue.put(["newpoi", newPOI])
break
if not t:
continue

View File

@@ -25,6 +25,7 @@ import collections
import json
import logging
import util
import cPickle
from PIL import Image
@@ -143,12 +144,33 @@ class QuadtreeGen(object):
if not os.path.exists(tileDir): os.mkdir(tileDir)
blank.save(os.path.join(tileDir, "blank."+self.imgformat))
if skipjs:
return
# since we will only discover PointsOfInterest in chunks that need to be
# [re]rendered, POIs like signs in unchanged chunks will not be listed
# in self.world.POI. To make sure we don't remove these from markers.js
# we need to merge self.world.POI with the persistent data in self.world.persistentData
#
modifiedChunks = map(lambda x: x['chunk'], filter(lambda x: x['type'] != 'spawn', self.world.POI))
for item in self.world.persistentData['POI']:
# if this previously discovered POI isn't in a modified chunk, keep it
if item['chunk'] not in modifiedChunks and item['type'] != 'spawn':
self.world.POI.append(item)
# else discard it, because self.world.POI will contain it (or not if it
# was deleted)
# write out the default marker table
with open(os.path.join(self.destdir, "markers.js"), 'w') as output:
output.write("var markerData=%s" % json.dumps(self.world.POI))
# save persistent data
self.world.persistentData['POI'] = self.world.POI
with open(self.world.pickleFile,"wb") as f:
cPickle.dump(self.world.persistentData,f)
# write out the default (empty, but documented) region table
with open(os.path.join(self.destdir, "regions.js"), 'w') as output:

View File

@@ -19,6 +19,7 @@ import os.path
import multiprocessing
import sys
import logging
import cPickle
import numpy
@@ -105,6 +106,20 @@ class WorldRenderer(object):
# a list of dictionaries, see below for an example
self.POI = []
# if it exists, open overviewer.dat, and read in the data structure
# into self.persistentData. This dictionary can hold any information
# that may be needed between runs.
# Currently only holds info about POIs (for more details, see quadtree)
self.pickleFile = os.path.join(self.cachedir,"overviewer.dat")
if os.path.exists(self.pickleFile):
with open(self.pickleFile,"rb") as p:
self.persistentData = cPickle.load(p)
else:
# some defaults
self.persistentData = dict(POI=[])
def _get_chunk_renderset(self):
"""Returns a set of (col, row) chunks that should be rendered. Returns
None if all chunks should be rendered"""
@@ -180,7 +195,8 @@ class WorldRenderer(object):
spawnY += 1
self.POI.append( dict(x=spawnX, y=spawnY, z=spawnZ, msg="Spawn"))
self.POI.append( dict(x=spawnX, y=spawnY, z=spawnZ,
msg="Spawn", type="spawn", chunk=(inChunkX,inChunkZ)))
def go(self, procs):
"""Starts the render. This returns when it is finished"""
@@ -242,6 +258,9 @@ class WorldRenderer(object):
inclusion_set = self._get_chunk_renderset()
results = {}
manager = multiprocessing.Manager()
q = manager.Queue()
if processes == 1:
# Skip the multiprocessing stuff
logging.debug("Rendering chunks synchronously since you requested 1 process")
@@ -254,9 +273,15 @@ class WorldRenderer(object):
results[(col, row)] = imgpath
continue
result = chunk.render_and_save(chunkfile, self.cachedir, self, cave=self.caves)
result = chunk.render_and_save(chunkfile, self.cachedir, self, cave=self.caves, queue=q)
results[(col, row)] = result
if i > 0:
try:
item = q.get(block=False)
if item[0] == "newpoi":
self.POI.append(item[1])
except:
pass
if 1000 % i == 0 or i % 1000 == 0:
logging.info("{0}/{1} chunks rendered".format(i, len(chunks)))
else:
@@ -274,13 +299,20 @@ class WorldRenderer(object):
result = pool.apply_async(chunk.render_and_save,
args=(chunkfile,self.cachedir,self),
kwds=dict(cave=self.caves))
kwds=dict(cave=self.caves, queue=q))
asyncresults.append((col, row, result))
pool.close()
for i, (col, row, result) in enumerate(asyncresults):
results[(col, row)] = result.get()
try:
item = q.get(block=False)
if item[0] == "newpoi":
self.POI.append(item[1])
except:
pass
if i > 0:
if 1000 % i == 0 or i % 1000 == 0:
logging.info("{0}/{1} chunks rendered".format(i, len(asyncresults)))