0

Initial work on generating markers.js from signposts

Details:
 * A queue object is passed to all renderers, allowing each process to
   avoid using shared memory when recording signpost data.
 * New overviewer.dat file that stores persistent data between runs.
   Currently used to store information on signs.  markers.js is
   generated by merging the stored POI list with the newly generated POI
   list.
 * POIs are tagged with their type (e.g. "spawn" or "sign").  This
   should be useful if different types of POIs need to be
   handled/displayed differently

Known bugs:
 * If you delete the last sign in a chunk, it won't be removed from
   markers.js
This commit is contained in:
Andrew Chin
2010-10-20 22:11:34 -04:00
parent 32ba6c4424
commit cb363df3cd
3 changed files with 105 additions and 9 deletions

View File

@@ -81,6 +81,11 @@ def get_blockdata_array(level):
in a similar manner to skylight data""" in a similar manner to skylight data"""
return numpy.frombuffer(level['Data'], dtype=numpy.uint8).reshape((16,16,64)) return numpy.frombuffer(level['Data'], dtype=numpy.uint8).reshape((16,16,64))
def get_tileentity_data(level):
    """Return the TileEntities TAG_List stored in the chunk's level tag."""
    return level['TileEntities']
def iterate_chunkblocks(xoff,yoff): def iterate_chunkblocks(xoff,yoff):
"""Iterates over the 16x16x128 blocks of a chunk in rendering order. """Iterates over the 16x16x128 blocks of a chunk in rendering order.
Yields (x,y,z,imgx,imgy) Yields (x,y,z,imgx,imgy)
@@ -100,12 +105,12 @@ def iterate_chunkblocks(xoff,yoff):
transparent_blocks = set([0, 6, 8, 9, 18, 20, 37, 38, 39, 40, 44, 50, 51, 52, 53, transparent_blocks = set([0, 6, 8, 9, 18, 20, 37, 38, 39, 40, 44, 50, 51, 52, 53,
59, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 81, 83, 85]) 59, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 81, 83, 85])
def render_and_save(chunkfile, cachedir, worldobj, cave=False): def render_and_save(chunkfile, cachedir, worldobj, cave=False, queue=None):
"""Used as the entry point for the multiprocessing workers (since processes """Used as the entry point for the multiprocessing workers (since processes
can't target bound methods) or to easily render and save one chunk can't target bound methods) or to easily render and save one chunk
Returns the image file location""" Returns the image file location"""
a = ChunkRenderer(chunkfile, cachedir, worldobj) a = ChunkRenderer(chunkfile, cachedir, worldobj, queue)
try: try:
return a.render_and_save(cave) return a.render_and_save(cave)
except ChunkCorrupt: except ChunkCorrupt:
@@ -128,21 +133,29 @@ class ChunkCorrupt(Exception):
pass pass
class ChunkRenderer(object): class ChunkRenderer(object):
def __init__(self, chunkfile, cachedir, worldobj): def __init__(self, chunkfile, cachedir, worldobj, queue):
"""Make a new chunk renderer for the given chunkfile. """Make a new chunk renderer for the given chunkfile.
chunkfile should be a full path to the .dat file to process chunkfile should be a full path to the .dat file to process
cachedir is a directory to save the resulting chunk images to cachedir is a directory to save the resulting chunk images to
""" """
self.queue = queue
if not os.path.exists(chunkfile): if not os.path.exists(chunkfile):
raise ValueError("Could not find chunkfile") raise ValueError("Could not find chunkfile")
self.chunkfile = chunkfile self.chunkfile = chunkfile
destdir, filename = os.path.split(self.chunkfile) destdir, filename = os.path.split(self.chunkfile)
filename_split = filename.split(".")
chunkcoords = filename_split[1:3]
chunkcoords = filename.split(".")[1:3]
self.coords = map(world.base36decode, chunkcoords) self.coords = map(world.base36decode, chunkcoords)
self.blockid = ".".join(chunkcoords) self.blockid = ".".join(chunkcoords)
self.world = worldobj
# chunk coordinates (useful for converting local block coords to
# global block coords)
self.chunkX = int(filename_split[1], base=36)
self.chunkY = int(filename_split[2], base=36)
self.world = worldobj
# Cachedir here is the base directory of the caches. We need to go 2 # Cachedir here is the base directory of the caches. We need to go 2
# levels deeper according to the chunk file. Get the last 2 components # levels deeper according to the chunk file. Get the last 2 components
# of destdir and use that # of destdir and use that
@@ -474,6 +487,8 @@ class ChunkRenderer(object):
# Odd elements get the upper 4 bits # Odd elements get the upper 4 bits
blockData_expanded[:,:,1::2] = blockData >> 4 blockData_expanded[:,:,1::2] = blockData >> 4
tileEntities = get_tileentity_data(self.level)
# Each block is 24x24 # Each block is 24x24
# The next block on the X axis adds 12px to x and subtracts 6px from y in the image # The next block on the X axis adds 12px to x and subtracts 6px from y in the image
@@ -504,6 +519,33 @@ class ChunkRenderer(object):
else: else:
t = textures.blockmap[blockid] t = textures.blockmap[blockid]
# see if we want to do anything else with this chunk
if blockid in (63, 68): # signs
# find the sign text from the TileEntities list
print "Found a sign!"
for entity in tileEntities:
if entity['id'] == 'Sign':
print "adding to POI list"
# TODO assert that the x,y,z of this entity matches
# the x,y,z of this block
# convert the blockID coordinates from local chunk
# coordinates to global world coordinates
newPOI = dict(type="sign",
x= x+(self.chunkX*16),
y= z,
z= y+(self.chunkY*16),
msg="%s\n%s\n%s\n%s" %
(entity['Text1'], entity['Text2'], entity['Text3'], entity['Text4']),
chunk= (self.chunkX, self.chunkY),
)
print "new POI: %s" % newPOI
self.queue.put(["newpoi", newPOI])
break
if not t: if not t:
continue continue

View File

@@ -25,6 +25,7 @@ import collections
import json import json
import logging import logging
import util import util
import cPickle
from PIL import Image from PIL import Image
@@ -143,13 +144,34 @@ class QuadtreeGen(object):
if not os.path.exists(tileDir): os.mkdir(tileDir) if not os.path.exists(tileDir): os.mkdir(tileDir)
blank.save(os.path.join(tileDir, "blank."+self.imgformat)) blank.save(os.path.join(tileDir, "blank."+self.imgformat))
if skipjs: if skipjs:
return return
# since we will only discover PointsOfInterest in chunks that need to be
# [re]rendered, POIs like signs in unchanged chunks will not be listed
# in self.world.POI. To make sure we don't remove these from markers.js
# we need to merge self.world.POI with the persistent data in self.world.persistentData
#
modifiedChunks = map(lambda x: x['chunk'], filter(lambda x: x['type'] != 'spawn', self.world.POI))
for item in self.world.persistentData['POI']:
# if this previously discovered POI isn't in a modified chunk, keep it
if item['chunk'] not in modifiedChunks and item['type'] != 'spawn':
self.world.POI.append(item)
# else discard it, because self.world.POI will contain it (or not if it
# was deleted)
# write out the default marker table # write out the default marker table
with open(os.path.join(self.destdir, "markers.js"), 'w') as output: with open(os.path.join(self.destdir, "markers.js"), 'w') as output:
output.write("var markerData=%s" % json.dumps(self.world.POI)) output.write("var markerData=%s" % json.dumps(self.world.POI))
# save persistent data
self.world.persistentData['POI'] = self.world.POI
with open(self.world.pickleFile,"wb") as f:
cPickle.dump(self.world.persistentData,f)
# write out the default (empty, but documented) region table # write out the default (empty, but documented) region table
with open(os.path.join(self.destdir, "regions.js"), 'w') as output: with open(os.path.join(self.destdir, "regions.js"), 'w') as output:
output.write('var regionData=[\n') output.write('var regionData=[\n')

View File

@@ -19,6 +19,7 @@ import os.path
import multiprocessing import multiprocessing
import sys import sys
import logging import logging
import cPickle
import numpy import numpy
@@ -105,6 +106,20 @@ class WorldRenderer(object):
# a list of dictionaries, see below for an example # a list of dictionaries, see below for an example
self.POI = [] self.POI = []
# if it exists, open overviewer.dat, and read in the data structure
# into self.persistentData. This dictionary can hold any information
# that may be needed between runs.
# Currently only holds info about POIs (for more details, see quadtree)
self.pickleFile = os.path.join(self.cachedir,"overviewer.dat")
if os.path.exists(self.pickleFile):
with open(self.pickleFile,"rb") as p:
self.persistentData = cPickle.load(p)
else:
# some defaults
self.persistentData = dict(POI=[])
def _get_chunk_renderset(self): def _get_chunk_renderset(self):
"""Returns a set of (col, row) chunks that should be rendered. Returns """Returns a set of (col, row) chunks that should be rendered. Returns
None if all chunks should be rendered""" None if all chunks should be rendered"""
@@ -180,7 +195,8 @@ class WorldRenderer(object):
spawnY += 1 spawnY += 1
self.POI.append( dict(x=spawnX, y=spawnY, z=spawnZ, msg="Spawn")) self.POI.append( dict(x=spawnX, y=spawnY, z=spawnZ,
msg="Spawn", type="spawn", chunk=(inChunkX,inChunkZ)))
def go(self, procs): def go(self, procs):
"""Starts the render. This returns when it is finished""" """Starts the render. This returns when it is finished"""
@@ -242,6 +258,9 @@ class WorldRenderer(object):
inclusion_set = self._get_chunk_renderset() inclusion_set = self._get_chunk_renderset()
results = {} results = {}
manager = multiprocessing.Manager()
q = manager.Queue()
if processes == 1: if processes == 1:
# Skip the multiprocessing stuff # Skip the multiprocessing stuff
logging.debug("Rendering chunks synchronously since you requested 1 process") logging.debug("Rendering chunks synchronously since you requested 1 process")
@@ -254,9 +273,15 @@ class WorldRenderer(object):
results[(col, row)] = imgpath results[(col, row)] = imgpath
continue continue
result = chunk.render_and_save(chunkfile, self.cachedir, self, cave=self.caves) result = chunk.render_and_save(chunkfile, self.cachedir, self, cave=self.caves, queue=q)
results[(col, row)] = result results[(col, row)] = result
if i > 0: if i > 0:
try:
item = q.get(block=False)
if item[0] == "newpoi":
self.POI.append(item[1])
except:
pass
if 1000 % i == 0 or i % 1000 == 0: if 1000 % i == 0 or i % 1000 == 0:
logging.info("{0}/{1} chunks rendered".format(i, len(chunks))) logging.info("{0}/{1} chunks rendered".format(i, len(chunks)))
else: else:
@@ -274,13 +299,20 @@ class WorldRenderer(object):
result = pool.apply_async(chunk.render_and_save, result = pool.apply_async(chunk.render_and_save,
args=(chunkfile,self.cachedir,self), args=(chunkfile,self.cachedir,self),
kwds=dict(cave=self.caves)) kwds=dict(cave=self.caves, queue=q))
asyncresults.append((col, row, result)) asyncresults.append((col, row, result))
pool.close() pool.close()
for i, (col, row, result) in enumerate(asyncresults): for i, (col, row, result) in enumerate(asyncresults):
results[(col, row)] = result.get() results[(col, row)] = result.get()
try:
item = q.get(block=False)
if item[0] == "newpoi":
self.POI.append(item[1])
except:
pass
if i > 0: if i > 0:
if 1000 % i == 0 or i % 1000 == 0: if 1000 % i == 0 or i % 1000 == 0:
logging.info("{0}/{1} chunks rendered".format(i, len(asyncresults))) logging.info("{0}/{1} chunks rendered".format(i, len(asyncresults)))