
Merge branch 'master' into snapshot

Andrew Chin committed 2014-08-26 11:55:00 -04:00
21 changed files with 871 additions and 348 deletions

View File

@@ -15,12 +15,15 @@ markers.js holds a list of which markerSets are attached to each tileSet
'''
import os
import time
import logging
import json
import sys
import re
import urllib2
import Queue
import multiprocessing
import gzip
from multiprocessing import Process
from multiprocessing import Pool
@@ -30,6 +33,8 @@ from overviewer_core import logger
from overviewer_core import nbt
from overviewer_core import configParser, world
UUID_LOOKUP_URL = 'https://sessionserver.mojang.com/session/minecraft/profile/'
def replaceBads(s):
"Replaces bad characters with good characters!"
bads = [" ", "(", ")"]
@@ -114,25 +119,78 @@ def handleEntities(rset, outputdir, render, rname, config):
logging.info("Done.")
def handlePlayers(rset, render, worldpath):
class PlayerDict(dict):
use_uuid = False
_name = ''
uuid_cache = None # A cache of the UUID->profile lookups
@classmethod
def load_cache(cls, outputdir):
cache_file = os.path.join(outputdir, "uuidcache.dat")
pid = multiprocessing.current_process().pid
if os.path.exists(cache_file):
gz = gzip.GzipFile(cache_file)
cls.uuid_cache = json.load(gz)
logging.info("Loaded UUID cache from %r with %d entries", cache_file, len(cls.uuid_cache.keys()))
else:
cls.uuid_cache = {}
logging.info("Initialized an empty UUID cache")
cls.save_cache(outputdir)
@classmethod
def save_cache(cls, outputdir):
cache_file = os.path.join(outputdir, "uuidcache.dat")
gz = gzip.GzipFile(cache_file, "wb")
json.dump(cls.uuid_cache, gz)
logging.info("Wrote UUID cache with %d entries", len(cls.uuid_cache.keys()))
def __getitem__(self, item):
if item == "EntityId":
if not super(PlayerDict, self).has_key("EntityId"):
if self.use_uuid:
super(PlayerDict, self).__setitem__("EntityId", self.get_name_from_uuid())
else:
super(PlayerDict, self).__setitem__("EntityId", self._name)
return super(PlayerDict, self).__getitem__(item)
def get_name_from_uuid(self):
sname = self._name.replace('-','')
try:
profile = PlayerDict.uuid_cache[sname]
return profile['name']
except (KeyError,):
pass
try:
profile = json.loads(urllib2.urlopen(UUID_LOOKUP_URL + sname).read())
if 'name' in profile:
PlayerDict.uuid_cache[sname] = profile
return profile['name']
except (ValueError, urllib2.URLError):
logging.warning("Unable to get player name for UUID %s", self._name)
def handlePlayers(rset, render, worldpath, outputdir):
if not hasattr(rset, "_pois"):
rset._pois = dict(TileEntities=[], Entities=[])
# only handle this region set once
if 'Players' in rset._pois:
return
dimension = None
try:
dimension = {None: 0,
'DIM-1': -1,
'DIM1': 1}[rset.get_type()]
except KeyError, e:
mystdim = re.match(r"^DIM_MYST(\d+)$", e.message) # Dirty hack. Woo!
if mystdim:
dimension = int(mystdim.group(1))
else:
raise
playerdir = os.path.join(worldpath, "players")
if rset.get_type():
dimension = int(re.match(r"^DIM(_MYST)?(-?\d+)$", rset.get_type()).group(2))
else:
dimension = 0
playerdir = os.path.join(worldpath, "playerdata")
useUUIDs = True
if not os.path.isdir(playerdir):
playerdir = os.path.join(worldpath, "players")
useUUIDs = False
if os.path.isdir(playerdir):
playerfiles = os.listdir(playerdir)
playerfiles = [x for x in playerfiles if x.endswith(".dat")]
@@ -143,32 +201,40 @@ def handlePlayers(rset, render, worldpath):
isSinglePlayer = True
rset._pois['Players'] = []
for playerfile in playerfiles:
try:
data = nbt.load(os.path.join(playerdir, playerfile))[1]
data = PlayerDict(nbt.load(os.path.join(playerdir, playerfile))[1])
data.use_uuid = useUUIDs
if isSinglePlayer:
data = data['Data']['Player']
except IOError:
logging.warning("Skipping bad player dat file %r", playerfile)
continue
playername = playerfile.split(".")[0]
if isSinglePlayer:
playername = 'Player'
data._name = playername
if data['Dimension'] == dimension:
# Position at last logout
data['id'] = "Player"
data['EntityId'] = playername
data['x'] = int(data['Pos'][0])
data['y'] = int(data['Pos'][1])
data['z'] = int(data['Pos'][2])
# Time at last logout, calculated from last time the player's file was modified
data['time'] = time.localtime(os.path.getmtime(os.path.join(playerdir, playerfile)))
rset._pois['Players'].append(data)
if "SpawnX" in data and dimension == 0:
# Spawn position (bed or main spawn)
spawn = {"id": "PlayerSpawn",
"EntityId": playername,
"x": data['SpawnX'],
"y": data['SpawnY'],
"z": data['SpawnZ']}
spawn = PlayerDict()
spawn._name = playername
spawn["id"] = "PlayerSpawn"
spawn["x"] = data['SpawnX']
spawn["y"] = data['SpawnY']
spawn["z"] = data['SpawnZ']
rset._pois['Players'].append(spawn)
def handleManual(rset, manualpois):
@@ -220,6 +286,8 @@ def main():
markersets = set()
markers = dict()
PlayerDict.load_cache(destdir)
for rname, render in config['renders'].iteritems():
try:
worldpath = config['worlds'][render['world']]
@@ -259,7 +327,7 @@ def main():
if not options.skipscan:
handleEntities(rset, os.path.join(destdir, rname), render, rname, config)
handlePlayers(rset, render, worldpath)
handlePlayers(rset, render, worldpath, destdir)
handleManual(rset, render['manualpois'])
logging.info("Done handling POIs")
@@ -370,6 +438,8 @@ def main():
markerSetDict[name]['raw'].append(d)
#print markerSetDict
PlayerDict.save_cache(destdir)
with open(os.path.join(destdir, "markersDB.js"), "w") as output:
output.write("var markersDB=")
json.dump(markerSetDict, output, indent=2)
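
The hunk above adds a gzip-backed UUID cache and a lookup against Mojang's session server so player POIs can be labelled with current names. For illustration only, a minimal sketch of that lookup flow (Python 2, matching the codebase; lookup_name and uuid_cache are hypothetical names, while the URL and error handling are taken from the diff):

import json
import urllib2

UUID_LOOKUP_URL = 'https://sessionserver.mojang.com/session/minecraft/profile/'
uuid_cache = {}  # genPOI persists this to uuidcache.dat with gzip + json

def lookup_name(uuid):
    key = uuid.replace('-', '')           # the API wants the UUID without dashes
    if key in uuid_cache:
        return uuid_cache[key]['name']
    try:
        profile = json.loads(urllib2.urlopen(UUID_LOOKUP_URL + key).read())
    except (ValueError, urllib2.URLError):
        return None                       # offline, rate limited, or malformed reply
    if 'name' in profile:
        uuid_cache[key] = profile         # cache the whole profile, as PlayerDict does
        return profile['name']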

View File

@@ -119,7 +119,7 @@ overviewer.util = {
zoom = overviewer.mapView.options.currentTileSet.get('minZoom');
} else {
zoom = parseInt(zoom);
if (zoom < 0 && zoom + overviewer.mapView.options.currentTileSet.get('maxZoom') >= 0) {
if (zoom < 0) {
// if zoom is negative, treat it as a "zoom out from max"
zoom += overviewer.mapView.options.currentTileSet.get('maxZoom');
} else {
@@ -127,6 +127,13 @@ overviewer.util = {
zoom = overviewer.mapView.options.currentTileSet.get('defaultZoom');
}
}
// clip zoom
if (zoom > overviewer.mapView.options.currentTileSet.get('maxZoom'))
zoom = overviewer.mapView.options.currentTileSet.get('maxZoom');
if (zoom < overviewer.mapView.options.currentTileSet.get('minZoom'))
zoom = overviewer.mapView.options.currentTileSet.get('minZoom');
overviewer.map.setZoom(zoom);
}
@@ -512,9 +519,9 @@ overviewer.util = {
}
if (zoom == currTileset.get('maxZoom')) {
if (zoom >= currTileset.get('maxZoom')) {
zoom = 'max';
} else if (zoom == currTileset.get('minZoom')) {
} else if (zoom <= currTileset.get('minZoom')) {
zoom = 'min';
} else {
// default to (map-update friendly) negative zooms
@@ -556,7 +563,7 @@ overviewer.util = {
zoom = tsetModel.get('minZoom');
} else {
zoom = parseInt(zoom);
if (zoom < 0 && zoom + tsetModel.get('maxZoom') >= 0) {
if (zoom < 0) {
// if zoom is negative, treat it as a "zoom out from max"
zoom += tsetModel.get('maxZoom');
} else {
@@ -565,6 +572,12 @@ overviewer.util = {
}
}
// clip zoom
if (zoom > tsetModel.get('maxZoom'))
zoom = tsetModel.get('maxZoom');
if (zoom < tsetModel.get('minZoom'))
zoom = tsetModel.get('minZoom');
overviewer.map.setCenter(latlngcoords);
overviewer.map.setZoom(zoom);
var locationmarker = new overviewer.views.LocationIconView();
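
The two hunks above simplify the negative-zoom handling and then clamp the result into [minZoom, maxZoom]. The same logic restated in Python purely as a sketch (not part of the patch; the function name and parameters are placeholders):

def normalize_zoom(zoom, min_zoom, max_zoom, default_zoom):
    try:
        zoom = int(zoom)
    except (TypeError, ValueError):
        return default_zoom                 # unparsable value falls back to the default
    if zoom < 0:
        zoom += max_zoom                    # negative means "zoom out from max"
    return max(min_zoom, min(zoom, max_zoom))   # clip zoom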

View File

@@ -19,6 +19,7 @@ import tempfile
import shutil
import logging
import stat
import errno
default_caps = {"chmod_works": True, "rename_works": True}
@@ -150,6 +151,20 @@ class FileReplacer(object):
else:
# copy permission bits, if needed
if self.caps.get("chmod_works") and os.path.exists(self.destname):
shutil.copymode(self.destname, self.tmpname)
try:
shutil.copymode(self.destname, self.tmpname)
except OSError, e:
# Ignore errno ENOENT: file does not exist. Due to a race
# condition, two processes could conceivably try and update
# the same temp file at the same time
if e.errno != errno.ENOENT:
raise
# atomic rename into place
os.rename(self.tmpname, self.destname)
try:
os.rename(self.tmpname, self.destname)
except OSError, e:
# Ignore errno ENOENT: file does not exist. Due to a race
# condition, two processes could conceivably try and update
# the same temp file at the same time
if e.errno != errno.ENOENT:
raise
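
Both copymode and the final rename are now wrapped so that a temp file removed by a racing process is ignored. A minimal sketch of that tolerant-replace pattern (the function name and paths are placeholders):

import errno
import os

def replace_ignoring_races(tmpname, destname):
    try:
        os.rename(tmpname, destname)        # atomic rename into place
    except OSError as e:
        if e.errno != errno.ENOENT:         # only "file does not exist" is benign here
            raise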

View File

@@ -92,17 +92,50 @@ class LoggingObserver(Observer):
#this is an easy way to make the first update() call print a line
self.last_update = -101
# a fake ProgressBar, for the sake of ETA
class FakePBar(object):
def __init__(self):
self.maxval = None
self.currval = 0
self.finished = False
self.start_time = None
self.seconds_elapsed = 0
def finish(self):
self.update(self.maxval)
def update(self, value):
assert 0 <= value <= self.maxval
self.currval = value
if self.finished:
return False
if not self.start_time:
self.start_time = time.time()
self.seconds_elapsed = time.time() - self.start_time
if value == self.maxval:
self.finished = True
self.fake = FakePBar();
self.eta = progressbar.ETA()
def start(self, max_value):
self.fake.maxval = max_value
super(LoggingObserver, self).start(max_value)
def finish(self):
logging.info("Rendered %d of %d. %d%% complete", self.get_max_value(),
self.get_max_value(), 100.0)
self.fake.finish()
logging.info("Rendered %d of %d. %d%% complete. %s", self.get_max_value(),
self.get_max_value(), 100.0, self.eta.update(self.fake))
super(LoggingObserver, self).finish()
def update(self, current_value):
super(LoggingObserver, self).update(current_value)
self.fake.update(current_value)
if self._need_update():
logging.info("Rendered %d of %d. %d%% complete",
logging.info("Rendered %d of %d. %d%% complete. %s",
self.get_current_value(), self.get_max_value(),
self.get_percentage())
self.get_percentage(), self.eta.update(self.fake))
self.last_update = current_value
return True
return False
@@ -345,7 +378,7 @@ class ServerAnnounceObserver(Observer):
def update(self, current_value):
super(ServerAnnounceObserver, self).update(current_value)
if self._need_update(current_value):
if self._need_update():
self._send_output('Rendered %d of %d tiles, %d%% complete' %
(self.get_current_value(), self.get_max_value(),
self.get_percentage()))
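
The FakePBar above exists only to feed progressbar's ETA widget, which needs an object exposing maxval, currval, finished and seconds_elapsed. A self-contained sketch of the idea, assuming the legacy progressbar module used by Overviewer is importable (MinimalFakeBar is a hypothetical stand-in for the FakePBar added in this diff):

import time
import progressbar

class MinimalFakeBar(object):
    def __init__(self, maxval):
        self.maxval = maxval
        self.currval = 0
        self.finished = False
        self.start_time = time.time()
        self.seconds_elapsed = 0

    def update(self, value):
        self.currval = value
        self.seconds_elapsed = time.time() - self.start_time
        self.finished = (value == self.maxval)

bar = MinimalFakeBar(100)
eta = progressbar.ETA()
bar.update(25)
print(eta.update(bar))    # returns an "ETA:  h:mm:ss" string, as logged above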

View File

@@ -16,37 +16,117 @@
import os
import subprocess
import shlex
import logging
pngcrush = "pngcrush"
optipng = "optipng"
advdef = "advdef"
class Optimizer:
binaryname = ""
def check_programs(level):
path = os.environ.get("PATH").split(os.pathsep)
def __init__(self):
raise NotImplementedError("I can't let you do that, Dave.")
def optimize(self, img):
raise NotImplementedError("I can't let you do that, Dave.")
def exists_in_path(prog):
result = filter(lambda x: os.path.exists(os.path.join(x, prog)), path)
return len(result) != 0
def fire_and_forget(self, args):
subprocess.check_call(args)
def check_availability(self):
path = os.environ.get("PATH").split(os.pathsep)
def exists_in_path(prog):
result = filter(lambda x: os.path.exists(os.path.join(x, prog)), path)
return len(result) != 0
if (not exists_in_path(self.binaryname)) and (not exists_in_path(self.binaryname + ".exe")):
raise Exception("Optimization program '%s' was not found!" % self.binaryname)
for prog,l in [(pngcrush,1), (advdef,2)]:
if l <= level:
if (not exists_in_path(prog)) and (not exists_in_path(prog + ".exe")):
raise Exception("Optimization prog %s for level %d not found!" % (prog, l))
def is_crusher(self):
"""Should return True if the optimization is lossless, i.e. none of the actual image data will be changed."""
raise NotImplementedError("I'm so abstract I can't even say whether I'm a crusher.")
def optimize_image(imgpath, imgformat, optimizeimg):
if imgformat == 'png':
if optimizeimg >= 1:
# we can't do an atomic replace here because windows is terrible
# so instead, we make temp files, delete the old ones, and rename
# the temp files. go windows!
subprocess.Popen([pngcrush, imgpath, imgpath + ".tmp"],
stderr=subprocess.STDOUT, stdout=subprocess.PIPE).communicate()[0]
os.remove(imgpath)
os.rename(imgpath+".tmp", imgpath)
class NonAtomicOptimizer(Optimizer):
def cleanup(self, img):
os.remove(img)
os.rename(img + ".tmp", img)
if optimizeimg >= 2:
# the "-nc" it's needed to no broke the transparency of tiles
recompress_option = "-z2" if optimizeimg == 2 else "-z4"
subprocess.Popen([advdef, recompress_option,imgpath], stderr=subprocess.STDOUT,
stdout=subprocess.PIPE).communicate()[0]
def fire_and_forget(self, args, img):
subprocess.check_call(args)
self.cleanup(img)
class PNGOptimizer:
def __init__(self):
raise NotImplementedError("I can't let you do that, Dave.")
class JPEGOptimizer:
def __init__(self):
raise NotImplementedError("I can't let you do that, Dave.")
class pngnq(NonAtomicOptimizer, PNGOptimizer):
binaryname = "pngnq"
def __init__(self, sampling=3, dither="n"):
if sampling < 1 or sampling > 10:
raise Exception("Invalid sampling value '%d' for pngnq!" % sampling)
if dither not in ["n", "f"]:
raise Exception("Invalid dither method '%s' for pngnq!" % dither)
self.sampling = sampling
self.dither = dither
def optimize(self, img):
if img.endswith(".tmp"):
extension = ".tmp"
else:
extension = ".png.tmp"
args = [self.binaryname, "-s", str(self.sampling), "-f", "-e", extension, img]
# Workaround for poopbuntu 12.04 which ships an old broken pngnq
if self.dither != "n":
args.insert(1, "-Q")
args.insert(2, self.dither)
NonAtomicOptimizer.fire_and_forget(self, args, img)
def is_crusher(self):
return False
class pngcrush(NonAtomicOptimizer, PNGOptimizer):
binaryname = "pngcrush"
# really can't be bothered to add some interface for all
# the pngcrush options, it sucks anyway
def __init__(self, brute=False):
self.brute = brute
def optimize(self, img):
args = [self.binaryname, img, img + ".tmp"]
if self.brute == True: # Was the user an idiot?
args.insert(1, "-brute")
NonAtomicOptimizer.fire_and_forget(self, args, img)
def is_crusher(self):
return True
class optipng(Optimizer, PNGOptimizer):
binaryname = "optipng"
def __init__(self, olevel=2):
self.olevel = olevel
def optimize(self, img):
Optimizer.fire_and_forget(self, [self.binaryname, "-o" + str(self.olevel), "-quiet", img])
def is_crusher(self):
return True
def optimize_image(imgpath, imgformat, optimizers):
for opt in optimizers:
if imgformat == 'png':
if isinstance(opt, PNGOptimizer):
opt.optimize(imgpath)
elif imgformat == 'jpg':
if isinstance(opt, JPEGOptimizer):
opt.optimize(imgpath)
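
The module now exposes optimizer classes that are chained by optimize_image instead of being driven by a single integer level. A short usage sketch (module path assumes Overviewer's package layout; the tile path is hypothetical):

from overviewer_core.optimizeimages import pngnq, optipng, optimize_image

optimizers = [pngnq(sampling=3), optipng(olevel=2)]
for opt in optimizers:
    opt.check_availability()        # raises if the binary is not on PATH

# quantize first (lossy), then let the lossless crusher run on the result
optimize_image("/tmp/tile.png", "png", optimizers)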

View File

@@ -46,6 +46,7 @@
from settingsValidators import *
import util
from observer import ProgressBarObserver, LoggingObserver, JSObserver
from optimizeimages import pngnq, optipng, pngcrush
import platform
import sys
@@ -72,7 +73,7 @@ renders = Setting(required=True, default=util.OrderedDict(),
"imgquality": Setting(required=False, validator=validateImgQuality, default=95),
"bgcolor": Setting(required=True, validator=validateBGColor, default="1a1a1a"),
"defaultzoom": Setting(required=True, validator=validateDefaultZoom, default=1),
"optimizeimg": Setting(required=True, validator=validateOptImg, default=0),
"optimizeimg": Setting(required=True, validator=validateOptImg, default=[]),
"nomarkers": Setting(required=False, validator=validateBool, default=None),
"texturepath": Setting(required=False, validator=validateTexturePath, default=None),
"renderchecks": Setting(required=False, validator=validateInt, default=None),

View File

@@ -5,7 +5,9 @@ from collections import namedtuple
import rendermodes
import util
from optimizeimages import Optimizer
from world import UPPER_LEFT, UPPER_RIGHT, LOWER_LEFT, LOWER_RIGHT
import logging
class ValidationException(Exception):
pass
@@ -155,8 +157,30 @@ def validateBGColor(color):
return color
def validateOptImg(opt):
return bool(opt)
def validateOptImg(optimizers):
if isinstance(optimizers, (int, long)):
from optimizeimages import pngcrush
logging.warning("You're using a deprecated definition of optimizeimg. "\
"We'll do what you say for now, but please fix this as soon as possible.")
optimizers = [pngcrush()]
if not isinstance(optimizers, list):
raise ValidationException("What you passed to optimizeimg is not a list. "\
"Make sure you specify them like [foo()], with square brackets.")
if optimizers:
for opt, next_opt in zip(optimizers, optimizers[1:]) + [(optimizers[-1], None)]:
if not isinstance(opt, Optimizer):
raise ValidationException("Invalid Optimizer!")
opt.check_availability()
# Check whether the chaining is somewhat sane
if next_opt:
if opt.is_crusher() and not next_opt.is_crusher():
logging.warning("You're feeding a crushed output into an optimizer that does not crush. "\
"This is most likely pointless, and wastes time.")
return optimizers
def validateTexturePath(path):
# Expand user dir in directories strings
@@ -201,15 +225,23 @@ def validateOutputDir(d):
return expand_path(d)
def validateCrop(value):
if len(value) != 4:
raise ValidationException("The value for the 'crop' setting must be a tuple of length 4")
a, b, c, d = tuple(int(x) for x in value)
if a >= c:
a, c = c, a
if b >= d:
b, d = d, b
return (a, b, c, d)
if not isinstance(value, list):
value = [value]
cropZones = []
for zone in value:
if not isinstance(zone, tuple) or len(zone) != 4:
raise ValidationException("The value for the 'crop' setting must be an array of tuples of length 4")
a, b, c, d = tuple(int(x) for x in zone)
if a >= c:
a, c = c, a
if b >= d:
b, d = d, b
cropZones.append((a, b, c, d))
return cropZones
def validateObserver(observer):
if all(map(lambda m: hasattr(observer, m), ['start', 'add', 'update', 'finish'])):
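
validateCrop now accepts either a single 4-tuple or a list of them, normalizing each zone so the minimum precedes the maximum on both axes. Illustrative render settings (the render name is a placeholder):

# single zone: a bare 4-tuple is wrapped into a one-element list
renders["myrender"]["crop"] = (-200, -200, 200, 200)

# multiple zones: a list of 4-tuples
renders["myrender"]["crop"] = [(-200, -200, 200, 200), (400, 400, 800, 800)]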

View File

@@ -161,17 +161,6 @@ class Textures(object):
return None
if verbose: logging.info('search_zip_paths: ' + ', '.join(search_zip_paths))
# we've successfully loaded something from here before, so let's quickly try
# this before searching again
if self.jar is not None:
for jarfilename in search_zip_paths:
try:
self.jar.getinfo(jarfilename)
if verbose: logging.info("Found (cached) %s in '%s'", jarfilename, self.jarpath)
return self.jar.open(jarfilename)
except (KeyError, IOError), e:
pass
# A texture path was given on the command line. Search this location
# for the file first.
if self.find_file_local_path:
@@ -227,6 +216,17 @@ class Textures(object):
if verbose: logging.info("Did not find the file in overviewer executable directory")
if verbose: logging.info("Looking for installed minecraft jar files...")
# we've successfully loaded something from here before, so let's quickly try
# this before searching again
if self.jar is not None:
for jarfilename in search_zip_paths:
try:
self.jar.getinfo(jarfilename)
if verbose: logging.info("Found (cached) %s in '%s'", jarfilename, self.jarpath)
return self.jar.open(jarfilename)
except (KeyError, IOError), e:
pass
# Find an installed minecraft client jar and look in it for the texture
# file we need.
versiondir = ""
@@ -638,23 +638,23 @@ class Textures(object):
increment = int(round((top[1] / 16.)*12.)) # range increment in the block height in pixels (half texture size)
crop_height = increment
top = top[0]
if side1 != None:
if side1 is not None:
side1 = side1.copy()
ImageDraw.Draw(side1).rectangle((0, 0,16,crop_height),outline=(0,0,0,0),fill=(0,0,0,0))
if side2 != None:
if side2 is not None:
side2 = side2.copy()
ImageDraw.Draw(side2).rectangle((0, 0,16,crop_height),outline=(0,0,0,0),fill=(0,0,0,0))
if side3 != None:
if side3 is not None:
side3 = side3.copy()
ImageDraw.Draw(side3).rectangle((0, 0,16,crop_height),outline=(0,0,0,0),fill=(0,0,0,0))
if side4 != None:
if side4 is not None:
side4 = side4.copy()
ImageDraw.Draw(side4).rectangle((0, 0,16,crop_height),outline=(0,0,0,0),fill=(0,0,0,0))
img = Image.new("RGBA", (24,24), self.bgcolor)
# first back sides
if side1 != None :
if side1 is not None :
side1 = self.transform_image_side(side1)
side1 = side1.transpose(Image.FLIP_LEFT_RIGHT)
@@ -666,7 +666,7 @@ class Textures(object):
alpha_over(img, side1, (0,0), side1)
if side2 != None :
if side2 is not None :
side2 = self.transform_image_side(side2)
# Darken this side.
@@ -676,12 +676,12 @@ class Textures(object):
alpha_over(img, side2, (12,0), side2)
if bottom != None :
if bottom is not None :
bottom = self.transform_image_top(bottom)
alpha_over(img, bottom, (0,12), bottom)
# front sides
if side3 != None :
if side3 is not None :
side3 = self.transform_image_side(side3)
# Darken this side
@@ -691,7 +691,7 @@ class Textures(object):
alpha_over(img, side3, (0,6), side3)
if side4 != None :
if side4 is not None :
side4 = self.transform_image_side(side4)
side4 = side4.transpose(Image.FLIP_LEFT_RIGHT)
@@ -702,7 +702,7 @@ class Textures(object):
alpha_over(img, side4, (12,6), side4)
if top != None :
if top is not None :
top = self.transform_image_top(top)
alpha_over(img, top, (0, increment), top)

View File

@@ -24,6 +24,7 @@ import functools
import time
import errno
import stat
import platform
from collections import namedtuple
from itertools import product, izip, chain
@@ -129,6 +130,14 @@ Bounds = namedtuple("Bounds", ("mincol", "maxcol", "minrow", "maxrow"))
# slowest, but SHOULD be specified if this is the first render because
# the scan will forgo tile stat calls. It's also useful for changing
# texture packs or other options that affect the output.
# 3
# A very special mode. Using this will not actually render
# anything, but will leave this tileset in the resulting
# map. Useful for renders that you want to keep, but not
# update. Since this mode is so simple, it's left out of the
# rest of this discussion.
#
# For 0 our caller has explicitly requested not to check mtimes on disk to
# speed things up. So the mode 0 chunk scan only looks at chunk mtimes and the
@@ -237,6 +246,13 @@ class TileSet(object):
useful for changing texture packs or other options that affect
the output.
3
A very special mode. Using this will not actually render
anything, but will leave this tileset in the resulting
map. Useful for renders that you want to keep, but not
update. Since this mode is so simple, it's left out of the
rest of this discussion.
imgformat
A string indicating the output format. Must be one of 'png' or
'jpeg'
@@ -246,11 +262,7 @@ class TileSet(object):
relevant in jpeg mode.
optimizeimg
an integer indiating optimizations to perform on png outputs. 0
indicates no optimizations. Only relevant in png mode.
1 indicates pngcrush is run on all output images
2 indicates pngcrush and advdef are run on all output images with advdef -z2
3 indicates pngcrush and advdef are run on all output images with advdef -z4
A list of optimizer instances to use.
rendermode
Perhaps the most important/relevant option: a string indicating the
@@ -389,6 +401,11 @@ class TileSet(object):
attribute for later use in iterate_work_items()
"""
# skip if we're told to
if self.options['renderchecks'] == 3:
return
# REMEMBER THAT ATTRIBUTES ASSIGNED IN THIS METHOD ARE NOT AVAILABLE IN
# THE do_work() METHOD (because this is only called in the main process
# not the workers)
@@ -415,15 +432,16 @@ class TileSet(object):
return 1
def get_phase_length(self, phase):
"""Returns the number of work items in a given phase, or None if there
is no good estimate.
"""Returns the number of work items in a given phase.
"""
# Yeah functional programming!
# and by functional we mean a bastardized python switch statement
return {
0: lambda: self.dirtytree.count_all(),
#there is no good way to guess this so just give total count
1: lambda: (4**(self.treedepth+1)-1)/3,
2: lambda: self.dirtytree.count_all(),
3: lambda: 0,
}[self.options['renderchecks']]()
def iterate_work_items(self, phase):
@@ -433,6 +451,10 @@ class TileSet(object):
This method returns an iterator over (obj, [dependencies, ...])
"""
# skip if asked to
if self.options['renderchecks'] == 3:
return
# The following block of code implementes the changelist functionality.
fd = self.options.get("changelist", None)
if fd:
@@ -535,6 +557,11 @@ class TileSet(object):
def bgcolorformat(color):
return "#%02x%02x%02x" % color[0:3]
isOverlay = self.options.get("overlay") or (not any(isinstance(x, rendermodes.Base) for x in self.options.get("rendermode")))
# don't update last render time if we're leaving this alone
last_rendertime = self.last_rendertime
if self.options['renderchecks'] != 3:
last_rendertime = self.max_chunk_mtime
d = dict(name = self.options.get('title'),
zoomLevels = self.treedepth,
@@ -545,13 +572,15 @@ class TileSet(object):
bgcolor = bgcolorformat(self.options.get('bgcolor')),
world = self.options.get('worldname_orig') +
(" - " + self.options.get('dimension')[0] if self.options.get('dimension')[1] != 0 else ''),
last_rendertime = self.max_chunk_mtime,
last_rendertime = last_rendertime,
imgextension = self.imgextension,
isOverlay = isOverlay,
poititle = self.options.get("poititle"),
showlocationmarker = self.options.get("showlocationmarker")
)
d['maxZoom'] = min(self.treedepth, d['maxZoom'])
d['minZoom'] = min(max(0, self.options.get("minzoom", 0)), d['maxZoom'])
d['defaultZoom'] = max(d['minZoom'], min(d['defaultZoom'], d['maxZoom']))
if isOverlay:
d.update({"tilesets": self.options.get("overlay")})
@@ -760,8 +789,8 @@ class TileSet(object):
# Compare the last modified time of the chunk and tile. If the
# tile is older, mark it in a RendertileSet object as dirty.
for chunkx, chunkz, chunkmtime in self.regionset.iterate_chunks():
for chunkx, chunkz, chunkmtime in self.regionset.iterate_chunks() if (markall or platform.system() == 'Windows') else self.regionset.iterate_newer_chunks(last_rendertime):
chunkcount += 1
if chunkmtime > max_chunk_mtime:
@@ -892,7 +921,11 @@ class TileSet(object):
try:
#quad = Image.open(path[1]).resize((192,192), Image.ANTIALIAS)
src = Image.open(path[1])
# optimizeimg may have converted them to a palette image in the meantime
if src.mode != "RGB" and src.mode != "RGBA":
src = src.convert("RGBA")
src.load()
quad = Image.new("RGBA", (192, 192), self.options['bgcolor'])
resize_half(quad, src)
img.paste(quad, path[0])
@@ -914,7 +947,14 @@ class TileSet(object):
if self.options['optimizeimg']:
optimize_image(tmppath, imgformat, self.options['optimizeimg'])
os.utime(tmppath, (max_mtime, max_mtime))
try:
os.utime(tmppath, (max_mtime, max_mtime))
except OSError, e:
# Ignore errno ENOENT: file does not exist. Due to a race
# condition, two processes could conceivably try and update
# the same temp file at the same time
if e.errno != errno.ENOENT:
raise
def _render_rendertile(self, tile):
"""Renders the given render-tile.
@@ -1017,7 +1057,7 @@ class TileSet(object):
if self.options['optimizeimg']:
optimize_image(tmppath, self.imgextension, self.options['optimizeimg'])
os.utime(tmppath, (max_chunk_mtime, max_chunk_mtime))
def _iterate_and_check_tiles(self, path):
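
renderchecks mode 3 keeps an existing tileset in the output map without scanning or re-rendering it, and the last render time is preserved so a later real render still picks up changes. An illustrative config entry using it (world and title values are placeholders):

renders["archive"] = {
    "world": "survival",
    "title": "Old spawn area, kept but not updated",
    "renderchecks": 3,
}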

View File

@@ -48,7 +48,8 @@ def findGitHash():
import overviewer_version
return overviewer_version.HASH
except Exception:
return "unknown"
pass
return "unknown"
def findGitVersion():
try:

View File

@@ -273,7 +273,7 @@ class RegionSet(object):
for x, y, regionfile in self._iterate_regionfiles():
# regionfile is a pathname
self.regionfiles[(x,y)] = regionfile
self.regionfiles[(x,y)] = (regionfile, os.path.getmtime(regionfile))
self.empty_chunk = [None,None]
logging.debug("Done scanning regions")
@@ -459,7 +459,7 @@ class RegionSet(object):
"""
for (regionx, regiony), regionfile in self.regionfiles.iteritems():
for (regionx, regiony), (regionfile, filemtime) in self.regionfiles.iteritems():
try:
mcr = self._get_regionobj(regionfile)
except nbt.CorruptRegionError:
@@ -468,6 +468,27 @@ class RegionSet(object):
for chunkx, chunky in mcr.get_chunks():
yield chunkx+32*regionx, chunky+32*regiony, mcr.get_chunk_timestamp(chunkx, chunky)
def iterate_newer_chunks(self, mtime):
"""Returns an iterator over all chunk metadata in this world. Iterates
over tuples of integers (x,z,mtime) for each chunk. Other chunk data
is not returned here.
"""
for (regionx, regiony), (regionfile, filemtime) in self.regionfiles.iteritems():
""" SKIP LOADING A REGION WHICH HAS NOT BEEN MODIFIED! """
if (filemtime < mtime):
continue
try:
mcr = self._get_regionobj(regionfile)
except nbt.CorruptRegionError:
logging.warning("Found a corrupt region file at %s,%s. Skipping it.", regionx, regiony)
continue
for chunkx, chunky in mcr.get_chunks():
yield chunkx+32*regionx, chunky+32*regiony, mcr.get_chunk_timestamp(chunkx, chunky)
def get_chunk_mtime(self, x, z):
"""Returns a chunk's mtime, or False if the chunk does not exist. This
is therefore a dual purpose method. It corrects for the given north
@@ -493,7 +514,7 @@ class RegionSet(object):
Coords can be either be global chunk coords, or local to a region
"""
regionfile = self.regionfiles.get((chunkX//32, chunkY//32),None)
(regionfile,filemtime) = self.regionfiles.get((chunkX//32, chunkY//32),(None, None))
return regionfile
def _iterate_regionfiles(self):
@@ -537,6 +558,8 @@ class RegionSetWrapper(object):
return self._r.get_chunk(x,z)
def iterate_chunks(self):
return self._r.iterate_chunks()
def iterate_newer_chunks(self,filemtime):
return self._r.iterate_newer_chunks(filemtime)
def get_chunk_mtime(self, x, z):
return self._r.get_chunk_mtime(x,z)
@@ -623,6 +646,11 @@ class RotatedRegionSet(RegionSetWrapper):
x,z = self.rotate(x,z)
yield x,z,mtime
def iterate_newer_chunks(self, filemtime):
for x,z,mtime in super(RotatedRegionSet, self).iterate_newer_chunks(filemtime):
x,z = self.rotate(x,z)
yield x,z,mtime
class CroppedRegionSet(RegionSetWrapper):
def __init__(self, rsetobj, xmin, zmin, xmax, zmax):
super(CroppedRegionSet, self).__init__(rsetobj)
@@ -646,6 +674,14 @@ class CroppedRegionSet(RegionSetWrapper):
self.xmin <= x <= self.xmax and
self.zmin <= z <= self.zmax
)
def iterate_newer_chunks(self, filemtime):
return ((x,z,mtime) for (x,z,mtime) in super(CroppedRegionSet,self).iterate_newer_chunks(filemtime)
if
self.xmin <= x <= self.xmax and
self.zmin <= z <= self.zmax
)
def get_chunk_mtime(self,x,z):
if (
self.xmin <= x <= self.xmax and
@@ -744,12 +780,7 @@ def get_worlds():
if not os.path.exists(world_dat): continue
info = nbt.load(world_dat)[1]
info['Data']['path'] = os.path.join(save_dir, dir).decode(loc)
if dir.startswith("World") and len(dir) == 6:
try:
world_n = int(dir[-1])
ret[world_n] = info['Data']
except ValueError:
pass
if 'LevelName' in info['Data'].keys():
ret[info['Data']['LevelName']] = info['Data']
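
iterate_newer_chunks() lets the tile scan skip whole region files whose mtime predates the last render, and get_worlds() now keys auto-detected saves by their LevelName. A minimal usage sketch of the new iterator (rset and last_rendertime are assumed to come from the caller, as in tileset.py above):

for chunkx, chunkz, chunkmtime in rset.iterate_newer_chunks(last_rendertime):
    # only chunks from region files touched since last_rendertime arrive here;
    # per-chunk mtime comparisons are still done by the caller
    pass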