0

genPOI: fix code style

One code style error can't really be fixed: although has_key is
deprecated, "in" doesn't work on "super" for objects that derive
from dict, at least not in Python 2.7. Thanks Guido.

Also, I don't know which one of you it is that keeps putting semicolons
at the end of your statements, but you seriously need to stop.
This commit is contained in:
Nicolas F
2019-03-10 18:05:32 +01:00
parent 6f5730d40a
commit c9b506a58c

View File

@@ -14,6 +14,7 @@ markers.js holds a list of which markerSets are attached to each tileSet
''' '''
import datetime
import gzip import gzip
import itertools import itertools
import json import json
@@ -24,20 +25,17 @@ import re
import sys import sys
import time import time
import urllib2 import urllib2
import datetime
from collections import defaultdict from collections import defaultdict
from contextlib import closing from contextlib import closing
from multiprocessing import Pool from multiprocessing import Pool
from optparse import OptionParser from optparse import OptionParser
from overviewer_core import logger from overviewer_core import configParser, logger, nbt, world
from overviewer_core import nbt
from overviewer_core import configParser, world
from overviewer_core.files import FileReplacer, get_fs_caps from overviewer_core.files import FileReplacer, get_fs_caps
UUID_LOOKUP_URL = 'https://sessionserver.mojang.com/session/minecraft/profile/' UUID_LOOKUP_URL = 'https://sessionserver.mojang.com/session/minecraft/profile/'
def replaceBads(s): def replaceBads(s):
"Replaces bad characters with good characters!" "Replaces bad characters with good characters!"
bads = [" ", "(", ")"] bads = [" ", "(", ")"]
@@ -46,6 +44,7 @@ def replaceBads(s):
x = x.replace(bad, "_") x = x.replace(bad, "_")
return x return x
# If you want to keep your stomach contents do not, under any circumstance, # If you want to keep your stomach contents do not, under any circumstance,
# read the body of the following function. You have been warned. # read the body of the following function. You have been warned.
# All of this could be replaced by a simple json.loads if Mojang had # All of this could be replaced by a simple json.loads if Mojang had
@@ -61,8 +60,8 @@ def replaceBads(s):
def jsonText(s): def jsonText(s):
if s is None or s == "null": if s is None or s == "null":
return "" return ""
if (s.startswith('"') and s.endswith('"')) or \ if ((s.startswith('"') and s.endswith('"')) or
(s.startswith('{') and s.endswith('}')): (s.startswith('{') and s.endswith('}'))):
try: try:
js = json.loads(s) js = json.loads(s)
except ValueError: except ValueError:
@@ -87,9 +86,12 @@ def jsonText(s):
else: else:
return s return s
# Since functions are not pickleable, we send their names instead. # Since functions are not pickleable, we send their names instead.
# Here, set up worker processes to have a name -> function map # Here, set up worker processes to have a name -> function map
bucketChunkFuncs = {} bucketChunkFuncs = {}
def initBucketChunks(config_path): def initBucketChunks(config_path):
global bucketChunkFuncs global bucketChunkFuncs
@@ -102,6 +104,7 @@ def initBucketChunks(config_path):
ff = f['filterFunction'] ff = f['filterFunction']
bucketChunkFuncs[ff.__name__] = ff bucketChunkFuncs[ff.__name__] = ff
# yes there's a double parenthesis here # yes there's a double parenthesis here
# see below for when this is called, and why we do this # see below for when this is called, and why we do this
# a smarter way would be functools.partial, but that's broken on python 2.6 # a smarter way would be functools.partial, but that's broken on python 2.6
@@ -126,7 +129,7 @@ def parseBucketChunks((bucket, rset, filters)):
d = create_marker_from_filter_result(poi, result) d = create_marker_from_filter_result(poi, result)
markers[name].append(d) markers[name].append(d)
except nbt.CorruptChunkError: except nbt.CorruptChunkError:
logging.warning("Ignoring POIs in corrupt chunk %d,%d", b[0], b[1]) logging.warning("Ignoring POIs in corrupt chunk %d,%d.", b[0], b[1])
except world.ChunkDoesntExist: except world.ChunkDoesntExist:
pass pass
@@ -135,10 +138,12 @@ def parseBucketChunks((bucket, rset, filters)):
if i == 250: if i == 250:
i = 0 i = 0
cnt = 250 + cnt cnt = 250 + cnt
logging.info("Found %d markers in thread %d so far at %d chunks", sum(len(v) for v in markers.itervalues()), pid, cnt); logging.info("Found %d markers in thread %d so far at %d chunks.",
sum(len(v) for v in markers.itervalues()), pid, cnt)
return markers return markers
def signWrangler(poi): def signWrangler(poi):
""" """
Just does the JSON things for signs Just does the JSON things for signs
@@ -157,9 +162,9 @@ def handleEntities(rset, config, config_path, filters, markers):
This function will not return anything, but it will update the parameter This function will not return anything, but it will update the parameter
`markers`. `markers`.
""" """
logging.info("Looking for entities in %r", rset) logging.info("Looking for entities in %r...", rset)
numbuckets = config['processes']; numbuckets = config['processes']
if numbuckets < 0: if numbuckets < 0:
numbuckets = multiprocessing.cpu_count() numbuckets = multiprocessing.cpu_count()
@@ -176,14 +181,13 @@ def handleEntities(rset, config, config_path, filters, markers):
d = create_marker_from_filter_result(poi, result) d = create_marker_from_filter_result(poi, result)
markers[name]['raw'].append(d) markers[name]['raw'].append(d)
except nbt.CorruptChunkError: except nbt.CorruptChunkError:
logging.warning("Ignoring POIs in corrupt chunk %d,%d", x,z) logging.warning("Ignoring POIs in corrupt chunk %d,%d.", x, z)
except world.ChunkDoesntExist: except world.ChunkDoesntExist:
# iterate_chunks() doesn't inspect chunks and filter out # iterate_chunks() doesn't inspect chunks and filter out
# placeholder ones. It's okay for this chunk to not exist. # placeholder ones. It's okay for this chunk to not exist.
pass pass
else: else:
buckets = [[] for i in range(numbuckets)]; buckets = [[] for i in range(numbuckets)]
for (x, z, mtime) in rset.iterate_chunks(): for (x, z, mtime) in rset.iterate_chunks():
i = x / 32 + z / 32 i = x / 32 + z / 32
@@ -191,17 +195,18 @@ def handleEntities(rset, config, config_path, filters, markers):
buckets[i].append([x, z]) buckets[i].append([x, z])
for b in buckets: for b in buckets:
logging.info("Buckets has %d entries", len(b)); logging.info("Buckets has %d entries.", len(b))
# Create a pool of processes and run all the functions # Create a pool of processes and run all the functions
pool = Pool(processes=numbuckets, initializer=initBucketChunks, initargs=(config_path,)) pool = Pool(processes=numbuckets, initializer=initBucketChunks, initargs=(config_path,))
# simplify the filters dict, so pickle doesn't have to do so much # simplify the filters dict, so pickle doesn't have to do so much
filters = [(name, filter_function.__name__) for name, __, filter_function, __, __, __ in filters] filters = [(name, filter_function.__name__) for name, __, filter_function, __, __, __
in filters]
results = pool.map(parseBucketChunks, ((buck, rset, filters) for buck in buckets)) results = pool.map(parseBucketChunks, ((buck, rset, filters) for buck in buckets))
logging.info("All the threads completed") logging.info("All the threads completed.")
for marker_dict in results: for marker_dict in results:
for name, marker_list in marker_dict.iteritems(): for name, marker_list in marker_dict.iteritems():
@@ -222,22 +227,23 @@ class PlayerDict(dict):
try: try:
with closing(gzip.GzipFile(cache_file)) as gz: with closing(gzip.GzipFile(cache_file)) as gz:
cls.uuid_cache = json.load(gz) cls.uuid_cache = json.load(gz)
logging.info("Loaded UUID cache from %r with %d entries", logging.info("Loaded UUID cache from %r with %d entries.",
cache_file, len(cls.uuid_cache.keys())) cache_file, len(cls.uuid_cache.keys()))
except (ValueError, IOError): except (ValueError, IOError):
logging.warning("Failed to load UUID cache -- it might be corrupt") logging.warning("Failed to load UUID cache -- it might be corrupt.")
cls.uuid_cache = {} cls.uuid_cache = {}
corrupted_cache = cache_file + ".corrupted." + datetime.datetime.now().isoformat() corrupted_cache = cache_file + ".corrupted." + datetime.datetime.now().isoformat()
try: try:
os.rename(cache_file, corrupted_cache) os.rename(cache_file, corrupted_cache)
logging.warning("If %s does not appear to contain meaningful data, you may safely delete it", corrupted_cache) logging.warning("If %s does not appear to contain meaningful data, you may "
"safely delete it.", corrupted_cache)
except OSError: except OSError:
logging.warning("Failed to backup corrupted UUID cache") logging.warning("Failed to backup corrupted UUID cache.")
logging.info("Initialized an empty UUID cache") logging.info("Initialized an empty UUID cache.")
else: else:
cls.uuid_cache = {} cls.uuid_cache = {}
logging.info("Initialized an empty UUID cache") logging.info("Initialized an empty UUID cache.")
@classmethod @classmethod
def save_cache(cls, outputdir): def save_cache(cls, outputdir):
@@ -247,7 +253,7 @@ class PlayerDict(dict):
with FileReplacer(cache_file, caps) as cache_file_name: with FileReplacer(cache_file, caps) as cache_file_name:
with closing(gzip.GzipFile(cache_file_name, "wb")) as gz: with closing(gzip.GzipFile(cache_file_name, "wb")) as gz:
json.dump(cls.uuid_cache, gz) json.dump(cls.uuid_cache, gz)
logging.info("Wrote UUID cache with %d entries", logging.info("Wrote UUID cache with %d entries.",
len(cls.uuid_cache.keys())) len(cls.uuid_cache.keys()))
def __getitem__(self, item): def __getitem__(self, item):
@@ -257,7 +263,6 @@ class PlayerDict(dict):
super(PlayerDict, self).__setitem__("EntityId", self.get_name_from_uuid()) super(PlayerDict, self).__setitem__("EntityId", self.get_name_from_uuid())
else: else:
super(PlayerDict, self).__setitem__("EntityId", self._name) super(PlayerDict, self).__setitem__("EntityId", self._name)
return super(PlayerDict, self).__getitem__(item) return super(PlayerDict, self).__getitem__(item)
def get_name_from_uuid(self): def get_name_from_uuid(self):
@@ -276,7 +281,7 @@ class PlayerDict(dict):
PlayerDict.uuid_cache[sname] = profile PlayerDict.uuid_cache[sname] = profile
return profile['name'] return profile['name']
except (ValueError, urllib2.URLError): except (ValueError, urllib2.URLError):
logging.warning("Unable to get player name for UUID %s", self._name) logging.warning("Unable to get player name for UUID %s.", self._name)
def handlePlayers(worldpath, filters, markers): def handlePlayers(worldpath, filters, markers):
@@ -309,7 +314,7 @@ def handlePlayers(worldpath, filters, markers):
if isSinglePlayer: if isSinglePlayer:
data = data['Data']['Player'] data = data['Data']['Player']
except (IOError, TypeError): except (IOError, TypeError):
logging.warning("Skipping bad player dat file %r", playerfile) logging.warning("Skipping bad player dat file %r.", playerfile)
continue continue
playername = playerfile.split(".")[0] playername = playerfile.split(".")[0]
@@ -412,7 +417,8 @@ def create_marker_from_filter_result(poi, result):
if 'polyline' in result and hasattr(result['polyline'], '__iter__'): if 'polyline' in result and hasattr(result['polyline'], '__iter__'):
d['polyline'] = [] d['polyline'] = []
for point in result['polyline']: for point in result['polyline']:
d['polyline'].append(dict(x=point['x'], y=point['y'], z=point['z'])) # point.copy() would work, but this validates better # point.copy() would work, but this validates better
d['polyline'].append(dict(x=point['x'], y=point['y'], z=point['z']))
if isinstance(result['color'], basestring): if isinstance(result['color'], basestring):
d['strokeColor'] = result['color'] d['strokeColor'] = result['color']
@@ -421,7 +427,8 @@ def create_marker_from_filter_result(poi, result):
if "createInfoWindow" in result: if "createInfoWindow" in result:
d["createInfoWindow"] = result['createInfoWindow'] d["createInfoWindow"] = result['createInfoWindow']
else: else:
raise ValueError("got an %s as result for POI with id %s" % (type(result).__name__, poi['id'])) raise ValueError("Got an %s as result for POI with id %s"
% (type(result).__name__, poi['id']))
return d return d
@@ -478,8 +485,8 @@ def main():
try: try:
worldpath = config['worlds'][render['world']] worldpath = config['worlds'][render['world']]
except KeyError: except KeyError:
logging.error("Render %s's world is '%s', but I could not find a corresponding entry in the worlds dictionary.", logging.error("Render %s's world is '%s', but I could not find a corresponding entry "
rname, render['world']) "in the worlds dictionary.", rname, render['world'])
return 1 return 1
render['worldname_orig'] = render['world'] render['worldname_orig'] = render['world']
render['world'] = worldpath render['world'] = worldpath
@@ -493,20 +500,23 @@ def main():
# get the regionset for this dimension # get the regionset for this dimension
rset = w.get_regionset(render['dimension'][1]) rset = w.get_regionset(render['dimension'][1])
if rset == None: # indicates no such dimension was found: if rset is None: # indicates no such dimension was found:
logging.warn("Sorry, you requested dimension '%s' for the render '%s', but I couldn't find it", render['dimension'][0], rname) logging.warn("Sorry, you requested dimension '%s' for the render '%s', but I couldn't "
"find it.", render['dimension'][0], rname)
continue continue
# find filters for this render # find filters for this render
for f in render['markers']: for f in render['markers']:
# internal identifier for this filter # internal identifier for this filter
name = replaceBads(f['name']) + hex(hash(f['filterFunction']))[-4:] + "_" + hex(hash(rname))[-4:] name = (replaceBads(f['name']) + hex(hash(f['filterFunction']))[-4:] + "_"
+ hex(hash(rname))[-4:])
# add it to the list of filters # add it to the list of filters
filters.add((name, f['name'], f['filterFunction'], rset, worldpath, rname)) filters.add((name, f['name'], f['filterFunction'], rset, worldpath, rname))
# add an entry in the menu to show markers found by this filter # add an entry in the menu to show markers found by this filter
group = dict(groupName=name, group = dict(
groupName=name,
displayName=f['name'], displayName=f['name'],
icon=f.get('icon', 'signpost_icon.png'), icon=f.get('icon', 'signpost_icon.png'),
createInfoWindow=f.get('createInfoWindow', True), createInfoWindow=f.get('createInfoWindow', True),
@@ -520,7 +530,8 @@ def main():
# apply filters to regionsets # apply filters to regionsets
if not options.skipscan: if not options.skipscan:
# group filters by rset # group filters by rset
keyfunc = lambda x: x[3] def keyfunc(x):
return x[3]
sfilters = sorted(filters, key=keyfunc) sfilters = sorted(filters, key=keyfunc)
for rset, rset_filters in itertools.groupby(sfilters, keyfunc): for rset, rset_filters in itertools.groupby(sfilters, keyfunc):
handleEntities(rset, config, options.config, list(rset_filters), markers) handleEntities(rset, config, options.config, list(rset_filters), markers)
@@ -528,9 +539,11 @@ def main():
# apply filters to players # apply filters to players
if not options.skipplayers: if not options.skipplayers:
PlayerDict.load_cache(destdir) PlayerDict.load_cache(destdir)
# group filters by worldpath, so we only search for players once per # group filters by worldpath, so we only search for players once per
# world # world
keyfunc = lambda x: x[4] def keyfunc(x):
return x[4]
sfilters = sorted(filters, key=keyfunc) sfilters = sorted(filters, key=keyfunc)
for worldpath, worldpath_filters in itertools.groupby(sfilters, keyfunc): for worldpath, worldpath_filters in itertools.groupby(sfilters, keyfunc):
handlePlayers(worldpath, list(worldpath_filters), markers) handlePlayers(worldpath, list(worldpath_filters), markers)
@@ -538,7 +551,8 @@ def main():
# add manual POIs # add manual POIs
# group filters by name of the render, because only filter functions for # group filters by name of the render, because only filter functions for
# the current render should be used on the current render's manualpois # the current render should be used on the current render's manualpois
keyfunc = lambda x: x[5] def keyfunc(x):
return x[5]
sfilters = sorted(filters, key=keyfunc) sfilters = sorted(filters, key=keyfunc)
for rname, rname_filters in itertools.groupby(sfilters, keyfunc): for rname, rname_filters in itertools.groupby(sfilters, keyfunc):
manualpois = config['renders'][rname]['manualpois'] manualpois = config['renders'][rname]['manualpois']
@@ -553,11 +567,11 @@ def main():
with open(os.path.join(destdir, "markersDB.js"), "w") as output: with open(os.path.join(destdir, "markersDB.js"), "w") as output:
output.write("var markersDB=") output.write("var markersDB=")
json.dump(markers, output, indent=2) json.dump(markers, output, indent=2)
output.write(";\n"); output.write(";\n")
with open(os.path.join(destdir, "markers.js"), "w") as output: with open(os.path.join(destdir, "markers.js"), "w") as output:
output.write("var markers=") output.write("var markers=")
json.dump(marker_groups, output, indent=2) json.dump(marker_groups, output, indent=2)
output.write(";\n"); output.write(";\n")
with open(os.path.join(destdir, "baseMarkers.js"), "w") as output: with open(os.path.join(destdir, "baseMarkers.js"), "w") as output:
output.write("overviewer.util.injectMarkerScript('markersDB.js');\n") output.write("overviewer.util.injectMarkerScript('markersDB.js');\n")
output.write("overviewer.util.injectMarkerScript('markers.js');\n") output.write("overviewer.util.injectMarkerScript('markers.js');\n")
@@ -565,5 +579,6 @@ def main():
output.write("overviewer.collections.haveSigns=true;\n") output.write("overviewer.collections.haveSigns=true;\n")
logging.info("Done") logging.info("Done")
if __name__ == "__main__": if __name__ == "__main__":
main() main()