# This file is part of the Minecraft Overviewer.
#
# Minecraft Overviewer is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation, either version 3 of the License, or (at
# your option) any later version.
#
# Minecraft Overviewer is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with the Overviewer. If not, see <http://www.gnu.org/licenses/>.

import functools
import os
import os.path
import logging
import hashlib
import time
import random
import re

import numpy

from . import nbt
from . import cache

LOG = logging.getLogger(__name__)

"""
|
|
This module has routines for extracting information about available worlds
|
|
|
|
"""
|
|
|
|
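# A minimal usage sketch (illustrative only; assumes the path of a world save
# directory containing level.dat):
#
#   w = World("/path/to/saves/MyWorld")
#   overworld = w.get_regionset(None)
#   for cx, cz, mtime in overworld.iterate_chunks():
#       chunk = overworld.get_chunk(cx, cz)   # the "Level" compound as a dict
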
class ChunkDoesntExist(Exception):
    pass


def log_other_exceptions(func):
    """A decorator that logs any errors that are not
    ChunkDoesntExist errors. This should decorate any functions or
    methods called by the C code, such as get_chunk(), because the C
    code is likely to swallow exceptions. This will at least make them
    visible.
    """
    @functools.wraps(func)
    def newfunc(*args):
        try:
            return func(*args)
        except ChunkDoesntExist:
            raise
        except Exception:
            LOG.exception("%s raised this exception", func.func_name)
            raise
    return newfunc


class World(object):
    """Encapsulates the concept of a Minecraft "world". A Minecraft world is a
    level.dat file, a players directory with info about each player, a data
    directory with info about that world's maps, and one or more "dimension"
    directories containing a set of region files with the actual world data.

    This class deals with reading all the metadata about the world. Reading
    the actual world data for each dimension from the region files is handled
    by a RegionSet object.

    Note that vanilla Minecraft servers and single player games have a single
    world with multiple dimensions: one for the overworld, the nether, etc.

    On Bukkit-enabled servers, to support "multiworld," the server creates
    multiple Worlds, each with a single dimension.

    In this file, the World objects act as an interface for RegionSet objects.
    The RegionSet objects are what's really important and are used for reading
    block data for rendering. A RegionSet object will always correspond to a
    set of region files, or what is colloquially referred to as a "world," or
    more accurately as a dimension.

    The only thing this class actually stores is a list of RegionSet objects
    and the parsed level.dat data.

    """

    def __init__(self, worlddir):
        self.worlddir = worlddir

        # This list, populated below, will hold the RegionSet objects that are
        # in this world
        self.regionsets = []

        # The level.dat file defines a minecraft world, so assert that this
        # object corresponds to a world on disk
        if not os.path.exists(os.path.join(self.worlddir, "level.dat")):
            raise ValueError("level.dat not found in %s" % self.worlddir)

        # Reading level.dat has historically been flaky, particularly with
        # respect to the spawnX,Y,Z values. Empirically, 0,50,0 looked like a
        # "bad" reading, but it is also the default spawn and may be valid in
        # some cases, so we just take a single reading here.
        data = nbt.load(os.path.join(self.worlddir, "level.dat"))[1]['Data']

        # Hard-code this to only work with format version 19133, "Anvil"
        if not ('version' in data and data['version'] == 19133):
            LOG.critical("Sorry, this version of Minecraft-Overviewer only works with the 'Anvil' chunk format")
            raise ValueError("World at %s is not compatible with Overviewer" % self.worlddir)

        # This isn't much data, around 15 keys and values for vanilla worlds.
        self.leveldat = data

        # Scan worlddir to try to identify all region sets. Since different
        # server mods like to arrange regions differently and there does not
        # seem to be any set standard on what dimensions are in each world,
        # just scan the directory hierarchy to find a directory with .mca
        # files.
        for root, dirs, files in os.walk(self.worlddir, followlinks=True):
            # any .mca files in this directory?
            mcas = [x for x in files if x.endswith(".mca")]
            if mcas:
                # construct a regionset object for this
                rel = os.path.relpath(root, self.worlddir)
                rset = RegionSet(root, rel)
                if root == os.path.join(self.worlddir, "region"):
                    self.regionsets.insert(0, rset)
                else:
                    self.regionsets.append(rset)

        # TODO move a lot of the following code into the RegionSet

        try:
            # level.dat should have the LevelName attribute so we'll use that
            self.name = data['LevelName']
        except KeyError:
            # but very old ones might not? so we'll just go with the world dir name if they don't
            self.name = os.path.basename(os.path.realpath(self.worlddir))

        try:
            # level.dat also has a RandomSeed attribute
            self.seed = data['RandomSeed']
        except KeyError:
            self.seed = 0 # oh well

        # TODO figure out where to handle regionlists

    def get_regionsets(self):
        return self.regionsets

    def get_regionset(self, index):
        if isinstance(index, int):
            return self.regionsets[index]
        else: # assume a get_type() value
            candids = [x for x in self.regionsets if x.get_type() == index]
            LOG.debug("You asked for %r, and I found the following candids: %r", index, candids)
            if candids:
                return candids[0]
            else:
                return None

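    # A hedged usage sketch (assumes the usual vanilla layout where the nether
    # keeps its regions under "DIM-1/region", making its get_type() "DIM-1"):
    #
    #   overworld = w.get_regionset(None)    # the main "region" directory
    #   nether = w.get_regionset("DIM-1")    # None if that dimension is absent
    #   first = w.get_regionset(0)           # or index into get_regionsets()
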
    def get_level_dat_data(self):
        # Return a copy
        return dict(self.leveldat)

    def find_true_spawn(self):
        """Returns the spawn point for this world. Since there is one spawn
        point for a world across all dimensions (RegionSets), this method makes
        sense as a member of the World class.

        Returns (x, y, z)

        """
        # The spawn Y coordinate is almost always the default of 64. Find the
        # first air block above the stored spawn location for the true spawn
        # location

        ## read spawn info from level.dat
        data = self.leveldat
        spawnX = data['SpawnX']
        spawnY = data['SpawnY']
        spawnZ = data['SpawnZ']

        ## The chunk that holds the spawn location
        chunkX = spawnX//16
        chunkZ = spawnZ//16

        ## clamp spawnY to a sane in-world value
        if spawnY < 0:
            spawnY = 0
        if spawnY > 255:
            spawnY = 255

        # Open up the chunk that the spawn is in
        regionset = self.get_regionset(None)
        if not regionset:
            return None
        try:
            chunk = regionset.get_chunk(chunkX, chunkZ)
        except ChunkDoesntExist:
            return (spawnX, spawnY, spawnZ)

        def getBlock(y):
            "This is stupid and slow but I don't care"
            targetSection = y//16
            for section in chunk['Sections']:
                if section['Y'] == targetSection:
                    blockArray = section['Blocks']
                    return blockArray[inChunkX, inChunkZ, y % 16]
            return 0

        ## The block for spawn *within* the chunk
        inChunkX = spawnX - (chunkX*16)
        inChunkZ = spawnZ - (chunkZ*16)

        ## find the first air block
        while (getBlock(spawnY) != 0) and spawnY < 256:
            spawnY += 1

        return spawnX, spawnY, spawnZ


class RegionSet(object):
    """This object is the gateway to a particular Minecraft dimension within a
    world. It corresponds to a set of region files containing the actual
    world data. This object has methods for parsing and returning data from
    the chunks of its regions.

    See the docs for the World object for more information on the difference
    between Worlds and RegionSets.

    """

    def __init__(self, regiondir, rel):
        """Initialize a new RegionSet to access the region files in the given
        directory.

        regiondir is a path to a directory containing region files.

        rel is the relative path of this directory, with respect to the
        world directory.

        """
        self.regiondir = os.path.normpath(regiondir)
        self.rel = os.path.normpath(rel)
        LOG.debug("regiondir is %r", self.regiondir)
        LOG.debug("rel is %r", self.rel)

        # we want to get rid of /region, if it exists
        if self.rel.endswith(os.path.normpath("/region")):
            self.type = self.rel[0:-len(os.path.normpath("/region"))]
        elif self.rel == "region":
            # this is the main world
            self.type = None
        else:
            LOG.warning("Unknown region type in %r", regiondir)
            self.type = "__unknown"

        LOG.debug("Scanning regions. Type is %r", self.type)

        # This is populated below. It is a mapping from (x,y) region coords to filename
        self.regionfiles = {}

        # This holds a cache of open regionfile objects
        self.regioncache = cache.LRUCache(size=16, destructor=lambda regionobj: regionobj.close())

        for x, y, regionfile in self._iterate_regionfiles():
            # regionfile is a pathname
            self.regionfiles[(x,y)] = regionfile

        self.empty_chunk = [None,None]
        LOG.debug("Done scanning regions")

    # Re-initialize upon unpickling
    def __getstate__(self):
        return (self.regiondir, self.rel)
    def __setstate__(self, state):
        return self.__init__(*state)

    def __repr__(self):
        return "<RegionSet regiondir=%r>" % self.regiondir

    def get_type(self):
        """Attempts to return a string describing the dimension
        represented by this regionset. Usually this is the relative
        path of the regionset within the world, minus the suffix
        /region, but for the main world it's None.
        """
        # path will be normalized in __init__
        return self.type

    def _get_regionobj(self, regionfilename):
        # Check the cache first. If it's not there, create the
        # nbt.MCRFileReader object, cache it, and return it.
        # May raise an nbt.CorruptRegionError
        try:
            return self.regioncache[regionfilename]
        except KeyError:
            region = nbt.load_region(regionfilename)
            self.regioncache[regionfilename] = region
            return region

    #@log_other_exceptions
    def get_chunk(self, x, z):
        """Returns a dictionary object representing the "Level" NBT Compound
        structure for a chunk given its x, z coordinates. The coordinates given
        are chunk coordinates. Raises ChunkDoesntExist exception if the given
        chunk does not exist.

        The returned dictionary corresponds to the "Level" structure in the
        chunk file, with a few changes:

        * The Biomes array is transformed into a 16x16 numpy array

        * For each chunk section:

          * The "Blocks" byte string is transformed into a 16x16x16 numpy array
          * The Add array, if it exists, is bitshifted left 8 bits and
            added into the Blocks array
          * The "SkyLight" byte string is transformed into a 16x16x16 numpy
            array
          * The "BlockLight" byte string is transformed into a 16x16x16 numpy
            array
          * The "Data" byte string is transformed into a 16x16x16 numpy array

        Warning: the returned data may be cached and thus should not be
        modified, lest it affect the return values of future calls for the same
        chunk.
        """
        regionfile = self._get_region_path(x, z)
        if regionfile is None:
            raise ChunkDoesntExist("Chunk %s,%s doesn't exist (and neither does its region)" % (x,z))

        # Try a few times to load and parse this chunk before giving up and
        # raising an error
        tries = 5
        while True:
            try:
                region = self._get_regionobj(regionfile)
                data = region.load_chunk(x, z)
            except nbt.CorruptionError, e:
                tries -= 1
                if tries > 0:
                    # Flush the region cache to possibly read a new region file
                    # header
                    LOG.debug("Encountered a corrupt chunk at %s,%s. Flushing cache and retrying", x, z)
                    #LOG.debug("Error was:", exc_info=1)
                    del self.regioncache[regionfile]
                    time.sleep(0.5)
                    continue
                else:
                    if isinstance(e, nbt.CorruptRegionError):
                        LOG.warning("Tried several times to read chunk %d,%d. Its region (%d,%d) may be corrupt. Giving up.",
                                    x, z, x//32, z//32)
                    elif isinstance(e, nbt.CorruptChunkError):
                        LOG.warning("Tried several times to read chunk %d,%d. It may be corrupt. Giving up.",
                                    x, z)
                    else:
                        LOG.warning("Tried several times to read chunk %d,%d. Unknown error. Giving up.",
                                    x, z)
                    LOG.debug("Full traceback:", exc_info=1)
                    # Let this exception propagate out through the C code into
                    # tileset.py, where it is caught and gracefully continues
                    # with the next chunk
                    raise
            else:
                # no exception raised: break out of the loop
                break

        if data is None:
            raise ChunkDoesntExist("Chunk %s,%s doesn't exist" % (x,z))

        level = data[1]['Level']
        chunk_data = level

        # Turn the Biomes array into a 16x16 numpy array
        try:
            biomes = numpy.frombuffer(chunk_data['Biomes'], dtype=numpy.uint8)
            biomes = biomes.reshape((16,16))
        except KeyError:
            # worlds converted by Jeb's program may be missing the Biomes key
            biomes = numpy.zeros((16, 16), dtype=numpy.uint8)
        chunk_data['Biomes'] = biomes

        for section in chunk_data['Sections']:

            # Turn the Blocks array into a 16x16x16 numpy matrix of shorts,
            # adding in the additional block array if included.
            blocks = numpy.frombuffer(section['Blocks'], dtype=numpy.uint8)
            # Cast up to uint16; blocks can have up to 12 bits of data
            blocks = blocks.astype(numpy.uint16)
            blocks = blocks.reshape((16,16,16))
            if "Add" in section:
                # This section has additional bits to tack on to the blocks
                # array. Add is a packed array with 4 bits per slot, so
                # it needs expanding
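                # (As implemented just below: a packed byte like 0xAB puts its
                # low nibble 0xB into the even X slot and its high nibble 0xA
                # into the odd X slot, shifted up into bits 8-11 of the block id.)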
                additional = numpy.frombuffer(section['Add'], dtype=numpy.uint8)
                additional = additional.astype(numpy.uint16).reshape((16,16,8))
                additional_expanded = numpy.empty((16,16,16), dtype=numpy.uint16)
                additional_expanded[:,:,::2] = (additional & 0x0F) << 8
                additional_expanded[:,:,1::2] = (additional & 0xF0) << 4
                blocks += additional_expanded
                del additional
                del additional_expanded
                del section['Add'] # Save some memory
            section['Blocks'] = blocks

            # Turn the SkyLight array into a 16x16x16 matrix. The array comes
            # packed 2 elements per byte, so we need to expand it.
            skylight = numpy.frombuffer(section['SkyLight'], dtype=numpy.uint8)
            skylight = skylight.reshape((16,16,8))
            skylight_expanded = numpy.empty((16,16,16), dtype=numpy.uint8)
            skylight_expanded[:,:,::2] = skylight & 0x0F
            skylight_expanded[:,:,1::2] = (skylight & 0xF0) >> 4
            del skylight
            section['SkyLight'] = skylight_expanded

            # Turn the BlockLight array into a 16x16x16 matrix, same as SkyLight
            blocklight = numpy.frombuffer(section['BlockLight'], dtype=numpy.uint8)
            blocklight = blocklight.reshape((16,16,8))
            blocklight_expanded = numpy.empty((16,16,16), dtype=numpy.uint8)
            blocklight_expanded[:,:,::2] = blocklight & 0x0F
            blocklight_expanded[:,:,1::2] = (blocklight & 0xF0) >> 4
            del blocklight
            section['BlockLight'] = blocklight_expanded

            # Turn the Data array into a 16x16x16 matrix, same as SkyLight
            data = numpy.frombuffer(section['Data'], dtype=numpy.uint8)
            data = data.reshape((16,16,8))
            data_expanded = numpy.empty((16,16,16), dtype=numpy.uint8)
            data_expanded[:,:,::2] = data & 0x0F
            data_expanded[:,:,1::2] = (data & 0xF0) >> 4
            del data
            section['Data'] = data_expanded

        return chunk_data

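    # A hedged usage sketch for get_chunk() (illustrative only):
    #
    #   try:
    #       chunk = rset.get_chunk(cx, cz)
    #   except ChunkDoesntExist:
    #       pass                                      # skip missing chunks
    #   else:
    #       blocks = chunk['Sections'][0]['Blocks']   # 16x16x16 uint16 array
    #       biomes = chunk['Biomes']                  # 16x16 uint8 array
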
    def iterate_chunks(self):
        """Returns an iterator over all chunk metadata in this world. Iterates
        over tuples of integers (x,z,mtime) for each chunk. Other chunk data
        is not returned here.

        """

        for (regionx, regiony), regionfile in self.regionfiles.iteritems():
            try:
                mcr = self._get_regionobj(regionfile)
            except nbt.CorruptRegionError:
                LOG.warning("Found a corrupt region file at %s,%s. Skipping it.", regionx, regiony)
                continue
            for chunkx, chunky in mcr.get_chunks():
                yield chunkx+32*regionx, chunky+32*regiony, mcr.get_chunk_timestamp(chunkx, chunky)

    def get_chunk_mtime(self, x, z):
        """Returns a chunk's mtime, or None if the chunk does not exist. This
        is therefore a dual purpose method: it both checks for existence and
        returns the timestamp.

        """

        regionfile = self._get_region_path(x,z)
        if regionfile is None:
            return None
        try:
            data = self._get_regionobj(regionfile)
        except nbt.CorruptRegionError:
            LOG.warning("Ignoring request for chunk %s,%s; region %s,%s seems to be corrupt",
                        x, z, x//32, z//32)
            return None
        if data.chunk_exists(x,z):
            return data.get_chunk_timestamp(x,z)
        return None

    def _get_region_path(self, chunkX, chunkY):
        """Returns the path to the region that contains chunk (chunkX, chunkY),
        or None if that region does not exist. Coords are global chunk coords.

        """
        regionfile = self.regionfiles.get((chunkX//32, chunkY//32), None)
        return regionfile

    def _iterate_regionfiles(self):
        """Returns an iterator of all of the region files, along with their
        coordinates.

        Returns (regionx, regiony, filename)"""

        LOG.debug("regiondir is %s, has type %r", self.regiondir, self.type)

        for f in os.listdir(self.regiondir):
            if re.match(r"^r\.-?\d+\.-?\d+\.mca$", f):
                p = f.split(".")
                x = int(p[1])
                y = int(p[2])
                if abs(x) > 500000 or abs(y) > 500000:
                    LOG.warning("Holy shit what is up with region file %s !?", f)
                yield (x, y, os.path.join(self.regiondir, f))


class RegionSetWrapper(object):
    """This is the base class for all "wrappers" of RegionSet objects. A
    wrapper is an object that acts similarly to a subclass: some methods are
    overridden and functionality is changed, others may not be. The difference
    here is that these wrappers may wrap each other, forming chains.

    In fact, subclasses of this object may act exactly as if they've subclassed
    the original RegionSet object, except the first parameter of the
    constructor is a regionset object, not a regiondir.

    This class must implement the full public interface of RegionSet objects.

    """
    def __init__(self, rsetobj):
        self._r = rsetobj

    def get_type(self):
        return self._r.get_type()

    def get_biome_data(self, x, z):
        return self._r.get_biome_data(x,z)

    def get_chunk(self, x, z):
        return self._r.get_chunk(x,z)

    def iterate_chunks(self):
        return self._r.iterate_chunks()

    def get_chunk_mtime(self, x, z):
        return self._r.get_chunk_mtime(x,z)


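# A hedged sketch of how these wrappers compose (illustrative only):
#
#   rset = World(worlddir).get_regionset(None)
#   rset = RotatedRegionSet(rset, UPPER_RIGHT)
#   rset = CroppedRegionSet(rset, -1024, -1024, 1024, 1024)
#   # the result still exposes get_chunk(), iterate_chunks(), get_chunk_mtime()
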
# see RotatedRegionSet.rotate. These values are chosen so that they can be
# passed directly to rot90; this means that they're the number of
# times to rotate by 90 degrees CCW
UPPER_LEFT = 0  ## - Return the world such that north is down the -Z axis (no rotation)
UPPER_RIGHT = 1 ## - Return the world such that north is down the +X axis (rotate 90 degrees counterclockwise)
LOWER_RIGHT = 2 ## - Return the world such that north is down the +Z axis (rotate 180 degrees)
LOWER_LEFT = 3  ## - Return the world such that north is down the -X axis (rotate 90 degrees clockwise)

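# For example, with north_dir=UPPER_RIGHT (1), RotatedRegionSet.get_chunk()
# maps the requested rotated coords (x,z) back to un-rotated coords via
# _ROTATE_COUNTERCLOCKWISE, i.e. (x,z) -> (z,-x), before reading the chunk.
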
class RotatedRegionSet(RegionSetWrapper):
    """A regionset, only rotated such that north points in the given direction

    """

    # some class-level rotation constants
    _NO_ROTATION = lambda x,z: (x,z)
    _ROTATE_CLOCKWISE = lambda x,z: (-z,x)
    _ROTATE_COUNTERCLOCKWISE = lambda x,z: (z,-x)
    _ROTATE_180 = lambda x,z: (-x,-z)

    # These take rotated coords and translate into un-rotated coords
    _unrotation_funcs = [
        _NO_ROTATION,
        _ROTATE_COUNTERCLOCKWISE,
        _ROTATE_180,
        _ROTATE_CLOCKWISE,
    ]

    # These translate un-rotated coordinates into rotated coordinates
    _rotation_funcs = [
        _NO_ROTATION,
        _ROTATE_CLOCKWISE,
        _ROTATE_180,
        _ROTATE_COUNTERCLOCKWISE,
    ]

    def __init__(self, rsetobj, north_dir):
        self.north_dir = north_dir
        self.unrotate = self._unrotation_funcs[north_dir]
        self.rotate = self._rotation_funcs[north_dir]

        super(RotatedRegionSet, self).__init__(rsetobj)

    # Re-initialize upon unpickling. This is needed because we store a couple
    # lambda functions as instance variables
    def __getstate__(self):
        return (self._r, self.north_dir)
    def __setstate__(self, args):
        self.__init__(args[0], args[1])

    def get_chunk(self, x, z):
        x,z = self.unrotate(x,z)
        chunk_data = dict(super(RotatedRegionSet, self).get_chunk(x,z))
        newsections = []
        for section in chunk_data['Sections']:
            section = dict(section)
            newsections.append(section)
            for arrayname in ['Blocks', 'Data', 'SkyLight', 'BlockLight']:
                array = section[arrayname]
                # Since the anvil change, arrays are arranged with axes Y,Z,X.
                # numpy.rot90 always rotates the first two axes, so for it to
                # work, we need to temporarily move the X axis to the 0th axis.
                array = numpy.swapaxes(array, 0,2)
                array = numpy.rot90(array, self.north_dir)
                array = numpy.swapaxes(array, 0,2)
                section[arrayname] = array
        chunk_data['Sections'] = newsections

        # same as above, for biomes (Z/X indexed)
        biomes = numpy.swapaxes(chunk_data['Biomes'], 0, 1)
        biomes = numpy.rot90(biomes, self.north_dir)
        chunk_data['Biomes'] = numpy.swapaxes(biomes, 0, 1)
        return chunk_data

    def get_chunk_mtime(self, x, z):
        x,z = self.unrotate(x,z)
        return super(RotatedRegionSet, self).get_chunk_mtime(x, z)

    def iterate_chunks(self):
        for x,z,mtime in super(RotatedRegionSet, self).iterate_chunks():
            x,z = self.rotate(x,z)
            yield x,z,mtime


class CroppedRegionSet(RegionSetWrapper):
    def __init__(self, rsetobj, xmin, zmin, xmax, zmax):
        super(CroppedRegionSet, self).__init__(rsetobj)
        self.xmin = xmin//16
        self.xmax = xmax//16
        self.zmin = zmin//16
        self.zmax = zmax//16

    def get_chunk(self, x, z):
        if (
                self.xmin <= x <= self.xmax and
                self.zmin <= z <= self.zmax
                ):
            return super(CroppedRegionSet, self).get_chunk(x,z)
        else:
            raise ChunkDoesntExist("This chunk is out of the requested bounds")

    def iterate_chunks(self):
        return ((x,z,mtime) for (x,z,mtime) in super(CroppedRegionSet,self).iterate_chunks()
                if
                self.xmin <= x <= self.xmax and
                self.zmin <= z <= self.zmax
                )

    def get_chunk_mtime(self, x, z):
        if (
                self.xmin <= x <= self.xmax and
                self.zmin <= z <= self.zmax
                ):
            return super(CroppedRegionSet, self).get_chunk_mtime(x,z)
        else:
            return None


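# For example (a hedged reading of the //16 above: the bounds appear to be
# block coordinates, stored internally as chunk coordinates):
#
#   cropped = CroppedRegionSet(rset, -512, -512, 511, 511)
#   # keeps chunks with -32 <= x <= 31 and -32 <= z <= 31
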
class CachedRegionSet(RegionSetWrapper):
    """A regionset wrapper that implements caching of the results from
    get_chunk()

    """
    def __init__(self, rsetobj, cacheobjects):
        """Initialize this wrapper around the given regionset object and with
        the given list of cache objects. The cache objects may be shared among
        other CachedRegionSet objects.

        """
        super(CachedRegionSet, self).__init__(rsetobj)
        self.caches = cacheobjects

        # Construct a key from the sequence of transformations and the real
        # RegionSet object, so that items we place in the cache don't conflict
        # with other worlds/transformation combinations.
        obj = self._r
        s = ""
        while isinstance(obj, RegionSetWrapper):
            s += obj.__class__.__name__ + "."
            obj = obj._r
        # obj should now be the actual RegionSet object
        try:
            s += obj.regiondir
        except AttributeError:
            s += repr(obj)

        LOG.debug("Initializing a cache with key '%s'", s)

        s = hashlib.md5(s).hexdigest()

        self.key = s

    def get_chunk(self, x, z):
        key = hashlib.md5(repr((self.key, x, z))).hexdigest()
        for i, cache in enumerate(self.caches):
            try:
                retval = cache[key]
                # This cache did have it; no need to re-add it here, just to
                # the ones before it
                i -= 1
                break
            except KeyError:
                pass
        else:
            retval = super(CachedRegionSet, self).get_chunk(x,z)

        # Now add retval to all the caches that didn't have it, all the caches
        # up to and including index i
        for cache in self.caches[:i+1]:
            cache[key] = retval

        return retval


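# A hedged usage sketch (assumes the LRUCache from the local cache module,
# the same class used above for caching open region files):
#
#   rset = World(worlddir).get_regionset(None)
#   rset = CachedRegionSet(rset, [cache.LRUCache(size=100)])
#   chunk = rset.get_chunk(0, 0)   # a second call for (0, 0) hits the cache
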
def get_save_dir():
    """Returns the path to the local saves directory:
      * On Windows, at %APPDATA%/.minecraft/saves/
      * On Darwin, at $HOME/Library/Application Support/minecraft/saves/
      * Otherwise, at $HOME/.minecraft/saves/

    """

    savepaths = []
    if "APPDATA" in os.environ:
        savepaths += [os.path.join(os.environ['APPDATA'], ".minecraft", "saves")]
    if "HOME" in os.environ:
        savepaths += [os.path.join(os.environ['HOME'], "Library",
                "Application Support", "minecraft", "saves")]
        savepaths += [os.path.join(os.environ['HOME'], ".minecraft", "saves")]

    for path in savepaths:
        if os.path.exists(path):
            return path

def get_worlds():
    "Returns {world # or name : level.dat information}"
    ret = {}
    save_dir = get_save_dir()

    # No dirs found - most likely not running from inside minecraft-dir
    if save_dir is not None:
        for dir in os.listdir(save_dir):
            world_dat = os.path.join(save_dir, dir, "level.dat")
            if not os.path.exists(world_dat): continue
            info = nbt.load(world_dat)[1]
            info['Data']['path'] = os.path.join(save_dir, dir)
            if dir.startswith("World") and len(dir) == 6:
                try:
                    world_n = int(dir[-1])
                    ret[world_n] = info['Data']
                except ValueError:
                    pass
            if 'LevelName' in info['Data']:
                ret[info['Data']['LevelName']] = info['Data']

    for dir in os.listdir("."):
        world_dat = os.path.join(dir, "level.dat")
        if not os.path.exists(world_dat): continue
        info = nbt.load(world_dat)[1]
        info['Data']['path'] = os.path.join(".", dir)
        if 'LevelName' in info['Data']:
            ret[info['Data']['LevelName']] = info['Data']

    return ret
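
# A hedged usage sketch for the helpers above (illustrative only):
#
#   worlds = get_worlds()                    # {name or number: level.dat 'Data'}
#   if "MyWorld" in worlds:
#       w = World(worlds["MyWorld"]["path"])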