work on plotme migration
@@ -46,7 +46,7 @@ interface ParcelData : AddedData {
     }
 }
 
-class ParcelDataHolder : AddedDataHolder(), ParcelData {
+class ParcelDataHolder(addedMap: MutableAddedDataMap = mutableMapOf()) : AddedDataHolder(addedMap), ParcelData {
 
     override var owner: ParcelOwner? = null
     override var since: DateTime? = null
@@ -85,25 +85,32 @@ class ParcelsPlugin : JavaPlugin() {
     fun loadOptions(): Boolean {
         when {
             optionsFile.exists() -> optionsMapper.readerForUpdating(options).readValue<Options>(optionsFile)
-            optionsFile.tryCreate() -> {
+            else -> run {
                 options.addWorld("parcels")
-                try {
-                    optionsMapper.writeValue(optionsFile, options)
-                } catch (ex: Throwable) {
-                    optionsFile.delete()
-                    throw ex
+                if (saveOptions()) {
+                    plogger.warn("Created options file with a world template. Please review it before next start.")
+                } else {
+                    plogger.error("Failed to save options file ${optionsFile.canonicalPath}")
                 }
-                plogger.warn("Created options file with a world template. Please review it before next start.")
-                return false
-            }
-            else -> {
-                plogger.error("Failed to save options file ${optionsFile.canonicalPath}")
                 return false
             }
         }
         return true
     }
 
+    fun saveOptions(): Boolean {
+        if (optionsFile.tryCreate()) {
+            try {
+                optionsMapper.writeValue(optionsFile, options)
+            } catch (ex: Throwable) {
+                optionsFile.delete()
+                throw ex
+            }
+            return true
+        }
+        return false
+    }
+
     override fun getDefaultWorldGenerator(worldName: String, generatorId: String?): ChunkGenerator? {
         return parcelProvider.getWorldGenerator(worldName)
     }
@@ -119,6 +126,8 @@ class ParcelsPlugin : JavaPlugin() {
             listeners = ParcelListeners(parcelProvider, entityTracker)
             registrator.registerListeners(listeners!!)
         }
+
+        functionHelper.scheduleRepeating(100, 5, entityTracker::tick)
     }
 
 }
@@ -33,15 +33,18 @@ class CommandsDebug(plugin: ParcelsPlugin) : AbstractParcelCommands(plugin) {
     fun ParcelScope.cmdMakeMess(context: ExecutionContext) {
         val server = plugin.server
         val blockDatas = arrayOf(
-            server.createBlockData(Material.STICKY_PISTON),
+            server.createBlockData(Material.BLUE_WOOL),
+            server.createBlockData(Material.LIME_WOOL),
             server.createBlockData(Material.GLASS),
             server.createBlockData(Material.STONE_SLAB),
-            server.createBlockData(Material.QUARTZ_BLOCK)
+            server.createBlockData(Material.STONE),
+            server.createBlockData(Material.QUARTZ_BLOCK),
+            server.createBlockData(Material.BROWN_CONCRETE)
         )
         val random = Random()
 
         world.doBlockOperation(parcel.id, direction = RegionTraversal.UPWARD) { block ->
-            block.blockData = blockDatas[random.nextInt(4)]
+            block.blockData = blockDatas[random.nextInt(7)]
         }.onProgressUpdate(1000, 1000) { progress, elapsedTime ->
             context.sendMessage(EMessageType.INFORMATIVE, "Mess progress: %.02f%%, %.2fs elapsed"
                 .format(progress * 100, elapsedTime / 1000.0))
@@ -146,7 +146,7 @@ class DefaultParcelGenerator(val name: String, private val o: DefaultGeneratorOp
 
     override fun getHomeLocation(parcel: ParcelId): Location {
         val bottom = getBottomBlock(parcel)
-        return Location(world, bottom.x.toDouble(), o.floorHeight + 1.0, bottom.z + (o.parcelSize - 1) / 2.0, -90F, 0F)
+        return Location(world, bottom.x.toDouble() + 0.5, o.floorHeight + 1.0, bottom.z + 0.5 + (o.parcelSize - 1) / 2.0, -90F, 0F)
     }
 
     override fun setOwnerBlock(parcel: ParcelId, owner: ParcelOwner?) {
@@ -60,6 +60,21 @@ class ParcelProviderImpl(val plugin: ParcelsPlugin) : ParcelProvider {
 
     private fun loadStoredData() {
         plugin.functionHelper.launchLazilyOnMainThread {
+            val migration = plugin.options.migration
+            if (migration.enabled) {
+                migration.instance?.newInstance()?.apply {
+                    logger.warn("Migrating database now...")
+                    migrateTo(plugin.storage).join()
+                    logger.warn("Migration completed")
+
+                    if (migration.disableWhenComplete) {
+                        migration.enabled = false
+                        plugin.saveOptions()
+                    }
+                }
+            }
+
+            logger.info("Loading all parcel data...")
             val channel = plugin.storage.readAllParcelData()
             do {
                 val pair = channel.receiveOrNull() ?: break
@@ -67,6 +82,7 @@ class ParcelProviderImpl(val plugin: ParcelsPlugin) : ParcelProvider {
                 pair.second?.let { parcel.copyDataIgnoringDatabase(it) }
             } while (true)
 
+            logger.info("Loading data completed")
             _dataIsLoaded = true
         }.start()
     }
@@ -26,7 +26,7 @@ class ParcelEntityTracker(val parcelProvider: ParcelProvider) {
      */
     fun tick() {
         map.editLoop { entity, parcel ->
-            if (entity.isDead || entity.isOnGround) {
+            if (entity.isDead) {
                 remove(); return@editLoop
             }
             if (parcel.isPresentAnd { hasBlockVisitors }) {
@@ -83,9 +83,9 @@ class ParcelListeners(val parcelProvider: ParcelProvider, val entityTracker: Par
      * Prevents players from placing blocks outside of their parcels
      */
     @field:ListenerMarker(priority = NORMAL)
-    val onBlockPlaceEvent = RegistratorListener<BlockBreakEvent> l@{ event ->
+    val onBlockPlaceEvent = RegistratorListener<BlockPlaceEvent> l@{ event ->
         val (wo, ppa) = getWoAndPPa(event.block) ?: return@l
-        if (!event.player.hasBuildAnywhere && !ppa.isNullOr { !canBuild(event.player) }) {
+        if (!event.player.hasBuildAnywhere && ppa.isNullOr { !canBuild(event.player) }) {
             event.isCancelled = true
         }
     }
@@ -184,66 +184,73 @@ class ParcelListeners(val parcelProvider: ParcelProvider, val entityTracker: Par
             }
 
             when (event.action) {
-                Action.RIGHT_CLICK_BLOCK -> when (clickedBlock.type) {
-                    REPEATER,
-                    COMPARATOR -> run {
-                        if (!parcel.canBuildN(user)) {
-                            event.isCancelled = true; return@l
-                        }
-                    }
-                    LEVER,
-                    STONE_BUTTON,
-                    ANVIL,
-                    TRAPPED_CHEST,
-                    OAK_BUTTON, BIRCH_BUTTON, SPRUCE_BUTTON, JUNGLE_BUTTON, ACACIA_BUTTON, DARK_OAK_BUTTON,
-                    OAK_FENCE_GATE, BIRCH_FENCE_GATE, SPRUCE_FENCE_GATE, JUNGLE_FENCE_GATE, ACACIA_FENCE_GATE, DARK_OAK_FENCE_GATE,
-                    OAK_DOOR, BIRCH_DOOR, SPRUCE_DOOR, JUNGLE_DOOR, ACACIA_DOOR, DARK_OAK_DOOR,
-                    OAK_TRAPDOOR, BIRCH_TRAPDOOR, SPRUCE_TRAPDOOR, JUNGLE_TRAPDOOR, ACACIA_TRAPDOOR, DARK_OAK_TRAPDOOR
-                    -> run {
-                        if (!user.hasBuildAnywhere && !parcel.isNullOr { canBuild(user) || allowInteractInputs }) {
-                            user.sendParcelMessage(nopermit = true, message = "You cannot use inputs in this parcel")
-                            event.isCancelled = true; return@l
-                        }
-                    }
-
-                    WHITE_BED, ORANGE_BED, MAGENTA_BED, LIGHT_BLUE_BED, YELLOW_BED, LIME_BED, PINK_BED, GRAY_BED, LIGHT_GRAY_BED, CYAN_BED, PURPLE_BED, BLUE_BED, BROWN_BED, GREEN_BED, RED_BED, BLACK_BED
-                    -> run {
-                        if (world.options.disableExplosions) {
-                            val bed = clickedBlock.blockData as Bed
-                            val head = if (bed == Bed.Part.FOOT) clickedBlock.getRelative(bed.facing) else clickedBlock
-                            when (head.biome) {
-                                Biome.NETHER, Biome.THE_END -> run {
-                                    user.sendParcelMessage(nopermit = true, message = "You cannot use this bed because it would explode")
-                                    event.isCancelled = true; return@l
-                                }
-                            }
-                        }
-                    }
-                }
-
-                Action.RIGHT_CLICK_AIR -> if (event.hasItem()) {
-                    val item = event.item.type
-                    if (world.options.blockedItems.contains(item)) {
-                        user.sendParcelMessage(nopermit = true, message = "You cannot use this bed because it would explode")
-                        event.isCancelled = true; return@l
-                    }
-
-                    if (!parcel.canBuildN(user)) {
-                        when (item) {
-                            LAVA_BUCKET, WATER_BUCKET, BUCKET, FLINT_AND_STEEL -> event.isCancelled = true
-                        }
-                    }
-                }
+                Action.RIGHT_CLICK_BLOCK -> run {
+                    when (clickedBlock.type) {
+                        REPEATER,
+                        COMPARATOR -> run {
+                            if (!parcel.canBuildN(user)) {
+                                event.isCancelled = true; return@l
+                            }
+                        }
+                        LEVER,
+                        STONE_BUTTON,
+                        ANVIL,
+                        TRAPPED_CHEST,
+                        OAK_BUTTON, BIRCH_BUTTON, SPRUCE_BUTTON, JUNGLE_BUTTON, ACACIA_BUTTON, DARK_OAK_BUTTON,
+                        OAK_FENCE_GATE, BIRCH_FENCE_GATE, SPRUCE_FENCE_GATE, JUNGLE_FENCE_GATE, ACACIA_FENCE_GATE, DARK_OAK_FENCE_GATE,
+                        OAK_DOOR, BIRCH_DOOR, SPRUCE_DOOR, JUNGLE_DOOR, ACACIA_DOOR, DARK_OAK_DOOR,
+                        OAK_TRAPDOOR, BIRCH_TRAPDOOR, SPRUCE_TRAPDOOR, JUNGLE_TRAPDOOR, ACACIA_TRAPDOOR, DARK_OAK_TRAPDOOR
+                        -> run {
+                            if (!user.hasBuildAnywhere && !parcel.isNullOr { canBuild(user) || allowInteractInputs }) {
+                                user.sendParcelMessage(nopermit = true, message = "You cannot use inputs in this parcel")
+                                event.isCancelled = true; return@l
+                            }
+                        }
+
+                        WHITE_BED, ORANGE_BED, MAGENTA_BED, LIGHT_BLUE_BED, YELLOW_BED, LIME_BED, PINK_BED, GRAY_BED, LIGHT_GRAY_BED, CYAN_BED, PURPLE_BED, BLUE_BED, BROWN_BED, GREEN_BED, RED_BED, BLACK_BED
+                        -> run {
+                            if (world.options.disableExplosions) {
+                                val bed = clickedBlock.blockData as Bed
+                                val head = if (bed == Bed.Part.FOOT) clickedBlock.getRelative(bed.facing) else clickedBlock
+                                when (head.biome) {
+                                    Biome.NETHER, Biome.THE_END -> run {
+                                        user.sendParcelMessage(nopermit = true, message = "You cannot use this bed because it would explode")
+                                        event.isCancelled = true; return@l
+                                    }
+                                }
+                            }
+                        }
+                    }
+                    onPlayerInteractEvent_RightClick(event, world, parcel)
+                }
+
+                Action.RIGHT_CLICK_AIR -> onPlayerInteractEvent_RightClick(event, world, parcel)
 
                 Action.PHYSICAL -> if (!user.hasBuildAnywhere && !parcel.isPresentAnd { canBuild(user) || allowInteractInputs }) {
+                    user.sendParcelMessage(nopermit = true, message = "You cannot use inputs in this parcel")
                     event.isCancelled = true; return@l
                 }
             }
         }
 
+    @Suppress("NON_EXHAUSTIVE_WHEN")
+    private fun onPlayerInteractEvent_RightClick(event: PlayerInteractEvent, world: ParcelWorld, parcel: Parcel?) {
+        if (event.hasItem()) {
+            val item = event.item.type
+            if (world.options.blockedItems.contains(item)) {
+                event.player.sendParcelMessage(nopermit = true, message = "You cannot use this item because it is disabled in this world")
+                event.isCancelled = true; return
+            }
+
+            if (!parcel.canBuildN(event.player)) {
+                when (item) {
+                    LAVA_BUCKET, WATER_BUCKET, BUCKET, FLINT_AND_STEEL -> event.isCancelled = true
+                }
+            }
+        }
+    }
+
     /*
      * Prevents players from breeding mobs, entering or opening boats/minecarts,
      * rotating item frames, doing stuff with leashes, and putting stuff on armor stands.
@@ -352,7 +359,7 @@ class ParcelListeners(val parcelProvider: ParcelProvider, val entityTracker: Par
         world.weatherDuration = Int.MAX_VALUE
     }
 
-    // TODO: BlockFormEvent, BlockSpreadEvent, BlockFadeEvent
+    // TODO: BlockFormEvent, BlockSpreadEvent, BlockFadeEvent, Fireworks
 
     /*
      * Prevents natural blocks forming
@@ -370,10 +377,10 @@ class ParcelListeners(val parcelProvider: ParcelProvider, val entityTracker: Par
         val hasEntity = event is EntityBlockFormEvent
         val player = (event as? EntityBlockFormEvent)?.entity as? Player
 
-        val cancel: Boolean = when (block.type) {
+        val cancel: Boolean = when (event.newState.type) {
 
             // prevent ice generation from Frost Walkers enchantment
-            ICE -> player != null && !ppa.canBuild(player)
+            FROSTED_ICE -> player != null && !ppa.canBuild(player)
 
             // prevent snow generation from weather
             SNOW -> !hasEntity && wo.options.preventWeatherBlockChanges
@@ -406,12 +413,13 @@ class ParcelListeners(val parcelProvider: ParcelProvider, val entityTracker: Par
     val onVehicleMoveEvent = RegistratorListener<VehicleMoveEvent> l@{ event ->
         val (wo, ppa) = getWoAndPPa(event.to.block) ?: return@l
         if (ppa == null) {
-            event.vehicle.eject()
             event.vehicle.passengers.forEach {
                 if (it.type == EntityType.PLAYER) {
                     (it as Player).sendParcelMessage(except = true, message = "Your ride ends here")
                 } else it.remove()
             }
+            event.vehicle.eject()
+            event.vehicle.remove()
         } else if (ppa.hasBlockVisitors) {
             event.to.subtract(event.to).add(event.from)
         }
@@ -10,7 +10,7 @@ import kotlin.reflect.KClass
 
 object GeneratorOptionsFactories : PolymorphicOptionsFactories<ParcelGenerator>("name", GeneratorOptions::class, DefaultGeneratorOptionsFactory())
 
-class GeneratorOptions(name: String, options: Any) : PolymorphicOptions<ParcelGenerator>(name, options, GeneratorOptionsFactories) {
+class GeneratorOptions(name: String = "default", options: Any = DefaultGeneratorOptions()) : PolymorphicOptions<ParcelGenerator>(name, options, GeneratorOptionsFactories) {
     fun newInstance(worldName: String) = factory.newInstance(key, options, worldName)
 }
 
@@ -1,17 +1,21 @@
 package io.dico.parcels2.options
 
 import io.dico.parcels2.storage.migration.Migration
+import io.dico.parcels2.storage.migration.plotme.PlotmeMigration
 import kotlin.reflect.KClass
 
 object MigrationOptionsFactories : PolymorphicOptionsFactories<Migration>("kind", MigrationOptions::class, PlotmeMigrationFactory())
 
-class MigrationOptions(kind: String, options: Any) : SimplePolymorphicOptions<Migration>(kind, options, MigrationOptionsFactories)
+class MigrationOptions(kind: String = "plotme-0.17", options: Any = PlotmeMigrationOptions()) : SimplePolymorphicOptions<Migration>(kind, options, MigrationOptionsFactories)
 
 private class PlotmeMigrationFactory : PolymorphicOptionsFactory<Migration> {
     override val supportedKeys = listOf("plotme-0.17")
-    override val optionsClass: KClass<out Any> get() = TODO()
+    override val optionsClass: KClass<out Any> get() = PlotmeMigrationOptions::class
 
     override fun newInstance(key: String, options: Any, vararg extra: Any?): Migration {
-        TODO()
+        return PlotmeMigration(options as PlotmeMigrationOptions)
     }
 }
 
+class PlotmeMigrationOptions(val worldsFromTo: Map<String, String> = mapOf("plotworld" to "parcels"),
+                             val storage: StorageOptions = StorageOptions(options = DataConnectionOptions(database = "plotme")))
@@ -10,14 +10,15 @@ import java.util.EnumSet
 class Options {
     var worlds: Map<String, WorldOptions> = hashMapOf()
         private set
-    var storage: StorageOptions = StorageOptions("postgresql", DataConnectionOptions())
+    var storage: StorageOptions = StorageOptions()
     var tickWorktime: TickWorktimeOptions = TickWorktimeOptions(20, 1)
+    var migration = MigrationOptionsHolder()
 
     fun addWorld(name: String,
                  generatorOptions: GeneratorOptions? = null,
                  worldOptions: RuntimeWorldOptions? = null) {
         val optionsHolder = WorldOptions(
-            generatorOptions ?: GeneratorOptions("default", DefaultGeneratorOptions()),
+            generatorOptions ?: GeneratorOptions(),
             worldOptions ?: RuntimeWorldOptions()
         )
 
@@ -49,3 +50,9 @@ class RuntimeWorldOptions(var gameMode: GameMode? = GameMode.CREATIVE,
                           var axisLimit: Int = 10)
 
 class DataFileOptions(val location: String = "/flatfile-storage/")
+
+class MigrationOptionsHolder {
+    var enabled = false
+    var disableWhenComplete = true
+    var instance: MigrationOptions? = MigrationOptions()
+}
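Note: the `migration` holder added to `Options` above is what `ParcelProviderImpl.loadStoredData()` checks before loading parcel data (see the earlier hunk). A minimal sketch of enabling it from code instead of editing the options file by hand; the helper function name is illustrative, everything else comes from this commit:

    // Sketch only: request a one-shot PlotMe migration on the next data load.
    // Assumes `plugin` is the running ParcelsPlugin instance.
    fun requestPlotmeMigration(plugin: ParcelsPlugin) {
        with(plugin.options.migration) {
            enabled = true                // read by loadStoredData()
            disableWhenComplete = true    // reset and persisted once the migration finishes
            instance = MigrationOptions() // defaults to kind "plotme-0.17" with PlotmeMigrationOptions()
        }
        plugin.saveOptions()
    }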
@@ -3,22 +3,36 @@ package io.dico.parcels2.options
 import com.zaxxer.hikari.HikariDataSource
 import io.dico.parcels2.logger
 import io.dico.parcels2.storage.Storage
-import io.dico.parcels2.storage.StorageWithCoroutineBacking
+import io.dico.parcels2.storage.BackedStorage
 import io.dico.parcels2.storage.exposed.ExposedBacking
 import io.dico.parcels2.storage.getHikariConfig
+import javax.sql.DataSource
 
 object StorageOptionsFactories : PolymorphicOptionsFactories<Storage>("dialect", StorageOptions::class, ConnectionStorageFactory())
 
-class StorageOptions(dialect: String, options: Any) : SimplePolymorphicOptions<Storage>(dialect, options, StorageOptionsFactories)
+class StorageOptions(dialect: String = "mariadb", options: Any = DataConnectionOptions()) : SimplePolymorphicOptions<Storage>(dialect, options, StorageOptionsFactories) {
+
+    fun getDataSourceFactory(): DataSourceFactory? {
+        return when (factory) {
+            is ConnectionStorageFactory -> factory.getDataSourceFactory(key, options)
+            else -> return null
+        }
+    }
+}
+
+typealias DataSourceFactory = () -> DataSource
 
 private class ConnectionStorageFactory : PolymorphicOptionsFactory<Storage> {
     override val optionsClass = DataConnectionOptions::class
     override val supportedKeys: List<String> = listOf("postgresql", "mariadb")
 
-    override fun newInstance(key: String, options: Any, vararg extra: Any?): Storage {
+    fun getDataSourceFactory(key: String, options: Any): DataSourceFactory {
         val hikariConfig = getHikariConfig(key, options as DataConnectionOptions)
-        val dataSourceFactory = suspend { HikariDataSource(hikariConfig) }
-        return StorageWithCoroutineBacking(ExposedBacking(dataSourceFactory))
+        return { HikariDataSource(hikariConfig) }
+    }
+
+    override fun newInstance(key: String, options: Any, vararg extra: Any?): Storage {
+        return BackedStorage(ExposedBacking(getDataSourceFactory(key, options), (options as DataConnectionOptions).poolSize))
     }
 }
 
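Note: the split above lets the PlotMe migrator later in this commit reuse the same connection setup as the main storage by asking a `StorageOptions` for the raw `() -> DataSource` factory instead of a full `Storage`. A hedged sketch of that call pattern, using the same defaults as `PlotmeMigrationOptions` (the local names are illustrative):

    // Sketch: obtain a DataSource for the legacy PlotMe database, as PlotmeMigration.init() does.
    val plotmeStorageOptions = StorageOptions(options = DataConnectionOptions(database = "plotme"))
    val dataSource = plotmeStorageOptions.getDataSourceFactory()!!()  // invoke the () -> DataSource lambda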
@@ -1,6 +1,12 @@
 package io.dico.parcels2.storage
 
 import io.dico.parcels2.*
+import kotlinx.coroutines.experimental.CoroutineDispatcher
+import kotlinx.coroutines.experimental.CoroutineScope
+import kotlinx.coroutines.experimental.Deferred
+import kotlinx.coroutines.experimental.Job
+import kotlinx.coroutines.experimental.channels.ProducerScope
+import kotlinx.coroutines.experimental.channels.ReceiveChannel
 import kotlinx.coroutines.experimental.channels.SendChannel
 import java.util.UUID
 
@@ -10,41 +16,58 @@ interface Backing {
 
     val isConnected: Boolean
 
-    suspend fun init()
+    fun launchJob(job: Backing.() -> Unit): Job
 
-    suspend fun shutdown()
+    fun <T> launchFuture(future: Backing.() -> T): Deferred<T>
 
+    fun <T> openChannel(future: Backing.(SendChannel<T>) -> Unit): ReceiveChannel<T>
+
+
+    fun init()
+
+    fun shutdown()
+
 
     /**
      * This producer function is capable of constantly reading parcels from a potentially infinite sequence,
      * and provide parcel data for it as read from the database.
      */
-    suspend fun produceParcelData(channel: SendChannel<DataPair>, parcels: Sequence<ParcelId>)
+    fun produceParcelData(channel: SendChannel<DataPair>, parcels: Sequence<ParcelId>)
 
-    suspend fun produceAllParcelData(channel: SendChannel<DataPair>)
+    fun produceAllParcelData(channel: SendChannel<DataPair>)
 
-    suspend fun readParcelData(parcel: ParcelId): ParcelData?
+    fun readParcelData(parcel: ParcelId): ParcelData?
 
-    suspend fun getOwnedParcels(user: ParcelOwner): List<ParcelId>
+    fun getOwnedParcels(user: ParcelOwner): List<ParcelId>
 
-    suspend fun getNumParcels(user: ParcelOwner): Int = getOwnedParcels(user).size
+    fun getNumParcels(user: ParcelOwner): Int = getOwnedParcels(user).size
 
 
-    suspend fun setParcelData(parcel: ParcelId, data: ParcelData?)
+    fun setParcelData(parcel: ParcelId, data: ParcelData?)
 
-    suspend fun setParcelOwner(parcel: ParcelId, owner: ParcelOwner?)
+    fun setParcelOwner(parcel: ParcelId, owner: ParcelOwner?)
 
-    suspend fun setLocalPlayerStatus(parcel: ParcelId, player: UUID, status: AddedStatus)
+    fun setLocalPlayerStatus(parcel: ParcelId, player: UUID, status: AddedStatus)
 
-    suspend fun setParcelAllowsInteractInventory(parcel: ParcelId, value: Boolean)
+    fun setParcelAllowsInteractInventory(parcel: ParcelId, value: Boolean)
 
-    suspend fun setParcelAllowsInteractInputs(parcel: ParcelId, value: Boolean)
+    fun setParcelAllowsInteractInputs(parcel: ParcelId, value: Boolean)
 
 
-    suspend fun produceAllGlobalAddedData(channel: SendChannel<AddedDataPair<ParcelOwner>>)
+    fun produceAllGlobalAddedData(channel: SendChannel<AddedDataPair<ParcelOwner>>)
 
-    suspend fun readGlobalAddedData(owner: ParcelOwner): MutableAddedDataMap
+    fun readGlobalAddedData(owner: ParcelOwner): MutableAddedDataMap
 
-    suspend fun setGlobalPlayerStatus(owner: ParcelOwner, player: UUID, status: AddedStatus)
+    fun setGlobalPlayerStatus(owner: ParcelOwner, player: UUID, status: AddedStatus)
 
 }
 
+abstract class AbstractBacking(val dispatcher: CoroutineDispatcher) {
+
+    fun launchJob(job: Backing.() -> Unit): Job
+
+    fun <T> launchFuture(future: Backing.() -> T): Deferred<T>
+
+    fun <T> openChannel(future: Backing.(SendChannel<T>) -> Unit): ReceiveChannel<T>
+
+}
@@ -3,21 +3,16 @@
 package io.dico.parcels2.storage
 
 import io.dico.parcels2.*
-import kotlinx.coroutines.experimental.*
-import kotlinx.coroutines.experimental.channels.ProducerScope
+import kotlinx.coroutines.experimental.Deferred
+import kotlinx.coroutines.experimental.Job
 import kotlinx.coroutines.experimental.channels.ReceiveChannel
-import kotlinx.coroutines.experimental.channels.produce
 import java.util.UUID
-import java.util.concurrent.Executor
-import java.util.concurrent.Executors
 
 typealias DataPair = Pair<ParcelId, ParcelData?>
 typealias AddedDataPair<TAttach> = Pair<TAttach, MutableAddedDataMap>
 
 interface Storage {
     val name: String
-    val syncDispatcher: CoroutineDispatcher
-    val asyncDispatcher: CoroutineDispatcher
     val isConnected: Boolean
 
     fun init(): Job
@@ -54,55 +49,39 @@ interface Storage {
     fun setGlobalAddedStatus(owner: ParcelOwner, player: UUID, status: AddedStatus): Job
 }
 
-class StorageWithCoroutineBacking internal constructor(val backing: Backing) : Storage {
-    override val name get() = backing.name
-    override val syncDispatcher = Executor { it.run() }.asCoroutineDispatcher()
-    val poolSize: Int get() = 4
-    override val asyncDispatcher = Executors.newFixedThreadPool(poolSize) { Thread(it, "Parcels2_StorageThread") }.asCoroutineDispatcher()
-    override val isConnected get() = backing.isConnected
-    val channelCapacity = 16
-
-    private inline fun <T> defer(noinline block: suspend CoroutineScope.() -> T): Deferred<T> {
-        return async(context = asyncDispatcher, start = CoroutineStart.ATOMIC, block = block)
-    }
-
-    private inline fun job(noinline block: suspend CoroutineScope.() -> Unit): Job {
-        return launch(context = asyncDispatcher, start = CoroutineStart.ATOMIC, block = block)
-    }
-
-    private inline fun <T> openChannel(noinline block: suspend ProducerScope<T>.() -> Unit): ReceiveChannel<T> {
-        return produce(asyncDispatcher, capacity = channelCapacity, block = block)
-    }
-
-    override fun init() = job { backing.init() }
-
-    override fun shutdown() = job { backing.shutdown() }
-
-    override fun readParcelData(parcel: ParcelId) = defer { backing.readParcelData(parcel) }
-
-    override fun readParcelData(parcels: Sequence<ParcelId>) = openChannel<DataPair> { backing.produceParcelData(channel, parcels) }
-
-    override fun readAllParcelData() = openChannel<DataPair> { backing.produceAllParcelData(channel) }
-
-    override fun getOwnedParcels(user: ParcelOwner) = defer { backing.getOwnedParcels(user) }
-
-    override fun getNumParcels(user: ParcelOwner) = defer { backing.getNumParcels(user) }
-
-    override fun setParcelData(parcel: ParcelId, data: ParcelData?) = job { backing.setParcelData(parcel, data) }
-
-    override fun setParcelOwner(parcel: ParcelId, owner: ParcelOwner?) = job { backing.setParcelOwner(parcel, owner) }
-
-    override fun setParcelPlayerStatus(parcel: ParcelId, player: UUID, status: AddedStatus) = job { backing.setLocalPlayerStatus(parcel, player, status) }
-
-    override fun setParcelAllowsInteractInventory(parcel: ParcelId, value: Boolean) = job { backing.setParcelAllowsInteractInventory(parcel, value) }
-
-    override fun setParcelAllowsInteractInputs(parcel: ParcelId, value: Boolean) = job { backing.setParcelAllowsInteractInputs(parcel, value) }
-
-    override fun readAllGlobalAddedData(): ReceiveChannel<AddedDataPair<ParcelOwner>> = openChannel { backing.produceAllGlobalAddedData(channel) }
-
-    override fun readGlobalAddedData(owner: ParcelOwner): Deferred<MutableAddedDataMap?> = defer { backing.readGlobalAddedData(owner) }
-
-    override fun setGlobalAddedStatus(owner: ParcelOwner, player: UUID, status: AddedStatus) = job { backing.setGlobalPlayerStatus(owner, player, status) }
+class BackedStorage internal constructor(val b: Backing) : Storage {
+    override val name get() = b.name
+    override val isConnected get() = b.isConnected
+
+    override fun init() = b.launchJob { init() }
+
+    override fun shutdown() = b.launchJob { shutdown() }
+
+    override fun readParcelData(parcel: ParcelId) = b.launchFuture { readParcelData(parcel) }
+
+    override fun readParcelData(parcels: Sequence<ParcelId>) = b.openChannel<DataPair> { produceParcelData(it, parcels) }
+
+    override fun readAllParcelData() = b.openChannel<DataPair> { produceAllParcelData(it) }
+
+    override fun getOwnedParcels(user: ParcelOwner) = b.launchFuture { getOwnedParcels(user) }
+
+    override fun getNumParcels(user: ParcelOwner) = b.launchFuture { getNumParcels(user) }
+
+    override fun setParcelData(parcel: ParcelId, data: ParcelData?) = b.launchJob { setParcelData(parcel, data) }
+
+    override fun setParcelOwner(parcel: ParcelId, owner: ParcelOwner?) = b.launchJob { setParcelOwner(parcel, owner) }
+
+    override fun setParcelPlayerStatus(parcel: ParcelId, player: UUID, status: AddedStatus) = b.launchJob { setLocalPlayerStatus(parcel, player, status) }
+
+    override fun setParcelAllowsInteractInventory(parcel: ParcelId, value: Boolean) = b.launchJob { setParcelAllowsInteractInventory(parcel, value) }
+
+    override fun setParcelAllowsInteractInputs(parcel: ParcelId, value: Boolean) = b.launchJob { setParcelAllowsInteractInputs(parcel, value) }
+
+    override fun readAllGlobalAddedData(): ReceiveChannel<AddedDataPair<ParcelOwner>> = b.openChannel { produceAllGlobalAddedData(it) }
+
+    override fun readGlobalAddedData(owner: ParcelOwner): Deferred<MutableAddedDataMap?> = b.launchFuture { readGlobalAddedData(owner) }
+
+    override fun setGlobalAddedStatus(owner: ParcelOwner, player: UUID, status: AddedStatus) = b.launchJob { setGlobalPlayerStatus(owner, player, status) }
 }
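Note: with `BackedStorage`, every `Storage` call now wraps a blocking `Backing` call in a `Job`, `Deferred`, or `ReceiveChannel` on the backing's own dispatcher. A small consumption sketch, assuming a `Storage` reference is available inside a coroutine (the function name is illustrative); it mirrors the read loop in `ParcelProviderImpl.loadStoredData()`:

    // Sketch: counting stored parcels through the channel-based read API.
    suspend fun countStoredParcels(storage: Storage): Int {
        val channel = storage.readAllParcelData()   // ReceiveChannel<DataPair>
        var count = 0
        while (channel.receiveOrNull() != null) count++
        return count
    }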
@@ -0,0 +1,118 @@
+package io.dico.parcels2.storage.exposed
+
+import kotlinx.coroutines.experimental.*
+import org.jetbrains.exposed.sql.*
+import org.jetbrains.exposed.sql.statements.StatementContext
+import org.jetbrains.exposed.sql.statements.StatementInterceptor
+import org.jetbrains.exposed.sql.statements.expandArgs
+import org.jetbrains.exposed.sql.transactions.*
+import org.slf4j.LoggerFactory
+import java.sql.Connection
+import kotlin.coroutines.experimental.CoroutineContext
+
+fun <T> ctransaction(db: Database? = null, statement: suspend Transaction.() -> T): T {
+    return ctransaction(TransactionManager.manager.defaultIsolationLevel, 3, db, statement)
+}
+
+fun <T> ctransaction(transactionIsolation: Int, repetitionAttempts: Int, db: Database? = null, statement: suspend Transaction.() -> T): T {
+    return transaction(transactionIsolation, repetitionAttempts, db) {
+        if (this !is CoroutineTransaction) throw IllegalStateException("ctransaction requires CoroutineTransactionManager.")
+
+        val job = async(context = manager.context, start = CoroutineStart.UNDISPATCHED) {
+            this@transaction.statement()
+        }
+
+        if (job.isActive) {
+            runBlocking(context = Unconfined) {
+                job.join()
+            }
+        }
+
+        job.getCompleted()
+    }
+}
+
+class CoroutineTransactionManager(private val db: Database,
+                                  dispatcher: CoroutineDispatcher,
+                                  override var defaultIsolationLevel: Int = DEFAULT_ISOLATION_LEVEL) : TransactionManager {
+    val context: CoroutineDispatcher = TransactionCoroutineDispatcher(dispatcher)
+    private val transaction = ThreadLocal<CoroutineTransaction?>()
+
+    override fun currentOrNull(): Transaction? {
+        return transaction.get()
+            ?: null
+    }
+
+    override fun newTransaction(isolation: Int): Transaction {
+        return CoroutineTransaction(this, CoroutineTransactionInterface(db, isolation, transaction)).also { transaction.set(it) }
+    }
+
+    private inner class TransactionCoroutineDispatcher(val delegate: CoroutineDispatcher) : CoroutineDispatcher() {
+
+        // When the thread changes, move the transaction to the new thread
+        override fun dispatch(context: CoroutineContext, block: Runnable) {
+            val existing = transaction.get()
+
+            val newContext: CoroutineContext
+            if (existing != null) {
+                transaction.set(null)
+                newContext = context // + existing
+            } else {
+                newContext = context
+            }
+
+            delegate.dispatch(newContext, Runnable {
+                if (existing != null) {
+                    transaction.set(existing)
+                }
+
+                block.run()
+            })
+        }
+
+    }
+
+}
+
+private class CoroutineTransaction(val manager: CoroutineTransactionManager,
+                                   itf: CoroutineTransactionInterface) : Transaction(itf), CoroutineContext.Element {
+    companion object Key : CoroutineContext.Key<CoroutineTransaction>
+
+    override val key: CoroutineContext.Key<CoroutineTransaction> = Key
+}
+
+private class CoroutineTransactionInterface(override val db: Database, isolation: Int, val threadLocal: ThreadLocal<CoroutineTransaction?>) : TransactionInterface {
+    private val connectionLazy = lazy(LazyThreadSafetyMode.NONE) {
+        db.connector().apply {
+            autoCommit = false
+            transactionIsolation = isolation
+        }
+    }
+    override val connection: Connection
+        get() = connectionLazy.value
+
+    override val outerTransaction: CoroutineTransaction? = threadLocal.get()
+
+    override fun commit() {
+        if (connectionLazy.isInitialized())
+            connection.commit()
+    }
+
+    override fun rollback() {
+        if (connectionLazy.isInitialized() && !connection.isClosed) {
+            connection.rollback()
+        }
+    }
+
+    override fun close() {
+        try {
+            if (connectionLazy.isInitialized()) connection.close()
+        } finally {
+            threadLocal.set(outerTransaction)
+        }
+    }
+
+}
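Note: `ctransaction` is the bridge used by the producer functions in `ExposedBacking` below (`ctransaction<Unit> { ... }`). A hedged usage sketch, with an illustrative function name; it assumes a `CoroutineTransactionManager` has been installed as the Exposed `TransactionManager`, since `ctransaction` throws otherwise:

    // Sketch: run a suspending block inside a single Exposed transaction.
    fun countParcelRows(): Int = ctransaction {
        // Suspension is allowed here; the custom dispatcher carries the transaction across threads.
        ParcelsT.selectAll().count()
    }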
@@ -1,4 +1,4 @@
-@file:Suppress("NOTHING_TO_INLINE", "PARAMETER_NAME_CHANGED_ON_OVERRIDE", "LocalVariableName")
+@file:Suppress("NOTHING_TO_INLINE", "PARAMETER_NAME_CHANGED_ON_OVERRIDE", "LocalVariableName", "UNUSED_EXPRESSION")
 
 package io.dico.parcels2.storage.exposed
 
@@ -7,10 +7,10 @@ import io.dico.parcels2.*
 import io.dico.parcels2.storage.Backing
 import io.dico.parcels2.storage.DataPair
 import io.dico.parcels2.util.toUUID
-import kotlinx.coroutines.experimental.CoroutineStart
-import kotlinx.coroutines.experimental.Unconfined
+import kotlinx.coroutines.experimental.*
+import kotlinx.coroutines.experimental.channels.LinkedListChannel
+import kotlinx.coroutines.experimental.channels.ReceiveChannel
 import kotlinx.coroutines.experimental.channels.SendChannel
-import kotlinx.coroutines.experimental.launch
 import org.jetbrains.exposed.sql.*
 import org.jetbrains.exposed.sql.SchemaUtils.create
 import org.jetbrains.exposed.sql.transactions.transaction
@@ -21,14 +21,27 @@ import javax.sql.DataSource
 
 class ExposedDatabaseException(message: String? = null) : Exception(message)
 
-class ExposedBacking(private val dataSourceFactory: suspend () -> DataSource) : Backing {
+class ExposedBacking(private val dataSourceFactory: () -> DataSource,
+                     private val poolSize: Int) : Backing {
     override val name get() = "Exposed"
+    val dispatcher: CoroutineDispatcher = newFixedThreadPoolContext(4, "Parcels StorageThread")
+
     private var dataSource: DataSource? = null
     private var database: Database? = null
     private var isShutdown: Boolean = false
 
     override val isConnected get() = database != null
 
+    override fun launchJob(job: Backing.() -> Unit): Job = launch(dispatcher) { transaction { job() } }
+    override fun <T> launchFuture(future: Backing.() -> T): Deferred<T> = async(dispatcher) { transaction { future() } }
+
+    override fun <T> openChannel(future: Backing.(SendChannel<T>) -> Unit): ReceiveChannel<T> {
+        val channel = LinkedListChannel<T>()
+        launchJob { future(channel) }
+        return channel
+    }
+
+    private fun <T> transaction(statement: Transaction.() -> T) = transaction(database!!, statement)
+
     companion object {
         init {
             Database.registerDialect("mariadb") {
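Note (my reading of the kotlinx.coroutines API, not stated in the diff): `LinkedListChannel` is unbounded, which is why the producers further down can switch from the suspending `channel.send(...)` to the non-suspending `channel.offer(...)` without dropping elements. A tiny illustration with an illustrative helper name:

    // Sketch: offer() on an unbounded LinkedListChannel succeeds without suspending (until the channel is closed).
    fun <T> sendAllAndClose(channel: LinkedListChannel<T>, items: Sequence<T>) {
        items.forEach { check(channel.offer(it)) }
        channel.close()
    }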
@@ -37,24 +50,17 @@ class ExposedBacking(private val dataSourceFactory: suspend () -> DataSource) :
             }
         }
     }
 
-    private fun <T> transaction(statement: Transaction.() -> T) = transaction(database!!, statement)
-
-    private suspend fun transactionLaunch(statement: suspend Transaction.() -> Unit): Unit = transaction(database!!) {
-        launch(context = Unconfined, start = CoroutineStart.UNDISPATCHED) {
-            statement(this@transaction)
-        }
-    }
-
-    override suspend fun init() {
-        if (isShutdown) throw IllegalStateException()
+    override fun init() {
+        if (isShutdown || isConnected) throw IllegalStateException()
         dataSource = dataSourceFactory()
         database = Database.connect(dataSource!!)
-        transaction(database) {
+        transaction(database!!) {
             create(WorldsT, OwnersT, ParcelsT, ParcelOptionsT, AddedLocalT, AddedGlobalT)
         }
     }
 
-    override suspend fun shutdown() {
+    override fun shutdown() {
         if (isShutdown) throw IllegalStateException()
         dataSource?.let {
             (it as? HikariDataSource)?.close()
@@ -63,15 +69,15 @@ class ExposedBacking(private val dataSourceFactory: suspend () -> DataSource) :
         isShutdown = true
     }
 
-    override suspend fun produceParcelData(channel: SendChannel<DataPair>, parcels: Sequence<ParcelId>) {
+    override fun produceParcelData(channel: SendChannel<DataPair>, parcels: Sequence<ParcelId>) {
         for (parcel in parcels) {
             val data = readParcelData(parcel)
-            channel.send(parcel to data)
+            channel.offer(parcel to data)
         }
         channel.close()
     }
 
-    override suspend fun produceAllParcelData(channel: SendChannel<Pair<ParcelId, ParcelData?>>) = transactionLaunch {
+    override fun produceAllParcelData(channel: SendChannel<Pair<ParcelId, ParcelData?>>) = ctransaction<Unit> {
         ParcelsT.selectAll().forEach { row ->
             val parcel = ParcelsT.getId(row) ?: return@forEach
             val data = rowToParcelData(row)
@@ -80,12 +86,12 @@ class ExposedBacking(private val dataSourceFactory: suspend () -> DataSource) :
         channel.close()
     }
 
-    override suspend fun readParcelData(parcel: ParcelId): ParcelData? = transaction {
+    override fun readParcelData(parcel: ParcelId): ParcelData? = transaction {
         val row = ParcelsT.getRow(parcel) ?: return@transaction null
         rowToParcelData(row)
     }
 
-    override suspend fun getOwnedParcels(user: ParcelOwner): List<ParcelId> = transaction {
+    override fun getOwnedParcels(user: ParcelOwner): List<ParcelId> = transaction {
         val user_id = OwnersT.getId(user) ?: return@transaction emptyList()
         ParcelsT.select { ParcelsT.owner_id eq user_id }
             .orderBy(ParcelsT.claim_time, isAsc = true)
@@ -93,7 +99,7 @@ class ExposedBacking(private val dataSourceFactory: suspend () -> DataSource) :
             .toList()
     }
 
-    override suspend fun setParcelData(parcel: ParcelId, data: ParcelData?) {
+    override fun setParcelData(parcel: ParcelId, data: ParcelData?) {
         if (data == null) {
             transaction {
                 ParcelsT.getId(parcel)?.let { id ->
@@ -125,7 +131,7 @@ class ExposedBacking(private val dataSourceFactory: suspend () -> DataSource) :
         setParcelAllowsInteractInventory(parcel, data.allowInteractInventory)
     }
 
-    override suspend fun setParcelOwner(parcel: ParcelId, owner: ParcelOwner?) = transaction {
+    override fun setParcelOwner(parcel: ParcelId, owner: ParcelOwner?) = transaction {
         val id = if (owner == null)
             ParcelsT.getId(parcel) ?: return@transaction
         else
@@ -140,11 +146,11 @@ class ExposedBacking(private val dataSourceFactory: suspend () -> DataSource) :
         }
     }
 
-    override suspend fun setLocalPlayerStatus(parcel: ParcelId, player: UUID, status: AddedStatus) = transaction {
+    override fun setLocalPlayerStatus(parcel: ParcelId, player: UUID, status: AddedStatus) = transaction {
         AddedLocalT.setPlayerStatus(parcel, player, status)
     }
 
-    override suspend fun setParcelAllowsInteractInventory(parcel: ParcelId, value: Boolean): Unit = transaction {
+    override fun setParcelAllowsInteractInventory(parcel: ParcelId, value: Boolean): Unit = transaction {
         val id = ParcelsT.getOrInitId(parcel)
         ParcelOptionsT.upsert(ParcelOptionsT.parcel_id) {
             it[ParcelOptionsT.parcel_id] = id
@@ -152,7 +158,7 @@ class ExposedBacking(private val dataSourceFactory: suspend () -> DataSource) :
         }
     }
 
-    override suspend fun setParcelAllowsInteractInputs(parcel: ParcelId, value: Boolean): Unit = transaction {
+    override fun setParcelAllowsInteractInputs(parcel: ParcelId, value: Boolean): Unit = transaction {
         val id = ParcelsT.getOrInitId(parcel)
         ParcelOptionsT.upsert(ParcelOptionsT.parcel_id) {
             it[ParcelOptionsT.parcel_id] = id
@@ -160,16 +166,16 @@ class ExposedBacking(private val dataSourceFactory: suspend () -> DataSource) :
         }
     }
 
-    override suspend fun produceAllGlobalAddedData(channel: SendChannel<Pair<ParcelOwner, MutableMap<UUID, AddedStatus>>>) = transactionLaunch {
+    override fun produceAllGlobalAddedData(channel: SendChannel<Pair<ParcelOwner, MutableMap<UUID, AddedStatus>>>) = ctransaction<Unit> {
         AddedGlobalT.sendAllAddedData(channel)
         channel.close()
     }
 
-    override suspend fun readGlobalAddedData(owner: ParcelOwner): MutableMap<UUID, AddedStatus> = transaction {
+    override fun readGlobalAddedData(owner: ParcelOwner): MutableMap<UUID, AddedStatus> = transaction {
         return@transaction AddedGlobalT.readAddedData(OwnersT.getId(owner) ?: return@transaction hashMapOf())
     }
 
-    override suspend fun setGlobalPlayerStatus(owner: ParcelOwner, player: UUID, status: AddedStatus) = transaction {
+    override fun setGlobalPlayerStatus(owner: ParcelOwner, player: UUID, status: AddedStatus) = transaction {
         AddedGlobalT.setPlayerStatus(owner, player, status)
     }
 
@@ -6,6 +6,7 @@ import org.jetbrains.exposed.sql.Table
 import org.jetbrains.exposed.sql.Transaction
 import org.jetbrains.exposed.sql.statements.InsertStatement
 import org.jetbrains.exposed.sql.transactions.TransactionManager
+import org.jetbrains.exposed.sql.transactions.transaction
 
 class UpsertStatement<Key : Any>(table: Table, conflictColumn: Column<*>? = null, conflictIndex: Index? = null)
     : InsertStatement<Key>(table, false) {
@@ -61,3 +62,4 @@ fun Table.indexR(customIndexName: String? = null, isUnique: Boolean = false, var
 }
 
 fun Table.uniqueIndexR(customIndexName: String? = null, vararg columns: Column<*>): Index = indexR(customIndexName, true, *columns)
+
@@ -1,8 +1,9 @@
 package io.dico.parcels2.storage.migration
 
 import io.dico.parcels2.storage.Storage
+import kotlinx.coroutines.experimental.Job
 
 interface Migration {
-    fun migrateTo(storage: Storage)
+    fun migrateTo(storage: Storage): Job
 }
 
@@ -1,50 +1,50 @@
+@file:Suppress("RedundantSuspendModifier", "DEPRECATION")
+
 package io.dico.parcels2.storage.migration.plotme
 
 import com.zaxxer.hikari.HikariDataSource
 import io.dico.parcels2.*
+import io.dico.parcels2.options.PlotmeMigrationOptions
 import io.dico.parcels2.storage.Storage
 import io.dico.parcels2.storage.migration.Migration
 import io.dico.parcels2.util.Vec2i
 import io.dico.parcels2.util.isValid
 import io.dico.parcels2.util.toUUID
 import io.dico.parcels2.util.uuid
-import kotlinx.coroutines.experimental.asCoroutineDispatcher
-import kotlinx.coroutines.experimental.launch
+import kotlinx.coroutines.experimental.*
 import org.bukkit.Bukkit
 import org.jetbrains.exposed.sql.*
 import org.slf4j.LoggerFactory
 import java.io.ByteArrayOutputStream
 import java.sql.Blob
 import java.util.UUID
-import java.util.concurrent.Executors
+import java.util.concurrent.ConcurrentHashMap
 import javax.sql.DataSource
+import kotlin.coroutines.experimental.coroutineContext
 
-class PlotmeMigration(val parcelProvider: ParcelProvider,
-                      val worldMapper: Map<String, String>,
-                      val dataSourceFactory: () -> DataSource) : Migration {
+class PlotmeMigration(val options: PlotmeMigrationOptions) : Migration {
     private var dataSource: DataSource? = null
     private var database: Database? = null
     private var isShutdown: Boolean = false
-    private val dispatcher = Executors.newSingleThreadExecutor { Thread(it, "PlotMe Migration Thread") }.asCoroutineDispatcher()
     private val mlogger = LoggerFactory.getLogger("PlotMe Migrator")
 
     private fun <T> transaction(statement: Transaction.() -> T) = org.jetbrains.exposed.sql.transactions.transaction(database!!, statement)
 
-    override fun migrateTo(storage: Storage) {
-        launch(context = dispatcher) {
+    override fun migrateTo(storage: Storage): Job {
+        return launch(context = storage.asyncDispatcher) {
             init()
-            doWork(storage)
+            transaction { launch(context = Unconfined, start = CoroutineStart.UNDISPATCHED) { doWork(storage) } }
             shutdown()
         }
     }
 
-    fun init() {
+    suspend fun init() {
         if (isShutdown) throw IllegalStateException()
-        dataSource = dataSourceFactory()
+        dataSource = options.storage.getDataSourceFactory()!!()
         database = Database.connect(dataSource!!)
     }
 
-    fun shutdown() {
+    suspend fun shutdown() {
        if (isShutdown) throw IllegalStateException()
        dataSource?.let {
            (it as? HikariDataSource)?.close()
@@ -53,22 +53,23 @@ class PlotmeMigration(val parcelProvider: ParcelProvider,
         isShutdown = true
     }
 
-    val parcelsCache = hashMapOf<String, MutableMap<Vec2i, ParcelData>>()
+    private val parcelsCache = hashMapOf<String, MutableMap<Vec2i, ParcelData>>()
 
     private fun getMap(worldName: String): MutableMap<Vec2i, ParcelData>? {
-        val mapped = worldMapper[worldName] ?: return null
+        val mapped = options.worldsFromTo[worldName] ?: return null
         return parcelsCache.computeIfAbsent(mapped) { mutableMapOf() }
     }
 
     private fun getData(worldName: String, position: Vec2i): ParcelData? {
-        return getMap(worldName)?.computeIfAbsent(position) { ParcelDataHolder() }
+        return getMap(worldName)?.computeIfAbsent(position) { ParcelDataHolder(addedMap = ConcurrentHashMap()) }
     }
 
-    fun doWork(target: Storage): Unit = transaction {
+    suspend fun doWork(target: Storage): Unit {
         if (!PlotmePlotsT.exists()) {
             mlogger.warn("Plotme tables don't appear to exist. Exiting.")
-            return@transaction
+            return
         }
 
         parcelsCache.clear()
 
         iterPlotmeTable(PlotmePlotsT) { data, row ->
@@ -76,22 +77,29 @@ class PlotmeMigration(val parcelProvider: ParcelProvider,
             data.owner = ParcelOwner(row[owner_uuid]?.toUUID(), row[owner_name])
         }

-        iterPlotmeTable(PlotmeAllowedT) { data, row ->
-            val uuid = row[player_uuid]?.toUUID()
-                ?: Bukkit.getOfflinePlayer(row[player_name]).takeIf { it.isValid }?.uuid
-                ?: return@iterPlotmeTable
+        launch(context = target.asyncDispatcher) {
+            iterPlotmeTable(PlotmeAllowedT) { data, row ->
+                val uuid = row[player_uuid]?.toUUID()
+                    ?: Bukkit.getOfflinePlayer(row[player_name]).takeIf { it.isValid }?.uuid
+                    ?: return@iterPlotmeTable

                 data.setAddedStatus(uuid, AddedStatus.ALLOWED)
+            }
         }

-        iterPlotmeTable(PlotmeDeniedT) { data, row ->
-            val uuid = row[PlotmeAllowedT.player_uuid]?.toUUID()
-                ?: Bukkit.getOfflinePlayer(row[PlotmeAllowedT.player_name]).takeIf { it.isValid }?.uuid
-                ?: return@iterPlotmeTable
+        launch(context = target.asyncDispatcher) {
+            iterPlotmeTable(PlotmeDeniedT) { data, row ->
+                val uuid = row[player_uuid]?.toUUID()
+                    ?: Bukkit.getOfflinePlayer(row[player_name]).takeIf { it.isValid }?.uuid
+                    ?: return@iterPlotmeTable

                 data.setAddedStatus(uuid, AddedStatus.BANNED)
+            }
         }

+        println(coroutineContext[Job]!!.children)
+        coroutineContext[Job]!!.joinChildren()

         for ((worldName, map) in parcelsCache) {
             val world = ParcelWorldId(worldName)
             for ((pos, data) in map) {
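The reworked migrateTo returns the launched Job instead of firing and forgetting on a private single-thread executor: the migration runs on the storage's async dispatcher, the PlotMe queries execute inside one Exposed transaction, and the per-table passes become child coroutines that doWork joins before the cached parcels are written out. A minimal sketch of how a caller might drive it, assuming the Storage and PlotmeMigrationOptions instances come from the plugin's existing wiring (the call site itself is not part of this commit):

import kotlinx.coroutines.experimental.runBlocking

// Hypothetical call site; blocks until the migration's Job completes.
fun runPlotmeMigration(storage: Storage, options: PlotmeMigrationOptions) = runBlocking {
    val migration = PlotmeMigration(options)
    migration.migrateTo(storage).join()
}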
@@ -7,9 +7,9 @@ const val uppercase: Boolean = false
 fun String.toCorrectCase() = if (uppercase) this else toLowerCase()

 sealed class PlotmeTable(name: String) : Table(name) {
-    val px = PlotmePlotsT.integer("idX").primaryKey()
-    val pz = PlotmePlotsT.integer("idZ").primaryKey()
-    val world_name = PlotmePlotsT.varchar("world", 32).primaryKey()
+    val px = integer("idX").primaryKey()
+    val pz = integer("idZ").primaryKey()
+    val world_name = varchar("world", 32).primaryKey()
 }

 object PlotmePlotsT : PlotmeTable("plotmePlots".toCorrectCase()) {
@@ -18,8 +18,8 @@ object PlotmePlotsT : PlotmeTable("plotmePlots".toCorrectCase()) {
 }

 sealed class PlotmePlotPlayerMap(name: String) : PlotmeTable(name) {
-    val player_name = PlotmePlotsT.varchar("player", 32)
-    val player_uuid = PlotmePlotsT.blob("playerid").nullable()
+    val player_name = varchar("player", 32)
+    val player_uuid = blob("playerid").nullable()
 }

 object PlotmeAllowedT : PlotmePlotPlayerMap("plotmeAllowed".toCorrectCase())
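With the columns now declared through the inherited Table receiver (integer, varchar, blob) instead of through PlotmePlotsT, each table object owns its own columns. A small sketch of reading plot rows through these objects with Exposed, assuming Database.connect(...) has already been called as PlotmeMigration.init() does; the dump function is illustrative only and not part of the commit:

import org.jetbrains.exposed.sql.selectAll
import org.jetbrains.exposed.sql.transactions.transaction

// Illustrative query against the PlotMe schema; must run inside an Exposed transaction.
fun dumpPlotmePlots() = transaction {
    PlotmePlotsT.selectAll().forEach { row ->
        println("${row[PlotmePlotsT.world_name]} @ (${row[PlotmePlotsT.px]}, ${row[PlotmePlotsT.pz]})")
    }
}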
@@ -4,9 +4,12 @@ import io.dico.parcels2.logger
 import java.io.File

 fun File.tryCreate(): Boolean {
+    if (exists()) {
+        return !isDirectory
+    }
     val parent = parentFile
     if (parent == null || !(parent.exists() || parent.mkdirs()) || !createNewFile()) {
-        logger.warn("Failed to create file ${canonicalPath}")
+        logger.warn("Failed to create file $canonicalPath")
         return false
     }
     return true
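tryCreate now short-circuits when the path already exists, reporting success for a regular file and failure for a directory, so repeated calls no longer fail on createNewFile(). A minimal usage sketch; the helper and file name are made up for illustration:

import java.io.File

// Hypothetical helper: returns the file if it exists or could be created, null otherwise.
fun ensureDataFile(dataFolder: File): File? {
    val file = File(dataFolder, "example.yml")
    return if (file.tryCreate()) file else null
}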
@@ -6,7 +6,36 @@
         </encoder>
     </appender>

-    <root level="debug">
+    <root level="info">
         <appender-ref ref="STDOUT" />
     </root>

+    <appender name="SQLLOG" class="ch.qos.logback.core.RollingFileAppender">
+        <append>true</append>
+        <file>C:/Parcels/sql.log</file>
+
+        <encoder>
+            <!-- old pattern <pattern>%d{HH:mm:ss.SSS} %magenta(%-8.-8(%thread)) %highlight(%-5level) %boldCyan(%8.-32logger{32}) - %msg</pattern>-->
+            <pattern>%magenta(%-8.-8(%thread)) %highlight(%-5level) %boldCyan(%6.-32logger{32}) - %msg</pattern>
+        </encoder>
+
+        <rollingPolicy class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
+            <fileNamePattern>C:/Parcels/sql%i.log</fileNamePattern>
+            <minIndex>1</minIndex>
+            <maxIndex>3</maxIndex>
+        </rollingPolicy>
+
+        <triggeringPolicy class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
+            <maxFileSize>1MB</maxFileSize>
+        </triggeringPolicy>
+    </appender>
+
+    <appender name="ASYNC_SQLLOG" class="ch.qos.logback.classic.AsyncAppender">
+        <appender-ref ref="SQLLOG"/>
+        <neverBlock>true</neverBlock>
+    </appender>
+
+    <logger name="Exposed" level="DEBUG">
+        <appender-ref ref="ASYNC_SQLLOG"/>
+    </logger>
 </configuration>
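The logging change sets the root logger to info while routing the "Exposed" logger at DEBUG through an async, size-capped rolling file appender, so SQL output lands in C:/Parcels/sql.log without blocking the calling thread. A quick sanity check in Kotlin, under the assumption (which this config relies on) that Exposed emits its SQL logging under the "Exposed" logger name:

import org.slf4j.LoggerFactory

// Anything logged under "Exposed" at DEBUG or above should roll into sql.log via ASYNC_SQLLOG.
fun checkSqlLogging() {
    val log = LoggerFactory.getLogger("Exposed")
    log.debug("debug entry - expected in sql.log")
    log.info("info entry - also recorded, since the logger level is DEBUG")
}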