
work on plotme migration

Dico
2018-08-04 00:13:09 +01:00
parent 703e02d6b2
commit 3573f9ade6
21 changed files with 431 additions and 201 deletions

View File

@@ -46,7 +46,7 @@ interface ParcelData : AddedData {
}
}
class ParcelDataHolder : AddedDataHolder(), ParcelData {
class ParcelDataHolder(addedMap: MutableAddedDataMap = mutableMapOf()) : AddedDataHolder(addedMap), ParcelData {
override var owner: ParcelOwner? = null
override var since: DateTime? = null

View File

@@ -85,25 +85,32 @@ class ParcelsPlugin : JavaPlugin() {
fun loadOptions(): Boolean {
when {
optionsFile.exists() -> optionsMapper.readerForUpdating(options).readValue<Options>(optionsFile)
optionsFile.tryCreate() -> {
else -> run {
options.addWorld("parcels")
try {
optionsMapper.writeValue(optionsFile, options)
} catch (ex: Throwable) {
optionsFile.delete()
throw ex
if (saveOptions()) {
plogger.warn("Created options file with a world template. Please review it before next start.")
} else {
plogger.error("Failed to save options file ${optionsFile.canonicalPath}")
}
plogger.warn("Created options file with a world template. Please review it before next start.")
return false
}
else -> {
plogger.error("Failed to save options file ${optionsFile.canonicalPath}")
return false
}
}
return true
}
fun saveOptions(): Boolean {
if (optionsFile.tryCreate()) {
try {
optionsMapper.writeValue(optionsFile, options)
} catch (ex: Throwable) {
optionsFile.delete()
throw ex
}
return true
}
return false
}
override fun getDefaultWorldGenerator(worldName: String, generatorId: String?): ChunkGenerator? {
return parcelProvider.getWorldGenerator(worldName)
}
@@ -119,6 +126,8 @@ class ParcelsPlugin : JavaPlugin() {
listeners = ParcelListeners(parcelProvider, entityTracker)
registrator.registerListeners(listeners!!)
}
functionHelper.scheduleRepeating(100, 5, entityTracker::tick)
}
}

View File

@@ -33,15 +33,18 @@ class CommandsDebug(plugin: ParcelsPlugin) : AbstractParcelCommands(plugin) {
fun ParcelScope.cmdMakeMess(context: ExecutionContext) {
val server = plugin.server
val blockDatas = arrayOf(
server.createBlockData(Material.STICKY_PISTON),
server.createBlockData(Material.BLUE_WOOL),
server.createBlockData(Material.LIME_WOOL),
server.createBlockData(Material.GLASS),
server.createBlockData(Material.STONE_SLAB),
server.createBlockData(Material.QUARTZ_BLOCK)
server.createBlockData(Material.STONE),
server.createBlockData(Material.QUARTZ_BLOCK),
server.createBlockData(Material.BROWN_CONCRETE)
)
val random = Random()
world.doBlockOperation(parcel.id, direction = RegionTraversal.UPWARD) { block ->
block.blockData = blockDatas[random.nextInt(4)]
block.blockData = blockDatas[random.nextInt(7)]
}.onProgressUpdate(1000, 1000) { progress, elapsedTime ->
context.sendMessage(EMessageType.INFORMATIVE, "Mess progress: %.02f%%, %.2fs elapsed"
.format(progress * 100, elapsedTime / 1000.0))

View File

@@ -146,7 +146,7 @@ class DefaultParcelGenerator(val name: String, private val o: DefaultGeneratorOp
override fun getHomeLocation(parcel: ParcelId): Location {
val bottom = getBottomBlock(parcel)
return Location(world, bottom.x.toDouble(), o.floorHeight + 1.0, bottom.z + (o.parcelSize - 1) / 2.0, -90F, 0F)
return Location(world, bottom.x.toDouble() + 0.5, o.floorHeight + 1.0, bottom.z + 0.5 + (o.parcelSize - 1) / 2.0, -90F, 0F)
}
override fun setOwnerBlock(parcel: ParcelId, owner: ParcelOwner?) {

View File

@@ -60,6 +60,21 @@ class ParcelProviderImpl(val plugin: ParcelsPlugin) : ParcelProvider {
private fun loadStoredData() {
plugin.functionHelper.launchLazilyOnMainThread {
val migration = plugin.options.migration
if (migration.enabled) {
migration.instance?.newInstance()?.apply {
logger.warn("Migrating database now...")
migrateTo(plugin.storage).join()
logger.warn("Migration completed")
if (migration.disableWhenComplete) {
migration.enabled = false
plugin.saveOptions()
}
}
}
logger.info("Loading all parcel data...")
val channel = plugin.storage.readAllParcelData()
do {
val pair = channel.receiveOrNull() ?: break
@@ -67,6 +82,7 @@ class ParcelProviderImpl(val plugin: ParcelsPlugin) : ParcelProvider {
pair.second?.let { parcel.copyDataIgnoringDatabase(it) }
} while (true)
logger.info("Loading data completed")
_dataIsLoaded = true
}.start()
}

View File

@@ -26,7 +26,7 @@ class ParcelEntityTracker(val parcelProvider: ParcelProvider) {
*/
fun tick() {
map.editLoop { entity, parcel ->
if (entity.isDead || entity.isOnGround) {
if (entity.isDead) {
remove(); return@editLoop
}
if (parcel.isPresentAnd { hasBlockVisitors }) {

View File

@@ -83,9 +83,9 @@ class ParcelListeners(val parcelProvider: ParcelProvider, val entityTracker: Par
* Prevents players from placing blocks outside of their parcels
*/
@field:ListenerMarker(priority = NORMAL)
val onBlockPlaceEvent = RegistratorListener<BlockBreakEvent> l@{ event ->
val onBlockPlaceEvent = RegistratorListener<BlockPlaceEvent> l@{ event ->
val (wo, ppa) = getWoAndPPa(event.block) ?: return@l
if (!event.player.hasBuildAnywhere && !ppa.isNullOr { !canBuild(event.player) }) {
if (!event.player.hasBuildAnywhere && ppa.isNullOr { !canBuild(event.player) }) {
event.isCancelled = true
}
}
@@ -184,66 +184,73 @@ class ParcelListeners(val parcelProvider: ParcelProvider, val entityTracker: Par
}
when (event.action) {
Action.RIGHT_CLICK_BLOCK -> when (clickedBlock.type) {
REPEATER,
COMPARATOR -> run {
if (!parcel.canBuildN(user)) {
event.isCancelled = true; return@l
Action.RIGHT_CLICK_BLOCK -> run {
when (clickedBlock.type) {
REPEATER,
COMPARATOR -> run {
if (!parcel.canBuildN(user)) {
event.isCancelled = true; return@l
}
}
}
LEVER,
STONE_BUTTON,
ANVIL,
TRAPPED_CHEST,
OAK_BUTTON, BIRCH_BUTTON, SPRUCE_BUTTON, JUNGLE_BUTTON, ACACIA_BUTTON, DARK_OAK_BUTTON,
OAK_FENCE_GATE, BIRCH_FENCE_GATE, SPRUCE_FENCE_GATE, JUNGLE_FENCE_GATE, ACACIA_FENCE_GATE, DARK_OAK_FENCE_GATE,
OAK_DOOR, BIRCH_DOOR, SPRUCE_DOOR, JUNGLE_DOOR, ACACIA_DOOR, DARK_OAK_DOOR,
OAK_TRAPDOOR, BIRCH_TRAPDOOR, SPRUCE_TRAPDOOR, JUNGLE_TRAPDOOR, ACACIA_TRAPDOOR, DARK_OAK_TRAPDOOR
-> run {
if (!user.hasBuildAnywhere && !parcel.isNullOr { canBuild(user) || allowInteractInputs }) {
user.sendParcelMessage(nopermit = true, message = "You cannot use inputs in this parcel")
event.isCancelled = true; return@l
LEVER,
STONE_BUTTON,
ANVIL,
TRAPPED_CHEST,
OAK_BUTTON, BIRCH_BUTTON, SPRUCE_BUTTON, JUNGLE_BUTTON, ACACIA_BUTTON, DARK_OAK_BUTTON,
OAK_FENCE_GATE, BIRCH_FENCE_GATE, SPRUCE_FENCE_GATE, JUNGLE_FENCE_GATE, ACACIA_FENCE_GATE, DARK_OAK_FENCE_GATE,
OAK_DOOR, BIRCH_DOOR, SPRUCE_DOOR, JUNGLE_DOOR, ACACIA_DOOR, DARK_OAK_DOOR,
OAK_TRAPDOOR, BIRCH_TRAPDOOR, SPRUCE_TRAPDOOR, JUNGLE_TRAPDOOR, ACACIA_TRAPDOOR, DARK_OAK_TRAPDOOR
-> run {
if (!user.hasBuildAnywhere && !parcel.isNullOr { canBuild(user) || allowInteractInputs }) {
user.sendParcelMessage(nopermit = true, message = "You cannot use inputs in this parcel")
event.isCancelled = true; return@l
}
}
}
WHITE_BED, ORANGE_BED, MAGENTA_BED, LIGHT_BLUE_BED, YELLOW_BED, LIME_BED, PINK_BED, GRAY_BED, LIGHT_GRAY_BED, CYAN_BED, PURPLE_BED, BLUE_BED, BROWN_BED, GREEN_BED, RED_BED, BLACK_BED
-> run {
if (world.options.disableExplosions) {
val bed = clickedBlock.blockData as Bed
val head = if (bed == Bed.Part.FOOT) clickedBlock.getRelative(bed.facing) else clickedBlock
when (head.biome) {
Biome.NETHER, Biome.THE_END -> run {
user.sendParcelMessage(nopermit = true, message = "You cannot use this bed because it would explode")
event.isCancelled = true; return@l
WHITE_BED, ORANGE_BED, MAGENTA_BED, LIGHT_BLUE_BED, YELLOW_BED, LIME_BED, PINK_BED, GRAY_BED, LIGHT_GRAY_BED, CYAN_BED, PURPLE_BED, BLUE_BED, BROWN_BED, GREEN_BED, RED_BED, BLACK_BED
-> run {
if (world.options.disableExplosions) {
val bed = clickedBlock.blockData as Bed
val head = if (bed == Bed.Part.FOOT) clickedBlock.getRelative(bed.facing) else clickedBlock
when (head.biome) {
Biome.NETHER, Biome.THE_END -> run {
user.sendParcelMessage(nopermit = true, message = "You cannot use this bed because it would explode")
event.isCancelled = true; return@l
}
}
}
}
}
onPlayerInteractEvent_RightClick(event, world, parcel)
}
Action.RIGHT_CLICK_AIR -> if (event.hasItem()) {
val item = event.item.type
if (world.options.blockedItems.contains(item)) {
user.sendParcelMessage(nopermit = true, message = "You cannot use this bed because it would explode")
event.isCancelled = true; return@l
}
if (!parcel.canBuildN(user)) {
when (item) {
LAVA_BUCKET, WATER_BUCKET, BUCKET, FLINT_AND_STEEL -> event.isCancelled = true
}
}
}
Action.RIGHT_CLICK_AIR -> onPlayerInteractEvent_RightClick(event, world, parcel)
Action.PHYSICAL -> if (!user.hasBuildAnywhere && !parcel.isPresentAnd { canBuild(user) || allowInteractInputs }) {
user.sendParcelMessage(nopermit = true, message = "You cannot use inputs in this parcel")
event.isCancelled = true; return@l
}
}
}
@Suppress("NON_EXHAUSTIVE_WHEN")
private fun onPlayerInteractEvent_RightClick(event: PlayerInteractEvent, world: ParcelWorld, parcel: Parcel?) {
if (event.hasItem()) {
val item = event.item.type
if (world.options.blockedItems.contains(item)) {
event.player.sendParcelMessage(nopermit = true, message = "You cannot use this item because it is disabled in this world")
event.isCancelled = true; return
}
if (!parcel.canBuildN(event.player)) {
when (item) {
LAVA_BUCKET, WATER_BUCKET, BUCKET, FLINT_AND_STEEL -> event.isCancelled = true
}
}
}
}
/*
* Prevents players from breeding mobs, entering or opening boats/minecarts,
* rotating item frames, doing stuff with leashes, and putting stuff on armor stands.
@@ -352,7 +359,7 @@ class ParcelListeners(val parcelProvider: ParcelProvider, val entityTracker: Par
world.weatherDuration = Int.MAX_VALUE
}
// TODO: BlockFormEvent, BlockSpreadEvent, BlockFadeEvent
// TODO: BlockFormEvent, BlockSpreadEvent, BlockFadeEvent, Fireworks
/*
* Prevents natural blocks forming
@@ -370,10 +377,10 @@ class ParcelListeners(val parcelProvider: ParcelProvider, val entityTracker: Par
val hasEntity = event is EntityBlockFormEvent
val player = (event as? EntityBlockFormEvent)?.entity as? Player
val cancel: Boolean = when (block.type) {
val cancel: Boolean = when (event.newState.type) {
// prevent ice generation from Frost Walkers enchantment
ICE -> player != null && !ppa.canBuild(player)
FROSTED_ICE -> player != null && !ppa.canBuild(player)
// prevent snow generation from weather
SNOW -> !hasEntity && wo.options.preventWeatherBlockChanges
@@ -406,12 +413,13 @@ class ParcelListeners(val parcelProvider: ParcelProvider, val entityTracker: Par
val onVehicleMoveEvent = RegistratorListener<VehicleMoveEvent> l@{ event ->
val (wo, ppa) = getWoAndPPa(event.to.block) ?: return@l
if (ppa == null) {
event.vehicle.eject()
event.vehicle.passengers.forEach {
if (it.type == EntityType.PLAYER) {
(it as Player).sendParcelMessage(except = true, message = "Your ride ends here")
} else it.remove()
}
event.vehicle.eject()
event.vehicle.remove()
} else if (ppa.hasBlockVisitors) {
event.to.subtract(event.to).add(event.from)
}

View File

@@ -10,7 +10,7 @@ import kotlin.reflect.KClass
object GeneratorOptionsFactories : PolymorphicOptionsFactories<ParcelGenerator>("name", GeneratorOptions::class, DefaultGeneratorOptionsFactory())
class GeneratorOptions(name: String, options: Any) : PolymorphicOptions<ParcelGenerator>(name, options, GeneratorOptionsFactories) {
class GeneratorOptions (name: String = "default", options: Any = DefaultGeneratorOptions()) : PolymorphicOptions<ParcelGenerator>(name, options, GeneratorOptionsFactories) {
fun newInstance(worldName: String) = factory.newInstance(key, options, worldName)
}

View File

@@ -1,17 +1,21 @@
package io.dico.parcels2.options
import io.dico.parcels2.storage.migration.Migration
import io.dico.parcels2.storage.migration.plotme.PlotmeMigration
import kotlin.reflect.KClass
object MigrationOptionsFactories : PolymorphicOptionsFactories<Migration>("kind", MigrationOptions::class, PlotmeMigrationFactory())
class MigrationOptions(kind: String, options: Any) : SimplePolymorphicOptions<Migration>(kind, options, MigrationOptionsFactories)
class MigrationOptions(kind: String = "plotme-0.17", options: Any = PlotmeMigrationOptions()) : SimplePolymorphicOptions<Migration>(kind, options, MigrationOptionsFactories)
private class PlotmeMigrationFactory : PolymorphicOptionsFactory<Migration> {
override val supportedKeys = listOf("plotme-0.17")
override val optionsClass: KClass<out Any> get() = TODO()
override val optionsClass: KClass<out Any> get() = PlotmeMigrationOptions::class
override fun newInstance(key: String, options: Any, vararg extra: Any?): Migration {
TODO()
return PlotmeMigration(options as PlotmeMigrationOptions)
}
}
class PlotmeMigrationOptions(val worldsFromTo: Map<String, String> = mapOf("plotworld" to "parcels"),
val storage: StorageOptions = StorageOptions(options = DataConnectionOptions(database = "plotme")))

View File

@@ -10,14 +10,15 @@ import java.util.EnumSet
class Options {
var worlds: Map<String, WorldOptions> = hashMapOf()
private set
var storage: StorageOptions = StorageOptions("postgresql", DataConnectionOptions())
var storage: StorageOptions = StorageOptions()
var tickWorktime: TickWorktimeOptions = TickWorktimeOptions(20, 1)
var migration = MigrationOptionsHolder()
fun addWorld(name: String,
generatorOptions: GeneratorOptions? = null,
worldOptions: RuntimeWorldOptions? = null) {
val optionsHolder = WorldOptions(
generatorOptions ?: GeneratorOptions("default", DefaultGeneratorOptions()),
generatorOptions ?: GeneratorOptions(),
worldOptions ?: RuntimeWorldOptions()
)
@@ -49,3 +50,9 @@ class RuntimeWorldOptions(var gameMode: GameMode? = GameMode.CREATIVE,
var axisLimit: Int = 10)
class DataFileOptions(val location: String = "/flatfile-storage/")
class MigrationOptionsHolder {
var enabled = false
var disableWhenComplete = true
var instance: MigrationOptions? = MigrationOptions()
}
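
For reference, a minimal sketch of how the new migration options above are meant to be switched on. This is an illustration rather than code from this commit; it assumes these classes share the io.dico.parcels2.options package shown at the top of the file, and `options` stands for the plugin's loaded Options instance.

// Hypothetical wiring sketch: enable the PlotMe migration using the classes introduced above.
// Constructor parameter names are taken from the diffs in this commit.
fun enablePlotmeMigration(options: Options) {
    options.migration.apply {
        enabled = true                 // MigrationOptionsHolder.enabled defaults to false
        disableWhenComplete = true     // flipped back to enabled = false after a successful run
        instance = MigrationOptions(
            kind = "plotme-0.17",      // the only key PlotmeMigrationFactory supports
            options = PlotmeMigrationOptions(
                worldsFromTo = mapOf("plotworld" to "parcels"),
                storage = StorageOptions(options = DataConnectionOptions(database = "plotme"))
            )
        )
    }
}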

View File

@@ -3,22 +3,36 @@ package io.dico.parcels2.options
import com.zaxxer.hikari.HikariDataSource
import io.dico.parcels2.logger
import io.dico.parcels2.storage.Storage
import io.dico.parcels2.storage.StorageWithCoroutineBacking
import io.dico.parcels2.storage.BackedStorage
import io.dico.parcels2.storage.exposed.ExposedBacking
import io.dico.parcels2.storage.getHikariConfig
import javax.sql.DataSource
object StorageOptionsFactories : PolymorphicOptionsFactories<Storage>("dialect", StorageOptions::class, ConnectionStorageFactory())
class StorageOptions(dialect: String, options: Any) : SimplePolymorphicOptions<Storage>(dialect, options, StorageOptionsFactories)
class StorageOptions(dialect: String = "mariadb", options: Any = DataConnectionOptions()) : SimplePolymorphicOptions<Storage>(dialect, options, StorageOptionsFactories) {
fun getDataSourceFactory(): DataSourceFactory? {
return when (factory) {
is ConnectionStorageFactory -> factory.getDataSourceFactory(key, options)
else -> return null
}
}
}
typealias DataSourceFactory = () -> DataSource
private class ConnectionStorageFactory : PolymorphicOptionsFactory<Storage> {
override val optionsClass = DataConnectionOptions::class
override val supportedKeys: List<String> = listOf("postgresql", "mariadb")
override fun newInstance(key: String, options: Any, vararg extra: Any?): Storage {
fun getDataSourceFactory(key: String, options: Any): DataSourceFactory {
val hikariConfig = getHikariConfig(key, options as DataConnectionOptions)
val dataSourceFactory = suspend { HikariDataSource(hikariConfig) }
return StorageWithCoroutineBacking(ExposedBacking(dataSourceFactory))
return { HikariDataSource(hikariConfig) }
}
override fun newInstance(key: String, options: Any, vararg extra: Any?): Storage {
return BackedStorage(ExposedBacking(getDataSourceFactory(key, options), (options as DataConnectionOptions).poolSize))
}
}

View File

@@ -1,6 +1,12 @@
package io.dico.parcels2.storage
import io.dico.parcels2.*
import kotlinx.coroutines.experimental.CoroutineDispatcher
import kotlinx.coroutines.experimental.CoroutineScope
import kotlinx.coroutines.experimental.Deferred
import kotlinx.coroutines.experimental.Job
import kotlinx.coroutines.experimental.channels.ProducerScope
import kotlinx.coroutines.experimental.channels.ReceiveChannel
import kotlinx.coroutines.experimental.channels.SendChannel
import java.util.UUID
@@ -10,41 +16,58 @@ interface Backing {
val isConnected: Boolean
suspend fun init()
fun launchJob(job: Backing.() -> Unit): Job
suspend fun shutdown()
fun <T> launchFuture(future: Backing.() -> T): Deferred<T>
fun <T> openChannel(future: Backing.(SendChannel<T>) -> Unit): ReceiveChannel<T>
fun init()
fun shutdown()
/**
* This producer function is capable of constantly reading parcels from a potentially infinite sequence,
* and providing parcel data for each, as read from the database.
*/
suspend fun produceParcelData(channel: SendChannel<DataPair>, parcels: Sequence<ParcelId>)
fun produceParcelData(channel: SendChannel<DataPair>, parcels: Sequence<ParcelId>)
suspend fun produceAllParcelData(channel: SendChannel<DataPair>)
fun produceAllParcelData(channel: SendChannel<DataPair>)
suspend fun readParcelData(parcel: ParcelId): ParcelData?
fun readParcelData(parcel: ParcelId): ParcelData?
suspend fun getOwnedParcels(user: ParcelOwner): List<ParcelId>
fun getOwnedParcels(user: ParcelOwner): List<ParcelId>
suspend fun getNumParcels(user: ParcelOwner): Int = getOwnedParcels(user).size
fun getNumParcels(user: ParcelOwner): Int = getOwnedParcels(user).size
suspend fun setParcelData(parcel: ParcelId, data: ParcelData?)
fun setParcelData(parcel: ParcelId, data: ParcelData?)
suspend fun setParcelOwner(parcel: ParcelId, owner: ParcelOwner?)
fun setParcelOwner(parcel: ParcelId, owner: ParcelOwner?)
suspend fun setLocalPlayerStatus(parcel: ParcelId, player: UUID, status: AddedStatus)
fun setLocalPlayerStatus(parcel: ParcelId, player: UUID, status: AddedStatus)
suspend fun setParcelAllowsInteractInventory(parcel: ParcelId, value: Boolean)
fun setParcelAllowsInteractInventory(parcel: ParcelId, value: Boolean)
suspend fun setParcelAllowsInteractInputs(parcel: ParcelId, value: Boolean)
fun setParcelAllowsInteractInputs(parcel: ParcelId, value: Boolean)
suspend fun produceAllGlobalAddedData(channel: SendChannel<AddedDataPair<ParcelOwner>>)
fun produceAllGlobalAddedData(channel: SendChannel<AddedDataPair<ParcelOwner>>)
suspend fun readGlobalAddedData(owner: ParcelOwner): MutableAddedDataMap
fun readGlobalAddedData(owner: ParcelOwner): MutableAddedDataMap
suspend fun setGlobalPlayerStatus(owner: ParcelOwner, player: UUID, status: AddedStatus)
fun setGlobalPlayerStatus(owner: ParcelOwner, player: UUID, status: AddedStatus)
}
}
abstract class AbstractBacking(val dispatcher: CoroutineDispatcher) {
fun launchJob(job: Backing.() -> Unit): Job
fun <T> launchFuture(future: Backing.() -> T): Deferred<T>
fun <T> openChannel(future: Backing.(SendChannel<T>) -> Unit): ReceiveChannel<T>
}

View File

@@ -3,21 +3,16 @@
package io.dico.parcels2.storage
import io.dico.parcels2.*
import kotlinx.coroutines.experimental.*
import kotlinx.coroutines.experimental.channels.ProducerScope
import kotlinx.coroutines.experimental.Deferred
import kotlinx.coroutines.experimental.Job
import kotlinx.coroutines.experimental.channels.ReceiveChannel
import kotlinx.coroutines.experimental.channels.produce
import java.util.UUID
import java.util.concurrent.Executor
import java.util.concurrent.Executors
typealias DataPair = Pair<ParcelId, ParcelData?>
typealias AddedDataPair<TAttach> = Pair<TAttach, MutableAddedDataMap>
interface Storage {
val name: String
val syncDispatcher: CoroutineDispatcher
val asyncDispatcher: CoroutineDispatcher
val isConnected: Boolean
fun init(): Job
@@ -54,55 +49,39 @@ interface Storage {
fun setGlobalAddedStatus(owner: ParcelOwner, player: UUID, status: AddedStatus): Job
}
class StorageWithCoroutineBacking internal constructor(val backing: Backing) : Storage {
override val name get() = backing.name
override val syncDispatcher = Executor { it.run() }.asCoroutineDispatcher()
val poolSize: Int get() = 4
override val asyncDispatcher = Executors.newFixedThreadPool(poolSize) { Thread(it, "Parcels2_StorageThread") }.asCoroutineDispatcher()
override val isConnected get() = backing.isConnected
val channelCapacity = 16
class BackedStorage internal constructor(val b: Backing) : Storage {
override val name get() = b.name
override val isConnected get() = b.isConnected
private inline fun <T> defer(noinline block: suspend CoroutineScope.() -> T): Deferred<T> {
return async(context = asyncDispatcher, start = CoroutineStart.ATOMIC, block = block)
}
override fun init() = b.launchJob { init() }
private inline fun job(noinline block: suspend CoroutineScope.() -> Unit): Job {
return launch(context = asyncDispatcher, start = CoroutineStart.ATOMIC, block = block)
}
private inline fun <T> openChannel(noinline block: suspend ProducerScope<T>.() -> Unit): ReceiveChannel<T> {
return produce(asyncDispatcher, capacity = channelCapacity, block = block)
}
override fun init() = job { backing.init() }
override fun shutdown() = job { backing.shutdown() }
override fun shutdown() = b.launchJob { shutdown() }
override fun readParcelData(parcel: ParcelId) = defer { backing.readParcelData(parcel) }
override fun readParcelData(parcel: ParcelId) = b.launchFuture { readParcelData(parcel) }
override fun readParcelData(parcels: Sequence<ParcelId>) = openChannel<DataPair> { backing.produceParcelData(channel, parcels) }
override fun readParcelData(parcels: Sequence<ParcelId>) = b.openChannel<DataPair> { produceParcelData(it, parcels) }
override fun readAllParcelData() = openChannel<DataPair> { backing.produceAllParcelData(channel) }
override fun readAllParcelData() = b.openChannel<DataPair> { produceAllParcelData(it) }
override fun getOwnedParcels(user: ParcelOwner) = defer { backing.getOwnedParcels(user) }
override fun getOwnedParcels(user: ParcelOwner) = b.launchFuture { getOwnedParcels(user) }
override fun getNumParcels(user: ParcelOwner) = defer { backing.getNumParcels(user) }
override fun getNumParcels(user: ParcelOwner) = b.launchFuture { getNumParcels(user) }
override fun setParcelData(parcel: ParcelId, data: ParcelData?) = job { backing.setParcelData(parcel, data) }
override fun setParcelData(parcel: ParcelId, data: ParcelData?) = b.launchJob { setParcelData(parcel, data) }
override fun setParcelOwner(parcel: ParcelId, owner: ParcelOwner?) = job { backing.setParcelOwner(parcel, owner) }
override fun setParcelOwner(parcel: ParcelId, owner: ParcelOwner?) = b.launchJob { setParcelOwner(parcel, owner) }
override fun setParcelPlayerStatus(parcel: ParcelId, player: UUID, status: AddedStatus) = job { backing.setLocalPlayerStatus(parcel, player, status) }
override fun setParcelPlayerStatus(parcel: ParcelId, player: UUID, status: AddedStatus) = b.launchJob { setLocalPlayerStatus(parcel, player, status) }
override fun setParcelAllowsInteractInventory(parcel: ParcelId, value: Boolean) = job { backing.setParcelAllowsInteractInventory(parcel, value) }
override fun setParcelAllowsInteractInventory(parcel: ParcelId, value: Boolean) = b.launchJob { setParcelAllowsInteractInventory(parcel, value) }
override fun setParcelAllowsInteractInputs(parcel: ParcelId, value: Boolean) = job { backing.setParcelAllowsInteractInputs(parcel, value) }
override fun setParcelAllowsInteractInputs(parcel: ParcelId, value: Boolean) = b.launchJob { setParcelAllowsInteractInputs(parcel, value) }
override fun readAllGlobalAddedData(): ReceiveChannel<AddedDataPair<ParcelOwner>> = openChannel { backing.produceAllGlobalAddedData(channel) }
override fun readAllGlobalAddedData(): ReceiveChannel<AddedDataPair<ParcelOwner>> = b.openChannel { produceAllGlobalAddedData(it) }
override fun readGlobalAddedData(owner: ParcelOwner): Deferred<MutableAddedDataMap?> = defer { backing.readGlobalAddedData(owner) }
override fun readGlobalAddedData(owner: ParcelOwner): Deferred<MutableAddedDataMap?> = b.launchFuture { readGlobalAddedData(owner) }
override fun setGlobalAddedStatus(owner: ParcelOwner, player: UUID, status: AddedStatus) = job { backing.setGlobalPlayerStatus(owner, player, status) }
override fun setGlobalAddedStatus(owner: ParcelOwner, player: UUID, status: AddedStatus) = b.launchJob { setGlobalPlayerStatus(owner, player, status) }
}

View File

@@ -0,0 +1,118 @@
package io.dico.parcels2.storage.exposed
import kotlinx.coroutines.experimental.*
import org.jetbrains.exposed.sql.*
import org.jetbrains.exposed.sql.statements.StatementContext
import org.jetbrains.exposed.sql.statements.StatementInterceptor
import org.jetbrains.exposed.sql.statements.expandArgs
import org.jetbrains.exposed.sql.transactions.*
import org.slf4j.LoggerFactory
import java.sql.Connection
import kotlin.coroutines.experimental.CoroutineContext
fun <T> ctransaction(db: Database? = null, statement: suspend Transaction.() -> T): T {
return ctransaction(TransactionManager.manager.defaultIsolationLevel, 3, db, statement)
}
fun <T> ctransaction(transactionIsolation: Int, repetitionAttempts: Int, db: Database? = null, statement: suspend Transaction.() -> T): T {
return transaction(transactionIsolation, repetitionAttempts, db) {
if (this !is CoroutineTransaction) throw IllegalStateException("ctransaction requires CoroutineTransactionManager.")
val job = async(context = manager.context, start = CoroutineStart.UNDISPATCHED) {
this@transaction.statement()
}
if (job.isActive) {
runBlocking(context = Unconfined) {
job.join()
}
}
job.getCompleted()
}
}
class CoroutineTransactionManager(private val db: Database,
dispatcher: CoroutineDispatcher,
override var defaultIsolationLevel: Int = DEFAULT_ISOLATION_LEVEL) : TransactionManager {
val context: CoroutineDispatcher = TransactionCoroutineDispatcher(dispatcher)
private val transaction = ThreadLocal<CoroutineTransaction?>()
override fun currentOrNull(): Transaction? {
return transaction.get()
?: null
}
override fun newTransaction(isolation: Int): Transaction {
return CoroutineTransaction(this, CoroutineTransactionInterface(db, isolation, transaction)).also { transaction.set(it) }
}
private inner class TransactionCoroutineDispatcher(val delegate: CoroutineDispatcher) : CoroutineDispatcher() {
// When the thread changes, move the transaction to the new thread
override fun dispatch(context: CoroutineContext, block: Runnable) {
val existing = transaction.get()
val newContext: CoroutineContext
if (existing != null) {
transaction.set(null)
newContext = context // + existing
} else {
newContext = context
}
delegate.dispatch(newContext, Runnable {
if (existing != null) {
transaction.set(existing)
}
block.run()
})
}
}
}
private class CoroutineTransaction(val manager: CoroutineTransactionManager,
itf: CoroutineTransactionInterface) : Transaction(itf), CoroutineContext.Element {
companion object Key : CoroutineContext.Key<CoroutineTransaction>
override val key: CoroutineContext.Key<CoroutineTransaction> = Key
}
private class CoroutineTransactionInterface(override val db: Database, isolation: Int, val threadLocal: ThreadLocal<CoroutineTransaction?>) : TransactionInterface {
private val connectionLazy = lazy(LazyThreadSafetyMode.NONE) {
db.connector().apply {
autoCommit = false
transactionIsolation = isolation
}
}
override val connection: Connection
get() = connectionLazy.value
override val outerTransaction: CoroutineTransaction? = threadLocal.get()
override fun commit() {
if (connectionLazy.isInitialized())
connection.commit()
}
override fun rollback() {
if (connectionLazy.isInitialized() && !connection.isClosed) {
connection.rollback()
}
}
override fun close() {
try {
if (connectionLazy.isInitialized()) connection.close()
} finally {
threadLocal.set(outerTransaction)
}
}
}
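
A minimal usage sketch of ctransaction above (an illustration, not part of the commit): the block can suspend, for example to send on a channel, while staying inside the same Exposed transaction, provided a CoroutineTransactionManager has been installed as the transaction manager for the database; registration is not shown here.

package io.dico.parcels2.storage.exposed

import kotlinx.coroutines.experimental.channels.SendChannel
import org.jetbrains.exposed.sql.Database
import org.jetbrains.exposed.sql.selectAll

// Usage sketch, assuming this lives in the same package so ctransaction and ParcelsT resolve
// without extra imports, and assuming a CoroutineTransactionManager is registered for `db`.
// A plain blocking query and a suspending channel send share one transaction.
fun sendParcelRowCount(db: Database, channel: SendChannel<Int>) {
    ctransaction(db) {
        val rows = ParcelsT.selectAll().toList()   // ordinary query inside the transaction
        channel.send(rows.size)                    // suspending call; resumes bound to the same transaction
        channel.close()
    }
}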

View File

@@ -1,4 +1,4 @@
@file:Suppress("NOTHING_TO_INLINE", "PARAMETER_NAME_CHANGED_ON_OVERRIDE", "LocalVariableName")
@file:Suppress("NOTHING_TO_INLINE", "PARAMETER_NAME_CHANGED_ON_OVERRIDE", "LocalVariableName", "UNUSED_EXPRESSION")
package io.dico.parcels2.storage.exposed
@@ -7,10 +7,10 @@ import io.dico.parcels2.*
import io.dico.parcels2.storage.Backing
import io.dico.parcels2.storage.DataPair
import io.dico.parcels2.util.toUUID
import kotlinx.coroutines.experimental.CoroutineStart
import kotlinx.coroutines.experimental.Unconfined
import kotlinx.coroutines.experimental.*
import kotlinx.coroutines.experimental.channels.LinkedListChannel
import kotlinx.coroutines.experimental.channels.ReceiveChannel
import kotlinx.coroutines.experimental.channels.SendChannel
import kotlinx.coroutines.experimental.launch
import org.jetbrains.exposed.sql.*
import org.jetbrains.exposed.sql.SchemaUtils.create
import org.jetbrains.exposed.sql.transactions.transaction
@@ -21,14 +21,27 @@ import javax.sql.DataSource
class ExposedDatabaseException(message: String? = null) : Exception(message)
class ExposedBacking(private val dataSourceFactory: suspend () -> DataSource) : Backing {
class ExposedBacking(private val dataSourceFactory: () -> DataSource,
private val poolSize: Int) : Backing {
override val name get() = "Exposed"
val dispatcher: CoroutineDispatcher = newFixedThreadPoolContext(4, "Parcels StorageThread")
private var dataSource: DataSource? = null
private var database: Database? = null
private var isShutdown: Boolean = false
override val isConnected get() = database != null
override fun launchJob(job: Backing.() -> Unit): Job = launch(dispatcher) { transaction { job() } }
override fun <T> launchFuture(future: Backing.() -> T): Deferred<T> = async(dispatcher) { transaction { future() } }
override fun <T> openChannel(future: Backing.(SendChannel<T>) -> Unit): ReceiveChannel<T> {
val channel = LinkedListChannel<T>()
launchJob { future(channel) }
return channel
}
private fun <T> transaction(statement: Transaction.() -> T) = transaction(database!!, statement)
companion object {
init {
Database.registerDialect("mariadb") {
@@ -37,24 +50,17 @@ class ExposedBacking(private val dataSourceFactory: suspend () -> DataSource) :
}
}
private fun <T> transaction(statement: Transaction.() -> T) = transaction(database!!, statement)
override fun init() {
if (isShutdown || isConnected) throw IllegalStateException()
private suspend fun transactionLaunch(statement: suspend Transaction.() -> Unit): Unit = transaction(database!!) {
launch(context = Unconfined, start = CoroutineStart.UNDISPATCHED) {
statement(this@transaction)
}
}
override suspend fun init() {
if (isShutdown) throw IllegalStateException()
dataSource = dataSourceFactory()
database = Database.connect(dataSource!!)
transaction(database) {
transaction(database!!) {
create(WorldsT, OwnersT, ParcelsT, ParcelOptionsT, AddedLocalT, AddedGlobalT)
}
}
override suspend fun shutdown() {
override fun shutdown() {
if (isShutdown) throw IllegalStateException()
dataSource?.let {
(it as? HikariDataSource)?.close()
@@ -63,15 +69,15 @@ class ExposedBacking(private val dataSourceFactory: suspend () -> DataSource) :
isShutdown = true
}
override suspend fun produceParcelData(channel: SendChannel<DataPair>, parcels: Sequence<ParcelId>) {
override fun produceParcelData(channel: SendChannel<DataPair>, parcels: Sequence<ParcelId>) {
for (parcel in parcels) {
val data = readParcelData(parcel)
channel.send(parcel to data)
channel.offer(parcel to data)
}
channel.close()
}
override suspend fun produceAllParcelData(channel: SendChannel<Pair<ParcelId, ParcelData?>>) = transactionLaunch {
override fun produceAllParcelData(channel: SendChannel<Pair<ParcelId, ParcelData?>>) = ctransaction<Unit> {
ParcelsT.selectAll().forEach { row ->
val parcel = ParcelsT.getId(row) ?: return@forEach
val data = rowToParcelData(row)
@@ -80,12 +86,12 @@ class ExposedBacking(private val dataSourceFactory: suspend () -> DataSource) :
channel.close()
}
override suspend fun readParcelData(parcel: ParcelId): ParcelData? = transaction {
override fun readParcelData(parcel: ParcelId): ParcelData? = transaction {
val row = ParcelsT.getRow(parcel) ?: return@transaction null
rowToParcelData(row)
}
override suspend fun getOwnedParcels(user: ParcelOwner): List<ParcelId> = transaction {
override fun getOwnedParcels(user: ParcelOwner): List<ParcelId> = transaction {
val user_id = OwnersT.getId(user) ?: return@transaction emptyList()
ParcelsT.select { ParcelsT.owner_id eq user_id }
.orderBy(ParcelsT.claim_time, isAsc = true)
@@ -93,7 +99,7 @@ class ExposedBacking(private val dataSourceFactory: suspend () -> DataSource) :
.toList()
}
override suspend fun setParcelData(parcel: ParcelId, data: ParcelData?) {
override fun setParcelData(parcel: ParcelId, data: ParcelData?) {
if (data == null) {
transaction {
ParcelsT.getId(parcel)?.let { id ->
@@ -125,7 +131,7 @@ class ExposedBacking(private val dataSourceFactory: suspend () -> DataSource) :
setParcelAllowsInteractInventory(parcel, data.allowInteractInventory)
}
override suspend fun setParcelOwner(parcel: ParcelId, owner: ParcelOwner?) = transaction {
override fun setParcelOwner(parcel: ParcelId, owner: ParcelOwner?) = transaction {
val id = if (owner == null)
ParcelsT.getId(parcel) ?: return@transaction
else
@@ -140,11 +146,11 @@ class ExposedBacking(private val dataSourceFactory: suspend () -> DataSource) :
}
}
override suspend fun setLocalPlayerStatus(parcel: ParcelId, player: UUID, status: AddedStatus) = transaction {
override fun setLocalPlayerStatus(parcel: ParcelId, player: UUID, status: AddedStatus) = transaction {
AddedLocalT.setPlayerStatus(parcel, player, status)
}
override suspend fun setParcelAllowsInteractInventory(parcel: ParcelId, value: Boolean): Unit = transaction {
override fun setParcelAllowsInteractInventory(parcel: ParcelId, value: Boolean): Unit = transaction {
val id = ParcelsT.getOrInitId(parcel)
ParcelOptionsT.upsert(ParcelOptionsT.parcel_id) {
it[ParcelOptionsT.parcel_id] = id
@@ -152,7 +158,7 @@ class ExposedBacking(private val dataSourceFactory: suspend () -> DataSource) :
}
}
override suspend fun setParcelAllowsInteractInputs(parcel: ParcelId, value: Boolean): Unit = transaction {
override fun setParcelAllowsInteractInputs(parcel: ParcelId, value: Boolean): Unit = transaction {
val id = ParcelsT.getOrInitId(parcel)
ParcelOptionsT.upsert(ParcelOptionsT.parcel_id) {
it[ParcelOptionsT.parcel_id] = id
@@ -160,16 +166,16 @@ class ExposedBacking(private val dataSourceFactory: suspend () -> DataSource) :
}
}
override suspend fun produceAllGlobalAddedData(channel: SendChannel<Pair<ParcelOwner, MutableMap<UUID, AddedStatus>>>) = transactionLaunch {
override fun produceAllGlobalAddedData(channel: SendChannel<Pair<ParcelOwner, MutableMap<UUID, AddedStatus>>>) = ctransaction<Unit> {
AddedGlobalT.sendAllAddedData(channel)
channel.close()
}
override suspend fun readGlobalAddedData(owner: ParcelOwner): MutableMap<UUID, AddedStatus> = transaction {
override fun readGlobalAddedData(owner: ParcelOwner): MutableMap<UUID, AddedStatus> = transaction {
return@transaction AddedGlobalT.readAddedData(OwnersT.getId(owner) ?: return@transaction hashMapOf())
}
override suspend fun setGlobalPlayerStatus(owner: ParcelOwner, player: UUID, status: AddedStatus) = transaction {
override fun setGlobalPlayerStatus(owner: ParcelOwner, player: UUID, status: AddedStatus) = transaction {
AddedGlobalT.setPlayerStatus(owner, player, status)
}

View File

@@ -6,6 +6,7 @@ import org.jetbrains.exposed.sql.Table
import org.jetbrains.exposed.sql.Transaction
import org.jetbrains.exposed.sql.statements.InsertStatement
import org.jetbrains.exposed.sql.transactions.TransactionManager
import org.jetbrains.exposed.sql.transactions.transaction
class UpsertStatement<Key : Any>(table: Table, conflictColumn: Column<*>? = null, conflictIndex: Index? = null)
: InsertStatement<Key>(table, false) {
@@ -61,3 +62,4 @@ fun Table.indexR(customIndexName: String? = null, isUnique: Boolean = false, var
}
fun Table.uniqueIndexR(customIndexName: String? = null, vararg columns: Column<*>): Index = indexR(customIndexName, true, *columns)

View File

@@ -1,8 +1,9 @@
package io.dico.parcels2.storage.migration
import io.dico.parcels2.storage.Storage
import kotlinx.coroutines.experimental.Job
interface Migration {
fun migrateTo(storage: Storage)
fun migrateTo(storage: Storage): Job
}

View File

@@ -1,50 +1,50 @@
@file:Suppress("RedundantSuspendModifier", "DEPRECATION")
package io.dico.parcels2.storage.migration.plotme
import com.zaxxer.hikari.HikariDataSource
import io.dico.parcels2.*
import io.dico.parcels2.options.PlotmeMigrationOptions
import io.dico.parcels2.storage.Storage
import io.dico.parcels2.storage.migration.Migration
import io.dico.parcels2.util.Vec2i
import io.dico.parcels2.util.isValid
import io.dico.parcels2.util.toUUID
import io.dico.parcels2.util.uuid
import kotlinx.coroutines.experimental.asCoroutineDispatcher
import kotlinx.coroutines.experimental.launch
import kotlinx.coroutines.experimental.*
import org.bukkit.Bukkit
import org.jetbrains.exposed.sql.*
import org.slf4j.LoggerFactory
import java.io.ByteArrayOutputStream
import java.sql.Blob
import java.util.UUID
import java.util.concurrent.Executors
import java.util.concurrent.ConcurrentHashMap
import javax.sql.DataSource
import kotlin.coroutines.experimental.coroutineContext
class PlotmeMigration(val parcelProvider: ParcelProvider,
val worldMapper: Map<String, String>,
val dataSourceFactory: () -> DataSource) : Migration {
class PlotmeMigration(val options: PlotmeMigrationOptions) : Migration {
private var dataSource: DataSource? = null
private var database: Database? = null
private var isShutdown: Boolean = false
private val dispatcher = Executors.newSingleThreadExecutor { Thread(it, "PlotMe Migration Thread") }.asCoroutineDispatcher()
private val mlogger = LoggerFactory.getLogger("PlotMe Migrator")
private fun <T> transaction(statement: Transaction.() -> T) = org.jetbrains.exposed.sql.transactions.transaction(database!!, statement)
override fun migrateTo(storage: Storage) {
launch(context = dispatcher) {
override fun migrateTo(storage: Storage): Job {
return launch(context = storage.asyncDispatcher) {
init()
doWork(storage)
transaction { launch(context = Unconfined, start = CoroutineStart.UNDISPATCHED) { doWork(storage) } }
shutdown()
}
}
fun init() {
suspend fun init() {
if (isShutdown) throw IllegalStateException()
dataSource = dataSourceFactory()
dataSource = options.storage.getDataSourceFactory()!!()
database = Database.connect(dataSource!!)
}
fun shutdown() {
suspend fun shutdown() {
if (isShutdown) throw IllegalStateException()
dataSource?.let {
(it as? HikariDataSource)?.close()
@@ -53,22 +53,23 @@ class PlotmeMigration(val parcelProvider: ParcelProvider,
isShutdown = true
}
val parcelsCache = hashMapOf<String, MutableMap<Vec2i, ParcelData>>()
private val parcelsCache = hashMapOf<String, MutableMap<Vec2i, ParcelData>>()
private fun getMap(worldName: String): MutableMap<Vec2i, ParcelData>? {
val mapped = worldMapper[worldName] ?: return null
val mapped = options.worldsFromTo[worldName] ?: return null
return parcelsCache.computeIfAbsent(mapped) { mutableMapOf() }
}
private fun getData(worldName: String, position: Vec2i): ParcelData? {
return getMap(worldName)?.computeIfAbsent(position) { ParcelDataHolder() }
return getMap(worldName)?.computeIfAbsent(position) { ParcelDataHolder(addedMap = ConcurrentHashMap()) }
}
fun doWork(target: Storage): Unit = transaction {
suspend fun doWork(target: Storage): Unit {
if (!PlotmePlotsT.exists()) {
mlogger.warn("Plotme tables don't appear to exist. Exiting.")
return@transaction
return
}
parcelsCache.clear()
iterPlotmeTable(PlotmePlotsT) { data, row ->
@@ -76,22 +77,29 @@ class PlotmeMigration(val parcelProvider: ParcelProvider,
data.owner = ParcelOwner(row[owner_uuid]?.toUUID(), row[owner_name])
}
iterPlotmeTable(PlotmeAllowedT) { data, row ->
val uuid = row[player_uuid]?.toUUID()
?: Bukkit.getOfflinePlayer(row[player_name]).takeIf { it.isValid }?.uuid
?: return@iterPlotmeTable
launch(context = target.asyncDispatcher) {
iterPlotmeTable(PlotmeAllowedT) { data, row ->
val uuid = row[player_uuid]?.toUUID()
?: Bukkit.getOfflinePlayer(row[player_name]).takeIf { it.isValid }?.uuid
?: return@iterPlotmeTable
data.setAddedStatus(uuid, AddedStatus.ALLOWED)
data.setAddedStatus(uuid, AddedStatus.ALLOWED)
}
}
iterPlotmeTable(PlotmeDeniedT) { data, row ->
val uuid = row[PlotmeAllowedT.player_uuid]?.toUUID()
?: Bukkit.getOfflinePlayer(row[PlotmeAllowedT.player_name]).takeIf { it.isValid }?.uuid
?: return@iterPlotmeTable
launch(context = target.asyncDispatcher) {
iterPlotmeTable(PlotmeDeniedT) { data, row ->
val uuid = row[player_uuid]?.toUUID()
?: Bukkit.getOfflinePlayer(row[player_name]).takeIf { it.isValid }?.uuid
?: return@iterPlotmeTable
data.setAddedStatus(uuid, AddedStatus.BANNED)
data.setAddedStatus(uuid, AddedStatus.BANNED)
}
}
println(coroutineContext[Job]!!.children)
coroutineContext[Job]!!.joinChildren()
for ((worldName, map) in parcelsCache) {
val world = ParcelWorldId(worldName)
for ((pos, data) in map) {

View File

@@ -7,9 +7,9 @@ const val uppercase: Boolean = false
fun String.toCorrectCase() = if (uppercase) this else toLowerCase()
sealed class PlotmeTable(name: String) : Table(name) {
val px = PlotmePlotsT.integer("idX").primaryKey()
val pz = PlotmePlotsT.integer("idZ").primaryKey()
val world_name = PlotmePlotsT.varchar("world", 32).primaryKey()
val px = integer("idX").primaryKey()
val pz = integer("idZ").primaryKey()
val world_name = varchar("world", 32).primaryKey()
}
object PlotmePlotsT : PlotmeTable("plotmePlots".toCorrectCase()) {
@@ -18,8 +18,8 @@ object PlotmePlotsT : PlotmeTable("plotmePlots".toCorrectCase()) {
}
sealed class PlotmePlotPlayerMap(name: String) : PlotmeTable(name) {
val player_name = PlotmePlotsT.varchar("player", 32)
val player_uuid = PlotmePlotsT.blob("playerid").nullable()
val player_name = varchar("player", 32)
val player_uuid = blob("playerid").nullable()
}
object PlotmeAllowedT : PlotmePlotPlayerMap("plotmeAllowed".toCorrectCase())

View File

@@ -4,9 +4,12 @@ import io.dico.parcels2.logger
import java.io.File
fun File.tryCreate(): Boolean {
if (exists()) {
return !isDirectory
}
val parent = parentFile
if (parent == null || !(parent.exists() || parent.mkdirs()) || !createNewFile()) {
logger.warn("Failed to create file ${canonicalPath}")
logger.warn("Failed to create file $canonicalPath")
return false
}
return true
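
The behavioural change here is that tryCreate() now reports success for a file that already exists (as long as it is not a directory) instead of failing at createNewFile(), which is what lets saveOptions() in ParcelsPlugin above overwrite an existing options file. A small sketch of the resulting calling pattern follows; illustration only, with a hypothetical file and hard-coded content.

import java.io.File

// Sketch of the calling pattern enabled by the early return above (not code from this commit).
// Assumes the tryCreate extension is imported (its package is not shown in this hunk);
// `configFile` and the written content are hypothetical.
fun writeDefaults(configFile: File) {
    if (configFile.tryCreate()) {
        // Reached both when the file was just created and when it already existed as a regular file.
        configFile.writeText("worlds: {}\n")
    }
}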

View File

@@ -6,7 +6,36 @@
</encoder>
</appender>
<root level="debug">
<root level="info">
<appender-ref ref="STDOUT" />
</root>
<appender name="SQLLOG" class="ch.qos.logback.core.RollingFileAppender">
<append>true</append>
<file>C:/Parcels/sql.log</file>
<encoder>
<!-- old pattern <pattern>%d{HH:mm:ss.SSS} %magenta(%-8.-8(%thread)) %highlight(%-5level) %boldCyan(%8.-32logger{32}) - %msg</pattern>-->
<pattern>%magenta(%-8.-8(%thread)) %highlight(%-5level) %boldCyan(%6.-32logger{32}) - %msg</pattern>
</encoder>
<rollingPolicy class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
<fileNamePattern>C:/Parcels/sql%i.log</fileNamePattern>
<minIndex>1</minIndex>
<maxIndex>3</maxIndex>
</rollingPolicy>
<triggeringPolicy class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
<maxFileSize>1MB</maxFileSize>
</triggeringPolicy>
</appender>
<appender name="ASYNC_SQLLOG" class="ch.qos.logback.classic.AsyncAppender">
<appender-ref ref="SQLLOG"/>
<neverBlock>true</neverBlock>
</appender>
<logger name="Exposed" level="DEBUG">
<appender-ref ref="ASYNC_SQLLOG"/>
</logger>
</configuration>