Fixed several bugs with the new data system

Auxilor
2024-08-24 19:39:54 +01:00
parent e87b7ceb77
commit 84d481d753
9 changed files with 97 additions and 81 deletions

View File

@@ -9,6 +9,8 @@ import java.util.UUID;
/**
* Handles data read/write for a {@link com.willfp.eco.core.data.keys.PersistentDataKeyType} for a specific
* data handler.
+*
+* @param <T> The type of data.
*/
public abstract class DataTypeSerializer<T> {
/**

View File

@@ -1,7 +1,9 @@
package com.willfp.eco.core.data.handlers;
+import com.willfp.eco.core.Eco;
import com.willfp.eco.core.data.keys.PersistentDataKey;
import com.willfp.eco.core.registry.Registrable;
+import org.bukkit.Bukkit;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
@@ -125,7 +127,10 @@ public abstract class PersistentDataHandler implements Registrable {
for (PersistentDataKey<?> key : keys) {
Object value = read(uuid, key);
-data.put(key, value);
+if (value != null) {
+data.put(key, value);
+}
}
profiles.add(new SerializedProfile(uuid, data));
@@ -151,13 +156,20 @@ public abstract class PersistentDataHandler implements Registrable {
}
/**
-* Await outstanding writes.
+* Save and shutdown the handler.
*
* @throws InterruptedException If the writes could not be awaited.
*/
-public final void awaitOutstandingWrites() throws InterruptedException {
-boolean success = executor.awaitTermination(2, TimeUnit.MINUTES);
+public final void shutdown() throws InterruptedException {
+doSave();
-if (!success) {
-throw new InterruptedException("Failed to await outstanding writes");
+if (executor.isShutdown()) {
+return;
}
+executor.shutdown();
+while (!executor.awaitTermination(2, TimeUnit.MINUTES)) {
+// Wait
+}
}
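
Note the changed contract: awaitTermination returns false on timeout rather than throwing, so the loop simply keeps waiting until the write executor drains. On the caller side the pattern would be (a sketch; the try/catch placement is an assumption, not code from this commit):

// Hypothetical caller flushing a handler on plugin shutdown.
try {
    handler.shutdown() // runs doSave(), then drains queued writes
} catch (e: InterruptedException) {
    // Restore the interrupt flag rather than swallowing it.
    Thread.currentThread().interrupt()
}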
@@ -166,18 +178,4 @@ public abstract class PersistentDataHandler implements Registrable {
public final String getID() {
return id;
}
-@Override
-public boolean equals(@NotNull final Object obj) {
-if (!(obj instanceof PersistentDataHandler other)) {
-return false;
-}
-return other.getClass().equals(this.getClass());
-}
-@Override
-public int hashCode() {
-return this.getClass().hashCode();
-}
}

View File

@@ -148,7 +148,7 @@ import org.bukkit.inventory.ItemStack
abstract class EcoSpigotPlugin : EcoPlugin() {
abstract val dataYml: DataYml
-protected abstract val profileHandler: ProfileHandler
+abstract val profileHandler: ProfileHandler
protected var bukkitAudiences: BukkitAudiences? = null
init {

View File

@@ -23,12 +23,12 @@ object PersistentDataHandlers: Registry<PersistentDataHandlerFactory>() {
register(object : PersistentDataHandlerFactory("mysql") {
override fun create(plugin: EcoSpigotPlugin) =
-MySQLPersistentDataHandler(plugin, plugin.configYml.getSubsection("mysql"))
+MySQLPersistentDataHandler(plugin.configYml.getSubsection("mysql"))
})
register(object : PersistentDataHandlerFactory("mongo") {
override fun create(plugin: EcoSpigotPlugin) =
-MongoPersistentDataHandler(plugin, plugin.configYml.getSubsection("mongodb"))
+MongoPersistentDataHandler(plugin.configYml.getSubsection("mongodb"))
})
}
}
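
Both factories now build their handler from config alone rather than taking the plugin as a constructor argument. A third-party handler would register the same way; this sketch assumes the registry's register method is reachable from plugin code, and the "redis" id and handler class are hypothetical:

// Hypothetical third-party registration mirroring the built-ins above.
PersistentDataHandlers.register(object : PersistentDataHandlerFactory("redis") {
    override fun create(plugin: EcoSpigotPlugin) =
        RedisPersistentDataHandler(plugin.configYml.getSubsection("redis"))
})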

View File

@@ -21,7 +21,6 @@ import java.math.BigDecimal
import java.util.UUID
class LegacyMySQLPersistentDataHandler(
-plugin: EcoSpigotPlugin,
config: Config
) : PersistentDataHandler("legacy_mysql") {
private val dataSource = HikariDataSource(HikariConfig().apply {
@@ -101,7 +100,7 @@ class LegacyMySQLPersistentDataHandler(
object Factory: PersistentDataHandlerFactory("legacy_mysql") {
override fun create(plugin: EcoSpigotPlugin): PersistentDataHandler {
-return LegacyMySQLPersistentDataHandler(plugin, plugin.configYml.getSubsection("mysql"))
+return LegacyMySQLPersistentDataHandler(plugin.configYml.getSubsection("mysql"))
}
}
}

View File

@@ -20,7 +20,6 @@ import java.math.BigDecimal
import java.util.UUID
class MongoPersistentDataHandler(
-plugin: EcoSpigotPlugin,
config: Config
) : PersistentDataHandler("mongo") {
private val client = MongoClient.create(config.getString("url"))

View File

@@ -8,7 +8,6 @@ import com.willfp.eco.core.data.handlers.DataTypeSerializer
import com.willfp.eco.core.data.handlers.PersistentDataHandler
import com.willfp.eco.core.data.keys.PersistentDataKey
import com.willfp.eco.core.data.keys.PersistentDataKeyType
-import com.willfp.eco.internal.spigot.EcoSpigotPlugin
import com.zaxxer.hikari.HikariConfig
import com.zaxxer.hikari.HikariDataSource
import org.jetbrains.exposed.sql.Column
@@ -25,7 +24,6 @@ import java.math.BigDecimal
import java.util.UUID
class MySQLPersistentDataHandler(
-plugin: EcoSpigotPlugin,
config: Config
) : PersistentDataHandler("mysql") {
private val dataSource = HikariDataSource(HikariConfig().apply {
@@ -48,32 +46,32 @@ class MySQLPersistentDataHandler(
override val table = object : KeyTable<String>("string") {
override val value = varchar("value", 128)
}
-})
+}.createTable())
PersistentDataKeyType.BOOLEAN.registerSerializer(this, object : DirectStoreSerializer<Boolean>() {
override val table = object : KeyTable<Boolean>("boolean") {
override val value = bool("value")
}
-})
+}.createTable())
PersistentDataKeyType.INT.registerSerializer(this, object : DirectStoreSerializer<Int>() {
override val table = object : KeyTable<Int>("int") {
override val value = integer("value")
}
-})
+}.createTable())
PersistentDataKeyType.DOUBLE.registerSerializer(this, object : DirectStoreSerializer<Double>() {
override val table = object : KeyTable<Double>("double") {
override val value = double("value")
}
-})
+}.createTable())
PersistentDataKeyType.BIG_DECIMAL.registerSerializer(this, object : DirectStoreSerializer<BigDecimal>() {
override val table = object : KeyTable<BigDecimal>("big_decimal") {
// 34 digits of precision, 4 digits of scale
override val value = decimal("value", 34, 4)
}
-})
+}.createTable())
PersistentDataKeyType.CONFIG.registerSerializer(this, object : SingleValueSerializer<Config, String>() {
override val table = object : KeyTable<String>("config") {
@@ -92,13 +90,13 @@ class MySQLPersistentDataHandler(
Configs.fromMap(value.toMap(), ConfigType.JSON).toPlaintext()
}
}
-})
+}.createTable())
PersistentDataKeyType.STRING_LIST.registerSerializer(this, object : MultiValueSerializer<String>() {
override val table = object : ListKeyTable<String>("string_list") {
override val value = varchar("value", 128)
}
-})
+}.createTable())
}
override fun getSavedUUIDs(): Set<UUID> {
@@ -115,22 +113,24 @@ class MySQLPersistentDataHandler(
private abstract inner class MySQLSerializer<T : Any> : DataTypeSerializer<T>() {
protected abstract val table: ProfileTable
-init {
-transaction(database) {
-SchemaUtils.create(table)
-}
-}
fun getSavedUUIDs(): Set<UUID> {
return transaction(database) {
table.selectAll().map { it[table.uuid] }.toSet()
}
}
+fun createTable(): MySQLSerializer<T> {
+transaction(database) {
+SchemaUtils.create(table)
+}
+return this
+}
}
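
Moving table creation out of the init block into an explicit createTable() call likely fixes an initialization-order bug: a base-class init block runs before the anonymous subclass assigns its table override, so the old code created tables from a property that was still null. A standalone sketch of the trap (names illustrative, not from this commit):

abstract class Base {
    abstract val table: String

    init {
        // Runs during Base's constructor, before the subclass
        // initializes the backing field for `table`: prints "null".
        println(table)
    }
}

fun main() {
    object : Base() {
        override val table = "eco_int"
    }
}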
// T is the key type
// S is the stored value type
-private abstract inner class SingleValueSerializer<T : Any, S: Any> : MySQLSerializer<T>() {
+private abstract inner class SingleValueSerializer<T : Any, S : Any> : MySQLSerializer<T>() {
abstract override val table: KeyTable<S>
abstract fun convertToStored(value: T): S
@@ -148,17 +148,21 @@ class MySQLPersistentDataHandler(
}
override fun writeAsync(uuid: UUID, key: PersistentDataKey<T>, value: T) {
-transaction(database) {
-table.insert {
-it[table.uuid] = uuid
-it[table.key] = key.key.toString()
-it[table.value] = convertToStored(value)
+withRetries {
+transaction(database) {
+table.deleteWhere { (table.uuid eq uuid) and (table.key eq key.key.toString()) }
+table.insert {
+it[table.uuid] = uuid
+it[table.key] = key.key.toString()
+it[table.value] = convertToStored(value)
+}
+}
}
}
}
}
}
-private abstract inner class DirectStoreSerializer<T: Any> : SingleValueSerializer<T, T>() {
+private abstract inner class DirectStoreSerializer<T : Any> : SingleValueSerializer<T, T>() {
override fun convertToStored(value: T): T {
return value
}
@@ -168,7 +172,7 @@ class MySQLPersistentDataHandler(
}
}
-private abstract inner class MultiValueSerializer<T: Any> : MySQLSerializer<List<T>>() {
+private abstract inner class MultiValueSerializer<T : Any> : MySQLSerializer<List<T>>() {
abstract override val table: ListKeyTable<T>
override fun readAsync(uuid: UUID, key: PersistentDataKey<List<T>>): List<T>? {
@@ -182,15 +186,17 @@ class MySQLPersistentDataHandler(
}
override fun writeAsync(uuid: UUID, key: PersistentDataKey<List<T>>, value: List<T>) {
-transaction(database) {
-table.deleteWhere { (table.uuid eq uuid) and (table.key eq key.key.toString()) }
+withRetries {
+transaction(database) {
+table.deleteWhere { (table.uuid eq uuid) and (table.key eq key.key.toString()) }
-value.forEachIndexed { index, t ->
-table.insert {
-it[table.uuid] = uuid
-it[table.key] = key.key.toString()
-it[table.index] = index
-it[table.value] = t
+value.forEachIndexed { index, t ->
+table.insert {
+it[table.uuid] = uuid
+it[table.key] = key.key.toString()
+it[table.index] = index
+it[table.value] = t
}
}
}
}
@@ -208,7 +214,7 @@ class MySQLPersistentDataHandler(
override val primaryKey = PrimaryKey(uuid, key)
init {
-uniqueIndex()
+uniqueIndex(uuid, key)
}
}
@@ -220,7 +226,21 @@ class MySQLPersistentDataHandler(
override val primaryKey = PrimaryKey(uuid, key, index)
init {
-uniqueIndex()
+uniqueIndex(uuid, key, index)
}
}
+private inline fun <T> withRetries(action: () -> T): T {
+var retries = 0
+while (true) {
+try {
+return action()
+} catch (e: Exception) {
+if (retries >= 3) {
+throw e
+}
+retries++
+}
+}
+}
}
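
withRetries re-invokes the block on any exception, allowing up to three retries (four attempts in total) before rethrowing; there is no delay between attempts, so it only papers over transient failures such as dropped connections or deadlocks. Usage is transparent to the caller, e.g. (the query here is illustrative):

// Illustrative: the transaction is re-run up to 3 extra times
// before the final exception escapes to the caller.
val rows = withRetries {
    transaction(database) {
        table.selectAll().count()
    }
}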

View File

@@ -6,6 +6,7 @@ import com.willfp.eco.internal.spigot.data.KeyRegistry
import com.willfp.eco.internal.spigot.data.handlers.PersistentDataHandlerFactory
import com.willfp.eco.internal.spigot.data.handlers.PersistentDataHandlers
import com.willfp.eco.internal.spigot.data.handlers.impl.LegacyMySQLPersistentDataHandler
+import com.willfp.eco.internal.spigot.data.handlers.impl.MySQLPersistentDataHandler
import com.willfp.eco.internal.spigot.data.handlers.impl.YamlPersistentDataHandler
import com.willfp.eco.internal.spigot.data.profiles.impl.EcoPlayerProfile
import com.willfp.eco.internal.spigot.data.profiles.impl.EcoProfile
@@ -17,7 +18,7 @@ import java.util.concurrent.ConcurrentHashMap
class ProfileHandler(
private val plugin: EcoSpigotPlugin
) {
-private val handlerId = plugin.dataYml.getString("data-handler")
+private val handlerId = plugin.configYml.getString("data-handler")
val localHandler = YamlPersistentDataHandler(plugin)
val defaultHandler = PersistentDataHandlers[handlerId]
@@ -44,11 +45,8 @@ class ProfileHandler(
}
fun save() {
-localHandler.save()
-defaultHandler.save()
-localHandler.awaitOutstandingWrites()
-defaultHandler.awaitOutstandingWrites()
+localHandler.shutdown()
+defaultHandler.shutdown()
}
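
Since shutdown() now calls doSave() itself, the old save-then-await pair collapses into a single call per handler. A hypothetical disable hook (the hook name is an assumption, not from this commit) would just be:

// Hypothetical: assuming save() is invoked once when the plugin disables.
override fun handleDisable() {
    profileHandler.save() // shuts down both handlers, draining pending writes
}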
fun migrateIfNecessary(): Boolean {
@@ -56,28 +54,25 @@ class ProfileHandler(
return false
}
// First install
if (!plugin.dataYml.has("previous-handler")) {
plugin.dataYml.set("previous-handler", defaultHandler.id)
plugin.dataYml.set("legacy-mysql-migrated", true)
plugin.dataYml.save()
return false
}
if (defaultHandler.id == "mysql" && !plugin.dataYml.getBool("legacy-mysql-migrated")) {
plugin.logger.info("eco has detected a legacy MySQL database. Migrating to new MySQL database...")
scheduleMigration(LegacyMySQLPersistentDataHandler.Factory)
plugin.dataYml.set("legacy-mysql-migrated", true)
plugin.dataYml.save()
val previousHandlerId = plugin.dataYml.getString("previous-handler").lowercase()
if (previousHandlerId != defaultHandler.id) {
val fromFactory = PersistentDataHandlers[previousHandlerId] ?: return false
scheduleMigration(fromFactory)
return true
}
val previousHandlerId = plugin.dataYml.getString("previous-handler")
if (previousHandlerId != defaultHandler.id) {
val fromFactory = PersistentDataHandlers[previousHandlerId] ?: return false
scheduleMigration(fromFactory)
if (defaultHandler is MySQLPersistentDataHandler && !plugin.dataYml.getBool("legacy-mysql-migrated")) {
plugin.logger.info("eco has detected a legacy MySQL database. Migrating to new MySQL database...")
scheduleMigration(LegacyMySQLPersistentDataHandler.Factory)
return true
}
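
After the reorder, the checks read in priority order: a first install records the current handler and skips migration; a changed handler migrates from the previous one; only then does an un-migrated legacy MySQL database trigger the legacy path. In outline (a paraphrase of the new logic, helper names hypothetical):

// Paraphrase of the new migrateIfNecessary() ordering.
when {
    firstInstall       -> { rememberCurrentHandler(); return false }
    handlerChanged     -> { scheduleMigration(previousHandler); return true }
    legacyMySQLPending -> { scheduleMigration(legacyMySQL); return true }
    else               -> return false
}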
@@ -91,12 +86,15 @@ class ProfileHandler(
// Run after 5 ticks to allow plugins to load their data keys
plugin.scheduler.runLater(5) {
doMigrate(fromFactory)
plugin.dataYml.set("legacy-mysql-migrated", true)
plugin.dataYml.save()
}
}
private fun doMigrate(fromFactory: PersistentDataHandlerFactory) {
plugin.logger.info("eco has detected a change in data handler")
plugin.logger.info("${fromFactory.id} --> $handlerId")
plugin.logger.info("${fromFactory.id} --> ${defaultHandler.id}")
plugin.logger.info("This will take a while! Players will not be able to join during this time.")
val fromHandler = fromFactory.create(plugin)
@@ -114,7 +112,7 @@ class ProfileHandler(
}
plugin.logger.info("Profile writes submitted! Waiting for completion...")
-toHandler.awaitOutstandingWrites()
+toHandler.shutdown()
plugin.logger.info("Updating previous handler...")
plugin.dataYml.set("previous-handler", handlerId)

View File

@@ -6,8 +6,8 @@
# How player/server data is saved:
# yaml - Stored in data.yml: Good option for single-node servers (i.e. no BungeeCord/Velocity)
-# mongo - If you're running on a network (Bungee/Velocity), you should use MongoDB if you can.
-# mysql - The alternative to MongoDB. Because of how eco data works, MongoDB is the best option; but use this if you can't.
+# mysql - Standard database, great option for multi-node servers (i.e. BungeeCord/Velocity)
+# mongo - Alternative database, may suit some servers better
data-handler: yaml
# If data should be migrated automatically when changing data handler.