Compare commits

3 Commits

| Author | SHA1 | Date |
|---|---|---|
|  | 0a7acceb83 |  |
|  | 40aa8b17dd |  |
|  | 71eb386a19 |  |
@@ -79,7 +79,6 @@ public interface Config extends Cloneable, PlaceholderInjectable {
 
     /**
      * Get an object from config.
-     * Default implementations call {@link org.bukkit.configuration.file.YamlConfiguration#get(String)}.
      *
      * @param path The path.
      * @return The object.
@@ -89,7 +88,6 @@ public interface Config extends Cloneable, PlaceholderInjectable {
 
     /**
      * Set an object in config.
-     * Default implementations call {@link org.bukkit.configuration.file.YamlConfiguration#set(String, Object)}
     *
     * @param path The path.
     * @param object The object.
@@ -21,15 +21,6 @@ public interface KeyRegistry {
      */
     void registerKey(@NotNull PersistentDataKey<?> key);
 
-    /**
-     * Get a key's category.
-     *
-     * @param key The key.
-     * @return The category.
-     */
-    @Nullable
-    KeyCategory getCategory(@NotNull PersistentDataKey<?> key);
-
     /**
      * Get all registered keys.
     *
@@ -37,15 +28,6 @@ public interface KeyRegistry {
      */
     Set<PersistentDataKey<?>> getRegisteredKeys();
 
-    /**
-     * Mark key as category.
-     *
-     * @param key The key.
-     * @param category The category.
-     */
-    void markKeyAs(@NotNull PersistentDataKey<?> key,
-                   @NotNull KeyRegistry.KeyCategory category);
-
     /**
      * Get persistent data key from namespaced key.
     *
@@ -54,19 +36,4 @@ public interface KeyRegistry {
      */
     @Nullable
     PersistentDataKey<?> getKeyFrom(@NotNull NamespacedKey namespacedKey);
-
-    /**
-     * Locations for key categorization.
-     */
-    enum KeyCategory {
-        /**
-         * Player keys.
-         */
-        PLAYER,
-
-        /**
-         * Server keys.
-         */
-        SERVER
-    }
 }
@@ -83,28 +83,40 @@ public final class PersistentDataKey<T> {
     }
 
     /**
+     * In older eco versions, keys would have to be categorized in order
+     * to register the columns in the MySQL database. This is no longer needed.
+     * <p>
+     * Old description is below:
+     * <p>
      * Categorize key as a server key, will register new column to MySQL
      * database immediately rather than waiting for auto-categorization.
      * <p>
      * This will improve performance.
     *
     * @return The key.
+     * @deprecated Not required since the new MySQL data handler was introduced.
      */
+    @Deprecated(since = "6.40.0", forRemoval = true)
    public PersistentDataKey<T> server() {
-        Eco.getHandler().getKeyRegistry().markKeyAs(this, KeyRegistry.KeyCategory.SERVER);
         return this;
     }
 
     /**
+     * In older eco versions, keys would have to be categorized in order
+     * to register the columns in the MySQL database. This is no longer needed.
+     * <p>
+     * Old description is below:
+     * <p>
      * Categorize key as a player key, will register new column to MySQL
      * database immediately rather than waiting for auto-categorization.
      * <p>
      * This will improve performance.
     *
     * @return The key.
+     * @deprecated Not required since the new MySQL data handler was introduced.
      */
+    @Deprecated(since = "6.40.0", forRemoval = true)
    public PersistentDataKey<T> player() {
-        Eco.getHandler().getKeyRegistry().markKeyAs(this, KeyRegistry.KeyCategory.PLAYER);
         return this;
    }
 
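With categorization deprecated, a persistent data key is fully described by its namespaced key, its type, and a default value. A minimal sketch in Kotlin (the `example_plugin:wins` key is hypothetical; the constructor is the same one the removed KeyHelpers code later in this diff calls):

```kotlin
import com.willfp.eco.core.data.keys.PersistentDataKey
import com.willfp.eco.core.data.keys.PersistentDataKeyType
import com.willfp.eco.util.NamespacedKeyUtils

// No server()/player() call is needed any more - constructing the key is enough.
val winsKey = PersistentDataKey(
    NamespacedKeyUtils.fromStringOrNull("example_plugin:wins")!!, // hypothetical key
    PersistentDataKeyType.INT,
    0 // default value
)
```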
@@ -59,17 +59,12 @@ class EcoHandler : EcoSpigotPlugin(), Handler {
     private val cleaner = EcoCleaner()
 
     private var adventure: BukkitAudiences? = null
 
     private val keyRegistry = EcoKeyRegistry()
 
     private val playerProfileHandler = EcoProfileHandler(
-        if (this.configYml.getBool("mysql.enabled")) {
-            this.configYml.set("mysql.enabled", false)
-            this.configYml.set("data-handler", "mysql")
-            HandlerType.MYSQL
-        } else {
-            HandlerType.valueOf(
-                this.configYml.getString("data-handler").uppercase()
-            )
-        }, this
+        HandlerType.valueOf(this.configYml.getString("data-handler").uppercase()),
+        this
     )
 
     private val snbtHandler = EcoSNBTHandler(this)
@@ -245,6 +245,11 @@ abstract class EcoSpigotPlugin : EcoPlugin() {
     override fun handleReload() {
         CollatedRunnable(this)
         DropManager.update(this)
 
+        this.scheduler.runLater(3) {
+            (Eco.getHandler().profileHandler as EcoProfileHandler).migrateIfNeeded()
+        }
+
         ProfileSaver(this, Eco.getHandler().profileHandler)
         this.scheduler.runTimer(
             { clearFrames() },
@@ -366,7 +371,8 @@ abstract class EcoSpigotPlugin : EcoPlugin() {
             ArmorChangeEventListeners(this),
             DataListener(this),
             PlayerBlockListener(this),
-            PlayerHealthFixer(this)
+            PlayerHealthFixer(this),
+            ServerLocking
         )
 
         if (Prerequisite.HAS_PAPER.isMet) {
@@ -0,0 +1,28 @@
+package com.willfp.eco.internal.spigot
+
+import org.bukkit.event.EventHandler
+import org.bukkit.event.Listener
+import org.bukkit.event.player.PlayerLoginEvent
+
+object ServerLocking : Listener {
+    private var lockReason: String? = null
+
+    @Suppress("DEPRECATION")
+    @EventHandler
+    fun handle(event: PlayerLoginEvent) {
+        if (lockReason != null) {
+            event.disallow(
+                PlayerLoginEvent.Result.KICK_OTHER,
+                lockReason!!
+            )
+        }
+    }
+
+    fun lock(reason: String) {
+        lockReason = reason
+    }
+
+    fun unlock() {
+        lockReason = null
+    }
+}
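ServerLocking is deliberately tiny: while a reason is set, every `PlayerLoginEvent` is disallowed with that reason. A sketch of the intended call pattern (the migration code later in this diff locks before migrating and unlocks afterwards; `runMigration()` is a hypothetical stand-in for the long-running work):

```kotlin
ServerLocking.lock("Migrating player data! Check console for more information.")
runMigration() // hypothetical long-running job; logins are denied meanwhile
ServerLocking.unlock()
```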
@@ -1,6 +1,5 @@
 package com.willfp.eco.internal.spigot.data
 
-import com.willfp.eco.core.Eco
 import com.willfp.eco.core.data.keys.KeyRegistry
 import com.willfp.eco.core.data.keys.PersistentDataKey
 import com.willfp.eco.core.data.keys.PersistentDataKeyType
@@ -8,7 +7,6 @@ import org.bukkit.NamespacedKey
 
 class EcoKeyRegistry : KeyRegistry {
     private val registry = mutableMapOf<NamespacedKey, PersistentDataKey<*>>()
-    private val categories = mutableMapOf<NamespacedKey, KeyRegistry.KeyCategory>()
 
     override fun registerKey(key: PersistentDataKey<*>) {
         if (this.registry.containsKey(key.key)) {
@@ -24,10 +22,6 @@ class EcoKeyRegistry : KeyRegistry {
         return registry.values.toMutableSet()
     }
 
-    override fun getCategory(key: PersistentDataKey<*>): KeyRegistry.KeyCategory? {
-        return categories[key.key]
-    }
-
     private fun <T> validateKey(key: PersistentDataKey<T>) {
         val default = key.defaultValue
 
@@ -52,11 +46,6 @@ class EcoKeyRegistry : KeyRegistry {
         }
     }
 
-    override fun markKeyAs(key: PersistentDataKey<*>, category: KeyRegistry.KeyCategory) {
-        categories[key.key] = category
-        (Eco.getHandler().profileHandler as EcoProfileHandler).handler.categorize(key, category) // ew
-    }
-
     override fun getKeyFrom(namespacedKey: NamespacedKey): PersistentDataKey<*>? {
         return registry[namespacedKey]
     }
@@ -1,14 +1,16 @@
 package com.willfp.eco.internal.spigot.data
 
-import com.willfp.eco.core.Eco
 import com.willfp.eco.core.data.PlayerProfile
 import com.willfp.eco.core.data.Profile
 import com.willfp.eco.core.data.ProfileHandler
 import com.willfp.eco.core.data.ServerProfile
 import com.willfp.eco.core.data.keys.PersistentDataKey
+import com.willfp.eco.core.data.profile
 import com.willfp.eco.internal.spigot.EcoSpigotPlugin
+import com.willfp.eco.internal.spigot.ServerLocking
 import com.willfp.eco.internal.spigot.data.storage.DataHandler
 import com.willfp.eco.internal.spigot.data.storage.HandlerType
+import com.willfp.eco.internal.spigot.data.storage.LegacyMySQLDataHandler
 import com.willfp.eco.internal.spigot.data.storage.MongoDataHandler
 import com.willfp.eco.internal.spigot.data.storage.MySQLDataHandler
 import com.willfp.eco.internal.spigot.data.storage.YamlDataHandler
@@ -27,6 +29,7 @@ class EcoProfileHandler(
         HandlerType.YAML -> YamlDataHandler(plugin, this)
         HandlerType.MYSQL -> MySQLDataHandler(plugin, this)
         HandlerType.MONGO -> MongoDataHandler(plugin, this)
+        HandlerType.LEGACY_MYSQL -> LegacyMySQLDataHandler(plugin, this)
     }
 
     fun loadGenericProfile(uuid: UUID): Profile {
@@ -64,7 +67,7 @@ class EcoProfileHandler(
         handler.save()
     }
 
-    private fun migrateIfNeeded() {
+    fun migrateIfNeeded() {
         if (!plugin.configYml.getBool("perform-data-migration")) {
             return
         }
@@ -74,7 +77,12 @@ class EcoProfileHandler(
             plugin.dataYml.save()
         }
 
-        val previousHandlerType = HandlerType.valueOf(plugin.dataYml.getString("previous-handler"))
+        var previousHandlerType = HandlerType.valueOf(plugin.dataYml.getString("previous-handler"))
+
+        if (previousHandlerType == HandlerType.MYSQL && !plugin.dataYml.has("new-mysql")) {
+            previousHandlerType = HandlerType.LEGACY_MYSQL
+        }
 
         if (previousHandlerType == type) {
             return
@@ -84,12 +92,18 @@ class EcoProfileHandler(
             HandlerType.YAML -> YamlDataHandler(plugin, this)
             HandlerType.MYSQL -> MySQLDataHandler(plugin, this)
             HandlerType.MONGO -> MongoDataHandler(plugin, this)
+            HandlerType.LEGACY_MYSQL -> LegacyMySQLDataHandler(plugin, this)
         }
 
+        ServerLocking.lock("Migrating player data! Check console for more information.")
+
         plugin.logger.info("eco has detected a change in data handler!")
         plugin.logger.info("Migrating server data from ${previousHandlerType.name} to ${type.name}")
         plugin.logger.info("This will take a while!")
 
+        plugin.logger.info("Initializing previous handler...")
+        previousHandler.initialize()
+
         val players = Bukkit.getOfflinePlayers().map { it.uniqueId }
 
         plugin.logger.info("Found data for ${players.size} players!")
@@ -98,12 +112,9 @@ class EcoProfileHandler(
         Declared here as its own function to be able to use T.
          */
        fun <T : Any> migrateKey(uuid: UUID, key: PersistentDataKey<T>, from: DataHandler, to: DataHandler) {
-            val category = Eco.getHandler().keyRegistry.getCategory(key)
-            if (category != null) {
-                from.categorize(key, category)
-            }
             val previous: T? = from.read(uuid, key)
             if (previous != null) {
+                Bukkit.getOfflinePlayer(uuid).profile.write(key, previous) // Nope, no idea.
                 to.write(uuid, key, previous)
             }
         }
@@ -111,29 +122,35 @@ class EcoProfileHandler(
         var i = 1
         for (uuid in players) {
             plugin.logger.info("Migrating data for $uuid... ($i / ${players.size})")
 
             for (key in PersistentDataKey.values()) {
-                migrateKey(uuid, key, previousHandler, handler)
+                // Why this? Because known points *really* likes to break things with the legacy MySQL handler.
+                if (key.key.key == "known_points") {
+                    continue
+                }
+
+                try {
+                    migrateKey(uuid, key, previousHandler, handler)
+                } catch (e: Exception) {
+                    plugin.logger.info("Could not migrate ${key.key} for $uuid! This is probably because they do not have any data.")
+                }
             }
 
             i++
         }
 
+        plugin.logger.info("Saving new data...")
+        handler.save()
         plugin.logger.info("Updating previous handler...")
         plugin.dataYml.set("previous-handler", type.name)
         plugin.dataYml.save()
-        plugin.logger.info("Done!")
+        plugin.logger.info("The server will now automatically be restarted...")
+
+        ServerLocking.unlock()
+
+        Bukkit.getServer().shutdown()
     }
 
     fun initialize() {
-        plugin.dataYml.getStrings("categorized-keys.player")
-            .mapNotNull { KeyHelpers.deserializeFromString(it) }
-
-        plugin.dataYml.getStrings("categorized-keys.server")
-            .mapNotNull { KeyHelpers.deserializeFromString(it, server = true) }
-
         handler.initialize()
-
-        migrateIfNeeded()
     }
 }
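The `// Nope, no idea.` write above goes through eco's profile API rather than the target handler directly. For reference, a minimal sketch of that API as it appears in this diff (the `profile` extension is the one imported from `com.willfp.eco.core.data.profile`; `someKey` stands for any registered `PersistentDataKey<Int>` and is hypothetical):

```kotlin
import com.willfp.eco.core.data.profile // extension added in this diff
import org.bukkit.Bukkit

val player = Bukkit.getOfflinePlayer(uuid)
val current: Int = player.profile.read(someKey) // falls back to the key's default
player.profile.write(someKey, current + 1)      // persisted by the active DataHandler
```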
@@ -1,56 +0,0 @@
-package com.willfp.eco.internal.spigot.data
-
-import com.willfp.eco.core.data.keys.PersistentDataKey
-import com.willfp.eco.core.data.keys.PersistentDataKeyType
-import com.willfp.eco.util.NamespacedKeyUtils
-
-@Suppress("UNCHECKED_CAST")
-object KeyHelpers {
-    fun deserializeFromString(serialized: String, server: Boolean = false): PersistentDataKey<*>? {
-        val split = serialized.split(";").toTypedArray()
-
-        if (split.size < 2) {
-            return null
-        }
-
-        val key = NamespacedKeyUtils.fromStringOrNull(split[0]) ?: return null
-        val type = PersistentDataKeyType.valueOf(split[1]) ?: return null
-        val persistentKey = when (type) {
-            PersistentDataKeyType.STRING -> PersistentDataKey(
-                key,
-                type as PersistentDataKeyType<String>,
-                if (split.size >= 3) split.toList().subList(2, split.size).joinToString("") else ""
-            )
-            PersistentDataKeyType.INT -> PersistentDataKey(
-                key,
-                type as PersistentDataKeyType<Int>,
-                split[2].toInt()
-            )
-            PersistentDataKeyType.DOUBLE -> PersistentDataKey(
-                key,
-                type as PersistentDataKeyType<Double>,
-                split[2].toDouble()
-            )
-            PersistentDataKeyType.BOOLEAN -> PersistentDataKey(
-                key,
-                type as PersistentDataKeyType<Boolean>,
-                java.lang.Boolean.parseBoolean(split[2])
-            )
-            else -> null
-        }
-
-        if (persistentKey != null) {
-            if (server) {
-                persistentKey.server()
-            } else {
-                persistentKey.player()
-            }
-        }
-
-        return persistentKey
-    }
-
-    fun serializeToString(key: PersistentDataKey<*>): String {
-        return "${key.key};${key.type.name()};${key.defaultValue}"
-    }
-}
@@ -1,6 +1,5 @@
 package com.willfp.eco.internal.spigot.data.storage
 
-import com.willfp.eco.core.data.keys.KeyRegistry
 import com.willfp.eco.core.data.keys.PersistentDataKey
 import java.util.UUID
 
@@ -28,10 +27,6 @@ abstract class DataHandler(
 
     }
 
-    open fun categorize(key: PersistentDataKey<*>, category: KeyRegistry.KeyCategory) {
-
-    }
-
     open fun initialize() {
 
     }
@@ -3,5 +3,6 @@ package com.willfp.eco.internal.spigot.data.storage
 enum class HandlerType {
     YAML,
     MYSQL,
-    MONGO
+    MONGO,
+    LEGACY_MYSQL
 }
@@ -0,0 +1,309 @@
+package com.willfp.eco.internal.spigot.data.storage
+
+import com.github.benmanes.caffeine.cache.Caffeine
+import com.google.common.util.concurrent.ThreadFactoryBuilder
+import com.willfp.eco.core.Eco
+import com.willfp.eco.core.EcoPlugin
+import com.willfp.eco.core.data.keys.PersistentDataKey
+import com.willfp.eco.core.data.keys.PersistentDataKeyType
+import com.willfp.eco.internal.spigot.EcoSpigotPlugin
+import com.willfp.eco.internal.spigot.data.EcoProfileHandler
+import com.willfp.eco.internal.spigot.data.serverProfileUUID
+import com.zaxxer.hikari.HikariConfig
+import com.zaxxer.hikari.HikariDataSource
+import org.jetbrains.exposed.dao.id.UUIDTable
+import org.jetbrains.exposed.sql.BooleanColumnType
+import org.jetbrains.exposed.sql.Column
+import org.jetbrains.exposed.sql.Database
+import org.jetbrains.exposed.sql.DoubleColumnType
+import org.jetbrains.exposed.sql.IntegerColumnType
+import org.jetbrains.exposed.sql.ResultRow
+import org.jetbrains.exposed.sql.SchemaUtils
+import org.jetbrains.exposed.sql.VarCharColumnType
+import org.jetbrains.exposed.sql.insert
+import org.jetbrains.exposed.sql.select
+import org.jetbrains.exposed.sql.transactions.transaction
+import org.jetbrains.exposed.sql.update
+import java.util.UUID
+import java.util.concurrent.Callable
+import java.util.concurrent.Executors
+import java.util.concurrent.TimeUnit
+
+/*
+
+The MySQL data handler is hot garbage for several reasons:
+ - Using MySQL on unstructured data: it's being horrifically misused, but that's just how it has to be.
+ - Can't remove un-needed keys, there's wasted space in the columns everywhere.
+ - No native support for the STRING_LIST type, instead it 'serializes' the lists with semicolons as separators.
+ - General lack of flexibility, it's too rigid.
+
+That's why I added the MongoDB handler, it's far, far better suited for what eco does - use it over
+MySQL if you can.
+
+Oh, also - I don't really know how this class works. I've rewritten it and hacked it together several ways
+in several sessions, and it's basically complete gibberish to me. Adding the STRING_LIST type is probably
+the worst bodge I've shipped in production.
+
+ */
+
+@Suppress("UNCHECKED_CAST")
+class LegacyMySQLDataHandler(
+    private val plugin: EcoSpigotPlugin,
+    handler: EcoProfileHandler
+) : DataHandler(HandlerType.LEGACY_MYSQL) {
+    private val playerHandler: ImplementedMySQLHandler
+    private val serverHandler: ImplementedMySQLHandler
+
+    init {
+        val config = HikariConfig()
+        config.driverClassName = "com.mysql.cj.jdbc.Driver"
+        config.username = plugin.configYml.getString("mysql.user")
+        config.password = plugin.configYml.getString("mysql.password")
+        config.jdbcUrl = "jdbc:mysql://" +
+                "${plugin.configYml.getString("mysql.host")}:" +
+                "${plugin.configYml.getString("mysql.port")}/" +
+                plugin.configYml.getString("mysql.database")
+        config.maximumPoolSize = plugin.configYml.getInt("mysql.connections")
+
+        Database.connect(HikariDataSource(config))
+
+        playerHandler = ImplementedMySQLHandler(
+            handler,
+            UUIDTable("eco_players"),
+            plugin
+        )
+
+        serverHandler = ImplementedMySQLHandler(
+            handler,
+            UUIDTable("eco_server"),
+            plugin
+        )
+    }
+
+    override fun <T : Any> read(uuid: UUID, key: PersistentDataKey<T>): T? {
+        return applyFor(uuid) {
+            it.read(uuid, key)
+        }
+    }
+
+    override fun <T : Any> write(uuid: UUID, key: PersistentDataKey<T>, value: T) {
+        applyFor(uuid) {
+            it.write(uuid, key, value)
+        }
+    }
+
+    override fun saveKeysFor(uuid: UUID, keys: Set<PersistentDataKey<*>>) {
+        applyFor(uuid) {
+            it.saveKeysForRow(uuid, keys)
+        }
+    }
+
+    private inline fun <R> applyFor(uuid: UUID, function: (ImplementedMySQLHandler) -> R): R {
+        return if (uuid == serverProfileUUID) {
+            function(serverHandler)
+        } else {
+            function(playerHandler)
+        }
+    }
+
+    override fun initialize() {
+        playerHandler.initialize()
+        serverHandler.initialize()
+    }
+}
+
+@Suppress("UNCHECKED_CAST")
+private class ImplementedMySQLHandler(
+    private val handler: EcoProfileHandler,
+    private val table: UUIDTable,
+    private val plugin: EcoPlugin
+) {
+    private val rows = Caffeine.newBuilder()
+        .expireAfterWrite(3, TimeUnit.SECONDS)
+        .build<UUID, ResultRow>()
+
+    private val threadFactory = ThreadFactoryBuilder().setNameFormat("eco-mysql-thread-%d").build()
+    private val executor = Executors.newFixedThreadPool(plugin.configYml.getInt("mysql.threads"), threadFactory)
+    val registeredKeys = mutableSetOf<PersistentDataKey<*>>()
+
+    init {
+        transaction {
+            SchemaUtils.create(table)
+        }
+    }
+
+    fun initialize() {
+        transaction {
+            SchemaUtils.createMissingTablesAndColumns(table, withLogs = false)
+        }
+    }
+
+    fun ensureKeyRegistration(key: PersistentDataKey<*>) {
+        if (table.columns.any { it.name == key.key.toString() }) {
+            registeredKeys.add(key)
+            return
+        }
+
+        registerColumn(key)
+        registeredKeys.add(key)
+    }
+
+    fun <T : Any> write(uuid: UUID, key: PersistentDataKey<T>, value: Any) {
+        getRow(uuid)
+        doWrite(uuid, key, key.type.constrainSQLTypes(value))
+    }
+
+    private fun doWrite(uuid: UUID, key: PersistentDataKey<*>, constrainedValue: Any) {
+        val column: Column<Any> = getColumn(key) as Column<Any>
+
+        executor.submit {
+            transaction {
+                table.update({ table.id eq uuid }) {
+                    it[column] = constrainedValue
+                }
+            }
+        }
+    }
+
+    fun saveKeysForRow(uuid: UUID, keys: Set<PersistentDataKey<*>>) {
+        saveRow(uuid, keys)
+    }
+
+    private fun saveRow(uuid: UUID, keys: Set<PersistentDataKey<*>>) {
+        val profile = handler.loadGenericProfile(uuid)
+
+        executor.submit {
+            transaction {
+                getRow(uuid)
+
+                for (key in keys) {
+                    doWrite(uuid, key, key.type.constrainSQLTypes(profile.read(key)))
+                }
+            }
+        }
+    }
+
+    fun <T> read(uuid: UUID, key: PersistentDataKey<T>): T? {
+        val doRead = Callable<T?> {
+            transaction {
+                val row = getRow(uuid)
+                val column = getColumn(key)
+                val raw = row[column]
+                key.type.fromConstrained(raw)
+            }
+        }
+
+        ensureKeyRegistration(key) // DON'T DELETE THIS LINE! I know it's covered in getColumn, but I need to do it here as well.
+
+        doRead.call()
+
+        return if (Eco.getHandler().ecoPlugin.configYml.getBool("mysql.async-reads")) {
+            executor.submit(doRead).get()
+        } else {
+            doRead.call()
+        }
+    }
+
+    private fun <T> registerColumn(key: PersistentDataKey<T>) {
+        try {
+            transaction {
+                try {
+                    table.apply {
+                        if (table.columns.any { it.name == key.key.toString() }) {
+                            return@apply
+                        }
+
+                        when (key.type) {
+                            PersistentDataKeyType.INT -> registerColumn<Int>(key.key.toString(), IntegerColumnType())
+                                .default(key.defaultValue as Int)
+                            PersistentDataKeyType.DOUBLE -> registerColumn<Double>(
+                                key.key.toString(),
+                                DoubleColumnType()
+                            )
+                                .default(key.defaultValue as Double)
+                            PersistentDataKeyType.BOOLEAN -> registerColumn<Boolean>(
+                                key.key.toString(),
+                                BooleanColumnType()
+                            )
+                                .default(key.defaultValue as Boolean)
+                            PersistentDataKeyType.STRING -> registerColumn<String>(
+                                key.key.toString(),
+                                VarCharColumnType(512)
+                            )
+                                .default(key.defaultValue as String)
+                            PersistentDataKeyType.STRING_LIST -> registerColumn<String>(
+                                key.key.toString(),
+                                VarCharColumnType(8192)
+                            ).default(PersistentDataKeyType.STRING_LIST.constrainSQLTypes(key.defaultValue as List<String>) as String)
+
+                            else -> throw NullPointerException("Null value found!")
+                        }
+                    }
+
+                    SchemaUtils.createMissingTablesAndColumns(table, withLogs = false)
+                } catch (e: Exception) {
+                    plugin.logger.info("MySQL Error 1!")
+                    e.printStackTrace()
+                    // What's that? Two enormous exception catches? That's right! This code sucks.
+                }
+            }
+        } catch (e: Exception) {
+            plugin.logger.info("MySQL Error 2!")
+            e.printStackTrace()
+            // It might fail. Who cares? This is legacy.
+        }
+    }
+
+    private fun getColumn(key: PersistentDataKey<*>): Column<*> {
+        ensureKeyRegistration(key)
+
+        val name = key.key.toString()
+
+        return table.columns.first { it.name == name }
+    }
+
+    private fun getRow(uuid: UUID): ResultRow {
+        fun select(uuid: UUID): ResultRow? {
+            return transaction {
+                table.select { table.id eq uuid }.limit(1).singleOrNull()
+            }
+        }
+
+        return rows.get(uuid) {
+            val row = select(uuid)
+
+            return@get if (row != null) {
+                row
+            } else {
+                transaction {
+                    table.insert { it[id] = uuid }
+                }
+                select(uuid)
+            }
+        }
+    }
+}
+
+private fun <T> PersistentDataKeyType<T>.constrainSQLTypes(value: Any): Any {
+    return if (this == PersistentDataKeyType.STRING_LIST) {
+        @Suppress("UNCHECKED_CAST")
+        value as List<String>
+        value.joinToString(separator = ";")
+    } else {
+        value
+    }
+}
+
+private fun <T> PersistentDataKeyType<T>.fromConstrained(constrained: Any?): T? {
+    if (constrained == null) {
+        return null
+    }
+
+    @Suppress("UNCHECKED_CAST")
+    return if (this == PersistentDataKeyType.STRING_LIST) {
+        constrained as String
+        constrained.split(";").toList()
+    } else {
+        constrained
+    } as T
+}
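The "worst bodge" the comment block above refers to is the STRING_LIST handling in `constrainSQLTypes`/`fromConstrained`: lists are flattened into a single semicolon-joined string. A minimal sketch of the round trip, and its obvious weakness:

```kotlin
// How the legacy handler "serializes" a STRING_LIST column value...
val stored = listOf("a", "b", "c").joinToString(separator = ";") // "a;b;c"

// ...and how it restores it on read.
val restored = stored.split(";").toList() // ["a", "b", "c"]

// Weakness: any element that itself contains ';' cannot round-trip correctly.
```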
@@ -2,64 +2,54 @@ package com.willfp.eco.internal.spigot.data.storage
 
 import com.github.benmanes.caffeine.cache.Caffeine
 import com.google.common.util.concurrent.ThreadFactoryBuilder
-import com.willfp.eco.core.Eco
-import com.willfp.eco.core.EcoPlugin
-import com.willfp.eco.core.data.keys.KeyRegistry
+import com.willfp.eco.core.config.ConfigType
+import com.willfp.eco.core.config.TransientConfig
+import com.willfp.eco.core.config.interfaces.Config
 import com.willfp.eco.core.data.keys.PersistentDataKey
 import com.willfp.eco.core.data.keys.PersistentDataKeyType
 import com.willfp.eco.internal.spigot.EcoSpigotPlugin
 import com.willfp.eco.internal.spigot.data.EcoProfileHandler
-import com.willfp.eco.internal.spigot.data.KeyHelpers
-import com.willfp.eco.internal.spigot.data.serverProfileUUID
 import com.zaxxer.hikari.HikariConfig
 import com.zaxxer.hikari.HikariDataSource
 import org.jetbrains.exposed.dao.id.UUIDTable
-import org.jetbrains.exposed.sql.BooleanColumnType
 import org.jetbrains.exposed.sql.Column
 import org.jetbrains.exposed.sql.Database
-import org.jetbrains.exposed.sql.DoubleColumnType
-import org.jetbrains.exposed.sql.IntegerColumnType
 import org.jetbrains.exposed.sql.ResultRow
 import org.jetbrains.exposed.sql.SchemaUtils
-import org.jetbrains.exposed.sql.VarCharColumnType
+import org.jetbrains.exposed.sql.TextColumnType
 import org.jetbrains.exposed.sql.insert
 import org.jetbrains.exposed.sql.select
 import org.jetbrains.exposed.sql.transactions.transaction
 import org.jetbrains.exposed.sql.update
 import java.util.UUID
-import java.util.concurrent.Callable
 import java.util.concurrent.Executors
 import java.util.concurrent.TimeUnit
 
 /*
-
-The MySQL data handler is hot garbage for several reasons:
- - Using MySQL on unstructured data: it's being horrifically misused, but that's just how it has to be.
- - Can't remove un-needed keys, there's wasted space in the columns everywhere.
- - No native support for the STRING_LIST type, instead it 'serializes' the lists with semicolons as separators.
- - General lack of flexibility, it's too rigid.
-
-That's why I added the MongoDB handler, it's far, far better suited for what eco does - use it over
-MySQL if you can.
-
-Oh, also - I don't really know how this class works. I've rewritten it and hacked it together several ways
-in several sessions, and it's basically complete gibberish to me. Adding the STRING_LIST type is probably
-the worst bodge I've shipped in production.
-
+Better than old MySQL data handler, but that's only because it's literally just dumping all the
+data into a single text column, containing the contents of the players profile as a Config.
+
+Whatever. At least it works.
 */
 
 @Suppress("UNCHECKED_CAST")
 class MySQLDataHandler(
     private val plugin: EcoSpigotPlugin,
-    handler: EcoProfileHandler
+    private val handler: EcoProfileHandler
 ) : DataHandler(HandlerType.MYSQL) {
-    private val playerHandler: ImplementedMySQLHandler
-    private val serverHandler: ImplementedMySQLHandler
+    private val table = UUIDTable("eco_data")
+
+    private val rows = Caffeine.newBuilder()
+        .expireAfterWrite(3, TimeUnit.SECONDS)
+        .build<UUID, ResultRow>()
+
+    private val threadFactory = ThreadFactoryBuilder().setNameFormat("eco-mysql-thread-%d").build()
+    private val executor = Executors.newFixedThreadPool(plugin.configYml.getInt("mysql.threads"), threadFactory)
+
+    private val dataColumn: Column<String>
+        get() = table.columns.first { it.name == "json_data" } as Column<String>
 
     init {
-        plugin.logger.warning("You're using the MySQL Data Handler")
-        plugin.logger.warning("It's recommended to switch to MongoDB (mongo)!")
-
         val config = HikariConfig()
         config.driverClassName = "com.mysql.cj.jdbc.Driver"
         config.username = plugin.configYml.getString("mysql.user")
@@ -72,238 +62,93 @@ class MySQLDataHandler(
 
         Database.connect(HikariDataSource(config))
 
-        playerHandler = ImplementedMySQLHandler(
-            handler,
-            UUIDTable("eco_players"),
-            plugin
-        )
-
-        serverHandler = ImplementedMySQLHandler(
-            handler,
-            UUIDTable("eco_server"),
-            plugin
-        )
+        transaction {
+            SchemaUtils.create(table)
+
+            table.apply {
+                registerColumn<String>("json_data", TextColumnType())
+                    .default("{}")
+            }
+
+            SchemaUtils.createMissingTablesAndColumns(table, withLogs = false)
+        }
     }
 
     override fun <T : Any> read(uuid: UUID, key: PersistentDataKey<T>): T? {
-        return applyFor(uuid) {
-            it.read(uuid, key)
-        }
+        val data = getData(uuid)
+
+        val value: Any? = when (key.type) {
+            PersistentDataKeyType.INT -> data.getIntOrNull(key.key.toString())
+            PersistentDataKeyType.DOUBLE -> data.getDoubleOrNull(key.key.toString())
+            PersistentDataKeyType.STRING -> data.getStringOrNull(key.key.toString())
+            PersistentDataKeyType.BOOLEAN -> data.getBoolOrNull(key.key.toString())
+            PersistentDataKeyType.STRING_LIST -> data.getStringsOrNull(key.key.toString())
+            else -> null
+        }
+
+        return value as? T?
     }
 
     override fun <T : Any> write(uuid: UUID, key: PersistentDataKey<T>, value: T) {
-        applyFor(uuid) {
-            it.write(uuid, key, value)
-        }
+        val data = getData(uuid)
+        data.set(key.key.toString(), value)
+
+        setData(uuid, data)
     }
 
     override fun saveKeysFor(uuid: UUID, keys: Set<PersistentDataKey<*>>) {
-        applyFor(uuid) {
-            it.saveKeysForRow(uuid, keys)
-        }
+        val profile = handler.loadGenericProfile(uuid)
+
+        executor.submit {
+            val data = getData(uuid)
+            for (key in keys) {
+                data.set(key.key.toString(), profile.read(key))
+            }
+
+            setData(uuid, data)
+        }
     }
 
-    private inline fun <R> applyFor(uuid: UUID, function: (ImplementedMySQLHandler) -> R): R {
-        return if (uuid == serverProfileUUID) {
-            function(serverHandler)
-        } else {
-            function(playerHandler)
-        }
-    }
+    private fun getData(uuid: UUID): Config {
+        val plaintext = transaction {
+            val row = rows.get(uuid) {
+                val row = table.select { table.id eq uuid }.limit(1).singleOrNull()
+
+                if (row != null) {
+                    row
+                } else {
+                    transaction {
+                        table.insert { it[id] = uuid }
+                    }
+                    table.select { table.id eq uuid }.limit(1).singleOrNull()
+                }
+            }
+
+            row[dataColumn]
+        }
+
+        return TransientConfig(plaintext, ConfigType.JSON)
+    }
+
+    private fun setData(uuid: UUID, config: Config) {
+        executor.submit {
+            transaction {
+                table.update({ table.id eq uuid }) {
+                    it[dataColumn] = config.toPlaintext()
+                }
+            }
+        }
+    }
 
-    override fun categorize(key: PersistentDataKey<*>, category: KeyRegistry.KeyCategory) {
-        if (category == KeyRegistry.KeyCategory.SERVER) {
-            serverHandler.ensureKeyRegistration(key)
-        } else {
-            playerHandler.ensureKeyRegistration(key)
-        }
+    override fun initialize() {
+        transaction {
+            SchemaUtils.createMissingTablesAndColumns(table, withLogs = false)
+        }
     }
 
     override fun save() {
-        plugin.dataYml.set(
-            "categorized-keys.player",
-            playerHandler.registeredKeys
-                .map { KeyHelpers.serializeToString(it) }
-        )
-        plugin.dataYml.set(
-            "categorized-keys.server",
-            serverHandler.registeredKeys
-                .map { KeyHelpers.serializeToString(it) }
-        )
+        plugin.dataYml.set("new-mysql", true)
         plugin.dataYml.save()
     }
-
-    override fun initialize() {
-        playerHandler.initialize()
-        serverHandler.initialize()
-    }
-}
-
-@Suppress("UNCHECKED_CAST")
-private class ImplementedMySQLHandler(
-    private val handler: EcoProfileHandler,
-    private val table: UUIDTable,
-    plugin: EcoPlugin
-) {
-    private val rows = Caffeine.newBuilder()
-        .expireAfterWrite(3, TimeUnit.SECONDS)
-        .build<UUID, ResultRow>()
-
-    private val threadFactory = ThreadFactoryBuilder().setNameFormat("eco-mysql-thread-%d").build()
-    private val executor = Executors.newFixedThreadPool(plugin.configYml.getInt("mysql.threads"), threadFactory)
-    val registeredKeys = mutableSetOf<PersistentDataKey<*>>()
-
-    init {
-        transaction {
-            SchemaUtils.create(table)
-        }
-    }
-
-    fun initialize() {
-        transaction {
-            SchemaUtils.createMissingTablesAndColumns(table, withLogs = false)
-        }
-    }
-
-    fun ensureKeyRegistration(key: PersistentDataKey<*>) {
-        if (table.columns.any { it.name == key.key.toString() }) {
-            registeredKeys.add(key)
-            return
-        }
-
-        registerColumn(key)
-        registeredKeys.add(key)
-    }
-
-    fun <T : Any> write(uuid: UUID, key: PersistentDataKey<T>, value: Any) {
-        getRow(uuid)
-        doWrite(uuid, key, key.type.constrainSQLTypes(value))
-    }
-
-    private fun doWrite(uuid: UUID, key: PersistentDataKey<*>, constrainedValue: Any) {
-        val column: Column<Any> = getColumn(key) as Column<Any>
-
-        executor.submit {
-            transaction {
-                table.update({ table.id eq uuid }) {
-                    it[column] = constrainedValue
-                }
-            }
-        }
-    }
-
-    fun saveKeysForRow(uuid: UUID, keys: Set<PersistentDataKey<*>>) {
-        saveRow(uuid, keys)
-    }
-
-    private fun saveRow(uuid: UUID, keys: Set<PersistentDataKey<*>>) {
-        val profile = handler.loadGenericProfile(uuid)
-
-        executor.submit {
-            transaction {
-                getRow(uuid)
-
-                for (key in keys) {
-                    doWrite(uuid, key, key.type.constrainSQLTypes(profile.read(key)))
-                }
-            }
-        }
-    }
-
-    fun <T> read(uuid: UUID, key: PersistentDataKey<T>): T? {
-        val doRead = Callable<T?> {
-            transaction {
-                val row = getRow(uuid)
-                val column = getColumn(key)
-                val raw = row[column]
-                key.type.fromConstrained(raw)
-            }
-        }
-
-        ensureKeyRegistration(key) // DON'T DELETE THIS LINE! I know it's covered in getColumn, but I need to do it here as well.
-
-        return if (Eco.getHandler().ecoPlugin.configYml.getBool("mysql.async-reads")) {
-            executor.submit(doRead).get()
-        } else {
-            doRead.call()
-        }
-    }
-
-    private fun <T> registerColumn(key: PersistentDataKey<T>) {
-        transaction {
-            table.apply {
-                when (key.type) {
-                    PersistentDataKeyType.INT -> registerColumn<Int>(key.key.toString(), IntegerColumnType())
-                        .default(key.defaultValue as Int)
-                    PersistentDataKeyType.DOUBLE -> registerColumn<Double>(key.key.toString(), DoubleColumnType())
-                        .default(key.defaultValue as Double)
-                    PersistentDataKeyType.BOOLEAN -> registerColumn<Boolean>(key.key.toString(), BooleanColumnType())
-                        .default(key.defaultValue as Boolean)
-                    PersistentDataKeyType.STRING -> registerColumn<String>(key.key.toString(), VarCharColumnType(512))
-                        .default(key.defaultValue as String)
-                    PersistentDataKeyType.STRING_LIST -> registerColumn<String>(
-                        key.key.toString(),
-                        VarCharColumnType(8192)
-                    ).default(PersistentDataKeyType.STRING_LIST.constrainSQLTypes(key.defaultValue as List<String>) as String)
-
-                    else -> throw NullPointerException("Null value found!")
-                }
-            }
-
-            SchemaUtils.createMissingTablesAndColumns(table, withLogs = false)
-        }
-    }
-
-    private fun getColumn(key: PersistentDataKey<*>): Column<*> {
-        ensureKeyRegistration(key)
-
-        val name = key.key.toString()
-
-        return table.columns.first { it.name == name }
-    }
-
-    private fun getRow(uuid: UUID): ResultRow {
-        fun select(uuid: UUID): ResultRow? {
-            return transaction {
-                table.select { table.id eq uuid }.limit(1).singleOrNull()
-            }
-        }
-
-        return rows.get(uuid) {
-            val row = select(uuid)
-
-            return@get if (row != null) {
-                row
-            } else {
-                transaction {
-                    table.insert { it[id] = uuid }
-                }
-                select(uuid)
-            }
-        }
-    }
-}
-
-private fun <T> PersistentDataKeyType<T>.constrainSQLTypes(value: Any): Any {
-    return if (this == PersistentDataKeyType.STRING_LIST) {
-        @Suppress("UNCHECKED_CAST")
-        value as List<String>
-        value.joinToString(separator = ";")
-    } else {
-        value
-    }
-}
-
-private fun <T> PersistentDataKeyType<T>.fromConstrained(constrained: Any?): T? {
-    if (constrained == null) {
-        return null
-    }
-
-    @Suppress("UNCHECKED_CAST")
-    return if (this == PersistentDataKeyType.STRING_LIST) {
-        constrained as String
-        constrained.split(";").toList()
-    } else {
-        constrained
-    } as T
 }
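Net effect of the rewrite above: the per-key columns spread over `eco_players`/`eco_server` are gone, and everything now lives in one `eco_data` table with a single JSON text column per UUID. Condensed to its essence, a sketch reusing the Exposed calls from the diff:

```kotlin
// One row per profile UUID; the whole profile lives in one JSON column.
val table = UUIDTable("eco_data")
table.apply {
    registerColumn<String>("json_data", TextColumnType()).default("{}")
}
// Reads parse json_data back into a Config (TransientConfig + ConfigType.JSON);
// writes serialize the Config and UPDATE that single column.
```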
@@ -6,8 +6,8 @@
 
 # How player/server data is saved:
 # yaml - Stored in data.yml: Good option for single-node servers (i.e. no BungeeCord/Velocity)
-# mongo - (Recommended) If you're running on a network (Bungee/Velocity), you should use MongoDB if you can.
-# mysql - (Not Recommended) The basic choice for Bungee/Velocity networks, less flexible and worse performance than MongoDB. Only use it if you can't use MongoDB.
+# mongo - If you're running on a network (Bungee/Velocity), you should use MongoDB if you can.
+# mysql - The alternative to MongoDB. Because of how eco data works, MongoDB is the best option; but use this if you can't.
 data-handler: yaml
 
 # If data should be migrated automatically when changing data handler.
@@ -24,17 +24,12 @@ mysql:
   threads: 2
   # The maximum number of MySQL connections.
   connections: 10
-  # If read operations should be run in the thread pool. Runs on main thread by default.
-  async-reads: false
   host: localhost
   port: 3306
   database: database
   user: username
   password: passy
 
-  # Ignore this option, it does nothing.
-  enabled: false # Ignore this - only for backwards compatibility
-
 # Options to manage the conflict finder
 conflicts:
   whitelist: # Plugins that should never be marked as conflicts
@@ -1,71 +1 @@
 # For internal storage use only, do not modify.
-
-categorized-keys:
-  # Preloading known keys (as of the release of 6.25.0) for optimal performance.
-  # This is only used when MySQL is enabled as the columns must be added each time a new key is registered.
-  player:
-    - ecoskills:crit_damage;INT;0
-    - ecoskills:strong_impact;INT;0
-    - ecoskills:shamanism;INT;0
-    - ecoskills:reimbursement;INT;0
-    - ecoskills:armory_xp;DOUBLE;0.0
-    - ecoskills:bravery;INT;0
-    - ecoskills:seamless_movement;INT;0
-    - ecoskills:fishing;INT;0
-    - ecoskills:armory;INT;0
-    - ecoskills:accelerated_escape;INT;0
-    - ecoskills:alchemy_xp;DOUBLE;0.0
-    - boosters:2sell_multiplier;INT;0
-    - ecoskills:second_chance;INT;0
-    - ecoskills:health;INT;0
-    - ecoskills:spelunking;INT;0
-    - eco:player_name;STRING;Unknown Player
-    - ecoskills:strength;INT;0
-    - ecoskills:woodcutting_xp;DOUBLE;0.0
-    - ecoskills:versatile_tools;INT;0
-    - boosters:skill_xp;INT;0
-    - ecoskills:infernal_resistance;INT;0
-    - ecoskills:wisdom;INT;0
-    - ecoskills:master_lumberjack;INT;0
-    - ecoskills:defense;INT;0
-    - ecoskills:mystic_resilience;INT;0
-    - ecoskills:gainsound;BOOLEAN;true
-    - ecoskills:golden_yield;INT;0
-    - ecoskills:dazzle;INT;0
-    - ecoskills:dodging;INT;0
-    - ecoskills:efficient_brewing;INT;0
-    - ecoskills:bountiful_harvest;INT;0
-    - ecoskills:actionbar_enabled;BOOLEAN;true
-    - ecoskills:enchanting_xp;DOUBLE;0.0
-    - ecoskills:overcompensation;INT;0
-    - ecoskills:alchemy;INT;0
-    - ecoskills:woodcutting;INT;0
-    - ecoskills:mining;INT;0
-    - ecoskills:magnetic_rod;INT;0
-    - ecoskills:fishing_xp;DOUBLE;0.0
-    - ecoskills:farming_xp;DOUBLE;0.0
-    - ecoskills:speed;INT;0
-    - ecoskills:potionmaster;INT;0
-    - ecoskills:combat_xp;DOUBLE;0.0
-    - ecoskills:eye_of_the_depths;INT;0
-    - ecoskills:ferocity;INT;0
-    - ecoskills:combat;INT;0
-    - ecoskills:mining_xp;DOUBLE;0.0
-    - ecoskills:satiation;INT;0
-    - ecoskills:craftsmanship;INT;0
-    - ecoskills:crit_chance;INT;0
-    - ecoskills:dynamic_mining;INT;0
-    - ecoskills:exploration;INT;0
-    - boosters:1_5sell_multiplier;INT;0
-    - ecoskills:enchanting;INT;0
-    - ecoskills:endangering;INT;0
-    - ecoskills:serrated_strikes;INT;0
-    - ecoskills:exploration_xp;DOUBLE;0.0
-    - ecoskills:farming;INT;0
-  server:
-    - 'talismans:known_points;STRING;'
-    - 'ecoarmor:known_points;STRING;'
-    - 'ecoenchants:known_points;STRING;'
-    - 'ecoitems:known_points;STRING;'
-    - 'boosters:known_points;STRING;'
-    - 'reforges:known_points;STRING;'
@@ -1,3 +1,3 @@
-version = 6.39.1
+version = 6.40.0
 plugin-name = eco
 kotlin.code.style = official