Implemented new data backend

This commit is contained in:
Auxilor
2024-08-24 17:46:08 +01:00
parent fd031e21f5
commit e87b7ceb77
26 changed files with 386 additions and 878 deletions

View File

@@ -1,7 +1,6 @@
package com.willfp.eco.core.data.handlers; package com.willfp.eco.core.data.handlers;
import com.willfp.eco.core.data.keys.PersistentDataKey; import com.willfp.eco.core.data.keys.PersistentDataKey;
import com.willfp.eco.core.data.keys.PersistentDataKeyType;
import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable; import org.jetbrains.annotations.Nullable;

View File

@@ -16,9 +16,12 @@ import java.util.concurrent.Executors;
import java.util.concurrent.Future; import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
/**
* Handles persistent data.
*/
public abstract class PersistentDataHandler implements Registrable { public abstract class PersistentDataHandler implements Registrable {
/** /**
* The id of the handler. * The id.
*/ */
private final String id; private final String id;
@@ -30,7 +33,7 @@ public abstract class PersistentDataHandler implements Registrable {
/** /**
* Create a new persistent data handler. * Create a new persistent data handler.
* *
* @param id The id of the handler. * @param id The id.
*/ */
protected PersistentDataHandler(@NotNull final String id) { protected PersistentDataHandler(@NotNull final String id) {
this.id = id; this.id = id;
@@ -134,11 +137,10 @@ public abstract class PersistentDataHandler implements Registrable {
/** /**
* Load profile data. * Load profile data.
* *
* @param data The data. * @param profile The profile.
*/ */
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public final void loadProfileData(@NotNull Set<SerializedProfile> data) { public final void loadSerializedProfile(@NotNull final SerializedProfile profile) {
for (SerializedProfile profile : data) {
for (Map.Entry<PersistentDataKey<?>, Object> entry : profile.data().entrySet()) { for (Map.Entry<PersistentDataKey<?>, Object> entry : profile.data().entrySet()) {
PersistentDataKey<?> key = entry.getKey(); PersistentDataKey<?> key = entry.getKey();
Object value = entry.getValue(); Object value = entry.getValue();
@@ -147,13 +149,12 @@ public abstract class PersistentDataHandler implements Registrable {
write(profile.uuid(), (PersistentDataKey<? super Object>) key, value); write(profile.uuid(), (PersistentDataKey<? super Object>) key, value);
} }
} }
}
/** /**
* Await outstanding writes. * Await outstanding writes.
*/ */
public final void awaitOutstandingWrites() throws InterruptedException { public final void awaitOutstandingWrites() throws InterruptedException {
boolean success = executor.awaitTermination(15, TimeUnit.SECONDS); boolean success = executor.awaitTermination(2, TimeUnit.MINUTES);
if (!success) { if (!success) {
throw new InterruptedException("Failed to await outstanding writes"); throw new InterruptedException("Failed to await outstanding writes");
@@ -161,27 +162,22 @@ public abstract class PersistentDataHandler implements Registrable {
} }
@Override @Override
public final @NotNull String getID() { @NotNull
public final String getID() {
return id; return id;
} }
@Override @Override
public final boolean equals(@Nullable final Object obj) { public boolean equals(@NotNull final Object obj) {
if (this == obj) { if (!(obj instanceof PersistentDataHandler other)) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false; return false;
} }
PersistentDataHandler that = (PersistentDataHandler) obj; return other.getClass().equals(this.getClass());
return id.equals(that.id);
} }
@Override @Override
public final int hashCode() { public int hashCode() {
return id.hashCode(); return this.getClass().hashCode();
} }
} }

View File

@@ -1,42 +0,0 @@
package com.willfp.eco.core.data.handlers;
import com.willfp.eco.core.registry.Registry;
import org.jetbrains.annotations.NotNull;
/**
* Utility class to manage persistent data handlers.
*/
public final class PersistentDataHandlers {
private static final Registry<PersistentDataHandler> REGISTRY = new Registry<>();
/**
* Register a persistent data handler.
*
* @param handler The handler.
*/
public static void register(@NotNull final PersistentDataHandler handler) {
REGISTRY.register(handler);
}
/**
* Get a persistent data handler by id.
*
* @param id The id.
* @return The handler.
* @throws IllegalArgumentException if no handler with that id is found.
*/
@NotNull
public static PersistentDataHandler get(@NotNull final String id) {
PersistentDataHandler handler = REGISTRY.get(id);
if (handler == null) {
throw new IllegalArgumentException("No handler with id: " + id);
}
return handler;
}
private PersistentDataHandlers() {
throw new UnsupportedOperationException("This is a utility class and cannot be instantiated");
}
}

View File

@@ -4,7 +4,6 @@ import com.willfp.eco.core.Eco
import com.willfp.eco.core.EcoPlugin import com.willfp.eco.core.EcoPlugin
import com.willfp.eco.core.PluginLike import com.willfp.eco.core.PluginLike
import com.willfp.eco.core.PluginProps import com.willfp.eco.core.PluginProps
import com.willfp.eco.core.Prerequisite
import com.willfp.eco.core.command.CommandBase import com.willfp.eco.core.command.CommandBase
import com.willfp.eco.core.command.PluginCommandBase import com.willfp.eco.core.command.PluginCommandBase
import com.willfp.eco.core.config.ConfigType import com.willfp.eco.core.config.ConfigType
@@ -44,8 +43,7 @@ import com.willfp.eco.internal.proxy.EcoProxyFactory
import com.willfp.eco.internal.scheduling.EcoScheduler import com.willfp.eco.internal.scheduling.EcoScheduler
import com.willfp.eco.internal.spigot.data.DataYml import com.willfp.eco.internal.spigot.data.DataYml
import com.willfp.eco.internal.spigot.data.KeyRegistry import com.willfp.eco.internal.spigot.data.KeyRegistry
import com.willfp.eco.internal.spigot.data.ProfileHandler import com.willfp.eco.internal.spigot.data.profiles.ProfileHandler
import com.willfp.eco.internal.spigot.data.storage.HandlerType
import com.willfp.eco.internal.spigot.integrations.bstats.MetricHandler import com.willfp.eco.internal.spigot.integrations.bstats.MetricHandler
import com.willfp.eco.internal.spigot.math.DelegatedExpressionHandler import com.willfp.eco.internal.spigot.math.DelegatedExpressionHandler
import com.willfp.eco.internal.spigot.math.ImmediatePlaceholderTranslationExpressionHandler import com.willfp.eco.internal.spigot.math.ImmediatePlaceholderTranslationExpressionHandler
@@ -74,7 +72,7 @@ import org.bukkit.inventory.ItemStack
import org.bukkit.inventory.meta.SkullMeta import org.bukkit.inventory.meta.SkullMeta
import org.bukkit.persistence.PersistentDataContainer import org.bukkit.persistence.PersistentDataContainer
import java.net.URLClassLoader import java.net.URLClassLoader
import java.util.* import java.util.UUID
private val loadedEcoPlugins = mutableMapOf<String, EcoPlugin>() private val loadedEcoPlugins = mutableMapOf<String, EcoPlugin>()
@@ -82,10 +80,7 @@ private val loadedEcoPlugins = mutableMapOf<String, EcoPlugin>()
class EcoImpl : EcoSpigotPlugin(), Eco { class EcoImpl : EcoSpigotPlugin(), Eco {
override val dataYml = DataYml(this) override val dataYml = DataYml(this)
override val profileHandler = ProfileHandler( override val profileHandler = ProfileHandler(this)
HandlerType.valueOf(this.configYml.getString("data-handler").uppercase()),
this
)
init { init {
getProxy(CommonsInitializerProxy::class.java).init(this) getProxy(CommonsInitializerProxy::class.java).init(this)
@@ -290,10 +285,10 @@ class EcoImpl : EcoSpigotPlugin(), Eco {
bukkitAudiences bukkitAudiences
override fun getServerProfile() = override fun getServerProfile() =
profileHandler.loadServerProfile() profileHandler.getServerProfile()
override fun loadPlayerProfile(uuid: UUID) = override fun loadPlayerProfile(uuid: UUID) =
profileHandler.load(uuid) profileHandler.getPlayerProfile(uuid)
override fun createDummyEntity(location: Location): Entity = override fun createDummyEntity(location: Location): Entity =
getProxy(DummyEntityFactoryProxy::class.java).createDummyEntity(location) getProxy(DummyEntityFactoryProxy::class.java).createDummyEntity(location)

View File

@@ -17,7 +17,6 @@ import com.willfp.eco.core.integrations.mcmmo.McmmoManager
import com.willfp.eco.core.integrations.placeholder.PlaceholderManager import com.willfp.eco.core.integrations.placeholder.PlaceholderManager
import com.willfp.eco.core.integrations.shop.ShopManager import com.willfp.eco.core.integrations.shop.ShopManager
import com.willfp.eco.core.items.Items import com.willfp.eco.core.items.Items
import com.willfp.eco.core.items.tag.VanillaItemTag
import com.willfp.eco.core.packet.PacketListener import com.willfp.eco.core.packet.PacketListener
import com.willfp.eco.core.particle.Particles import com.willfp.eco.core.particle.Particles
import com.willfp.eco.core.price.Prices import com.willfp.eco.core.price.Prices
@@ -62,11 +61,10 @@ import com.willfp.eco.internal.price.PriceFactoryXP
import com.willfp.eco.internal.price.PriceFactoryXPLevels import com.willfp.eco.internal.price.PriceFactoryXPLevels
import com.willfp.eco.internal.recipes.AutocrafterPatch import com.willfp.eco.internal.recipes.AutocrafterPatch
import com.willfp.eco.internal.spigot.arrows.ArrowDataListener import com.willfp.eco.internal.spigot.arrows.ArrowDataListener
import com.willfp.eco.internal.spigot.data.DataListener
import com.willfp.eco.internal.spigot.data.DataYml import com.willfp.eco.internal.spigot.data.DataYml
import com.willfp.eco.internal.spigot.data.PlayerBlockListener import com.willfp.eco.internal.spigot.data.PlayerBlockListener
import com.willfp.eco.internal.spigot.data.ProfileHandler import com.willfp.eco.internal.spigot.data.profiles.ProfileHandler
import com.willfp.eco.internal.spigot.data.storage.ProfileSaver import com.willfp.eco.internal.spigot.data.profiles.ProfileLoadListener
import com.willfp.eco.internal.spigot.drops.CollatedRunnable import com.willfp.eco.internal.spigot.drops.CollatedRunnable
import com.willfp.eco.internal.spigot.eventlisteners.EntityDeathByEntityListeners import com.willfp.eco.internal.spigot.eventlisteners.EntityDeathByEntityListeners
import com.willfp.eco.internal.spigot.eventlisteners.NaturalExpGainListenersPaper import com.willfp.eco.internal.spigot.eventlisteners.NaturalExpGainListenersPaper
@@ -259,9 +257,6 @@ abstract class EcoSpigotPlugin : EcoPlugin() {
// Init FIS // Init FIS
this.getProxy(FastItemStackFactoryProxy::class.java).create(ItemStack(Material.AIR)).unwrap() this.getProxy(FastItemStackFactoryProxy::class.java).create(ItemStack(Material.AIR)).unwrap()
// Preload categorized persistent data keys
profileHandler.initialize()
// Init adventure // Init adventure
if (!Prerequisite.HAS_PAPER.isMet) { if (!Prerequisite.HAS_PAPER.isMet) {
bukkitAudiences = BukkitAudiences.create(this) bukkitAudiences = BukkitAudiences.create(this)
@@ -282,14 +277,11 @@ abstract class EcoSpigotPlugin : EcoPlugin() {
override fun createTasks() { override fun createTasks() {
CollatedRunnable(this) CollatedRunnable(this)
this.scheduler.runLater(3) { if (!profileHandler.migrateIfNecessary()) {
profileHandler.migrateIfNeeded() profileHandler.profileWriter.startTickingAutosave()
profileHandler.profileWriter.startTickingSaves()
} }
profileHandler.startAutosaving()
ProfileSaver(this, profileHandler).startTicking()
this.scheduler.runTimer( this.scheduler.runTimer(
this.configYml.getInt("display-frame-ttl").toLong(), this.configYml.getInt("display-frame-ttl").toLong(),
this.configYml.getInt("display-frame-ttl").toLong(), this.configYml.getInt("display-frame-ttl").toLong(),
@@ -428,7 +420,7 @@ abstract class EcoSpigotPlugin : EcoPlugin() {
GUIListener(this), GUIListener(this),
ArrowDataListener(this), ArrowDataListener(this),
ArmorChangeEventListeners(this), ArmorChangeEventListeners(this),
DataListener(this, profileHandler), ProfileLoadListener(this, profileHandler),
PlayerBlockListener(this), PlayerBlockListener(this),
ServerLocking ServerLocking
) )

View File

@@ -1,110 +0,0 @@
package com.willfp.eco.internal.spigot.data
import com.willfp.eco.core.EcoPlugin
import com.willfp.eco.core.data.PlayerProfile
import com.willfp.eco.core.data.Profile
import com.willfp.eco.core.data.ServerProfile
import com.willfp.eco.core.data.keys.PersistentDataKey
import com.willfp.eco.core.data.keys.PersistentDataKeyType
import com.willfp.eco.internal.spigot.data.storage.DataHandler
import com.willfp.eco.util.namespacedKeyOf
import java.util.UUID
import java.util.concurrent.ConcurrentHashMap
abstract class EcoProfile(
val data: MutableMap<PersistentDataKey<*>, Any>,
val uuid: UUID,
private val handler: DataHandler,
private val localHandler: DataHandler
) : Profile {
override fun <T : Any> write(key: PersistentDataKey<T>, value: T) {
this.data[key] = value
CHANGE_MAP.add(uuid)
}
override fun <T : Any> read(key: PersistentDataKey<T>): T {
@Suppress("UNCHECKED_CAST")
if (this.data.containsKey(key)) {
return this.data[key] as T
}
this.data[key] = if (key.isSavedLocally) {
localHandler.read(uuid, key)
} else {
handler.read(uuid, key)
} ?: key.defaultValue
return read(key)
}
override fun equals(other: Any?): Boolean {
if (other !is EcoProfile) {
return false
}
return this.uuid == other.uuid
}
override fun hashCode(): Int {
return this.uuid.hashCode()
}
companion object {
val CHANGE_MAP: MutableSet<UUID> = ConcurrentHashMap.newKeySet()
}
}
class EcoPlayerProfile(
data: MutableMap<PersistentDataKey<*>, Any>,
uuid: UUID,
handler: DataHandler,
localHandler: DataHandler
) : EcoProfile(data, uuid, handler, localHandler), PlayerProfile {
override fun toString(): String {
return "EcoPlayerProfile{uuid=$uuid}"
}
}
private val serverIDKey = PersistentDataKey(
namespacedKeyOf("eco", "server_id"),
PersistentDataKeyType.STRING,
""
)
private val localServerIDKey = PersistentDataKey(
namespacedKeyOf("eco", "local_server_id"),
PersistentDataKeyType.STRING,
""
)
class EcoServerProfile(
data: MutableMap<PersistentDataKey<*>, Any>,
handler: DataHandler,
localHandler: DataHandler
) : EcoProfile(data, serverProfileUUID, handler, localHandler), ServerProfile {
override fun getServerID(): String {
if (this.read(serverIDKey).isBlank()) {
this.write(serverIDKey, UUID.randomUUID().toString())
}
return this.read(serverIDKey)
}
override fun getLocalServerID(): String {
if (this.read(localServerIDKey).isBlank()) {
this.write(localServerIDKey, UUID.randomUUID().toString())
}
return this.read(localServerIDKey)
}
override fun toString(): String {
return "EcoServerProfile"
}
}
private val PersistentDataKey<*>.isSavedLocally: Boolean
get() = this == localServerIDKey
|| EcoPlugin.getPlugin(this.key.namespace)?.isUsingLocalStorage == true
|| this.isLocal

View File

@@ -19,8 +19,8 @@ object KeyRegistry {
this.registry[key.key] = key this.registry[key.key] = key
} }
fun getRegisteredKeys(): MutableSet<PersistentDataKey<*>> { fun getRegisteredKeys(): Set<PersistentDataKey<*>> {
return registry.values.toMutableSet() return registry.values.toSet()
} }
private fun <T> validateKey(key: PersistentDataKey<T>) { private fun <T> validateKey(key: PersistentDataKey<T>) {

View File

@@ -1,185 +0,0 @@
package com.willfp.eco.internal.spigot.data
import com.willfp.eco.core.data.PlayerProfile
import com.willfp.eco.core.data.Profile
import com.willfp.eco.core.data.ServerProfile
import com.willfp.eco.core.data.keys.PersistentDataKey
import com.willfp.eco.core.data.profile
import com.willfp.eco.internal.spigot.EcoSpigotPlugin
import com.willfp.eco.internal.spigot.ServerLocking
import com.willfp.eco.internal.spigot.data.storage.DataHandler
import com.willfp.eco.internal.spigot.data.storage.HandlerType
import com.willfp.eco.internal.spigot.data.storage.MongoDataHandler
import com.willfp.eco.internal.spigot.data.storage.MySQLDataHandler
import com.willfp.eco.internal.spigot.data.storage.YamlDataHandler
import org.bukkit.Bukkit
import java.util.UUID
val serverProfileUUID = UUID(0, 0)
class ProfileHandler(
private val type: HandlerType,
private val plugin: EcoSpigotPlugin
) {
private val loaded = mutableMapOf<UUID, EcoProfile>()
private val localHandler = YamlDataHandler(plugin, this)
val handler: DataHandler = when (type) {
HandlerType.YAML -> localHandler
HandlerType.MYSQL -> MySQLDataHandler(plugin, this)
HandlerType.MONGO -> MongoDataHandler(plugin, this)
}
fun accessLoadedProfile(uuid: UUID): EcoProfile? =
loaded[uuid]
fun loadGenericProfile(uuid: UUID): Profile {
val found = loaded[uuid]
if (found != null) {
return found
}
val data = mutableMapOf<PersistentDataKey<*>, Any>()
val profile = if (uuid == serverProfileUUID)
EcoServerProfile(data, handler, localHandler) else EcoPlayerProfile(data, uuid, handler, localHandler)
loaded[uuid] = profile
return profile
}
fun load(uuid: UUID): PlayerProfile {
return loadGenericProfile(uuid) as PlayerProfile
}
fun loadServerProfile(): ServerProfile {
return loadGenericProfile(serverProfileUUID) as ServerProfile
}
fun saveKeysFor(uuid: UUID, keys: Set<PersistentDataKey<*>>) {
val profile = accessLoadedProfile(uuid) ?: return
val map = mutableMapOf<PersistentDataKey<*>, Any>()
for (key in keys) {
map[key] = profile.data[key] ?: continue
}
handler.saveKeysFor(uuid, map)
// Don't save to local handler if it's the same handler.
if (localHandler != handler) {
localHandler.saveKeysFor(uuid, map)
}
}
fun unloadPlayer(uuid: UUID) {
loaded.remove(uuid)
}
fun save() {
handler.save()
if (localHandler != handler) {
localHandler.save()
}
}
fun migrateIfNeeded() {
if (!plugin.configYml.getBool("perform-data-migration")) {
return
}
if (!plugin.dataYml.has("previous-handler")) {
plugin.dataYml.set("previous-handler", type.name)
plugin.dataYml.save()
}
val previousHandlerType = HandlerType.valueOf(plugin.dataYml.getString("previous-handler"))
if (previousHandlerType == type) {
return
}
val previousHandler = when (previousHandlerType) {
HandlerType.YAML -> YamlDataHandler(plugin, this)
HandlerType.MYSQL -> MySQLDataHandler(plugin, this)
HandlerType.MONGO -> MongoDataHandler(plugin, this)
}
ServerLocking.lock("Migrating player data! Check console for more information.")
plugin.logger.info("eco has detected a change in data handler!")
plugin.logger.info("Migrating server data from ${previousHandlerType.name} to ${type.name}")
plugin.logger.info("This will take a while!")
plugin.logger.info("Initializing previous handler...")
previousHandler.initialize()
val players = Bukkit.getOfflinePlayers().map { it.uniqueId }
plugin.logger.info("Found data for ${players.size} players!")
/*
Declared here as its own function to be able to use T.
*/
fun <T : Any> migrateKey(uuid: UUID, key: PersistentDataKey<T>, from: DataHandler, to: DataHandler) {
val previous: T? = from.read(uuid, key)
if (previous != null) {
Bukkit.getOfflinePlayer(uuid).profile.write(key, previous) // Nope, no idea.
to.write(uuid, key, previous)
}
}
var i = 1
for (uuid in players) {
plugin.logger.info("Migrating data for $uuid... ($i / ${players.size})")
for (key in PersistentDataKey.values()) {
// Why this? Because known points *really* likes to break things with the legacy MySQL handler.
if (key.key.key == "known_points") {
continue
}
try {
migrateKey(uuid, key, previousHandler, handler)
} catch (e: Exception) {
plugin.logger.info("Could not migrate ${key.key} for $uuid! This is probably because they do not have any data.")
}
}
i++
}
plugin.logger.info("Saving new data...")
handler.save()
plugin.logger.info("Updating previous handler...")
plugin.dataYml.set("previous-handler", type.name)
plugin.dataYml.save()
plugin.logger.info("The server will now automatically be restarted...")
ServerLocking.unlock()
Bukkit.getServer().shutdown()
}
fun initialize() {
handler.initialize()
if (localHandler != handler) {
localHandler.initialize()
}
}
fun startAutosaving() {
if (!plugin.configYml.getBool("yaml.autosave")) {
return
}
val interval = plugin.configYml.getInt("yaml.autosave-interval") * 20L
plugin.scheduler.runTimer(20, interval) {
handler.saveAsync()
localHandler.saveAsync()
}
}
}

View File

@@ -0,0 +1,34 @@
package com.willfp.eco.internal.spigot.data.handlers
import com.willfp.eco.core.data.handlers.PersistentDataHandler
import com.willfp.eco.core.registry.KRegistrable
import com.willfp.eco.core.registry.Registry
import com.willfp.eco.internal.spigot.EcoSpigotPlugin
import com.willfp.eco.internal.spigot.data.handlers.impl.MongoPersistentDataHandler
import com.willfp.eco.internal.spigot.data.handlers.impl.MySQLPersistentDataHandler
import com.willfp.eco.internal.spigot.data.handlers.impl.YamlPersistentDataHandler
abstract class PersistentDataHandlerFactory(
override val id: String
): KRegistrable {
abstract fun create(plugin: EcoSpigotPlugin): PersistentDataHandler
}
object PersistentDataHandlers: Registry<PersistentDataHandlerFactory>() {
init {
register(object : PersistentDataHandlerFactory("yaml") {
override fun create(plugin: EcoSpigotPlugin) =
YamlPersistentDataHandler(plugin)
})
register(object : PersistentDataHandlerFactory("mysql") {
override fun create(plugin: EcoSpigotPlugin) =
MySQLPersistentDataHandler(plugin, plugin.configYml.getSubsection("mysql"))
})
register(object : PersistentDataHandlerFactory("mongo") {
override fun create(plugin: EcoSpigotPlugin) =
MongoPersistentDataHandler(plugin, plugin.configYml.getSubsection("mongodb"))
})
}
}

View File

@@ -1,7 +1,6 @@
package com.willfp.eco.internal.spigot.data.handlers package com.willfp.eco.internal.spigot.data.handlers.impl
import com.willfp.eco.core.config.ConfigType import com.willfp.eco.core.config.ConfigType
import com.willfp.eco.core.config.Configs
import com.willfp.eco.core.config.interfaces.Config import com.willfp.eco.core.config.interfaces.Config
import com.willfp.eco.core.config.readConfig import com.willfp.eco.core.config.readConfig
import com.willfp.eco.core.data.handlers.DataTypeSerializer import com.willfp.eco.core.data.handlers.DataTypeSerializer
@@ -9,23 +8,12 @@ import com.willfp.eco.core.data.handlers.PersistentDataHandler
import com.willfp.eco.core.data.keys.PersistentDataKey import com.willfp.eco.core.data.keys.PersistentDataKey
import com.willfp.eco.core.data.keys.PersistentDataKeyType import com.willfp.eco.core.data.keys.PersistentDataKeyType
import com.willfp.eco.internal.spigot.EcoSpigotPlugin import com.willfp.eco.internal.spigot.EcoSpigotPlugin
import com.willfp.eco.internal.spigot.data.handlers.PersistentDataHandlerFactory
import com.zaxxer.hikari.HikariConfig import com.zaxxer.hikari.HikariConfig
import com.zaxxer.hikari.HikariDataSource import com.zaxxer.hikari.HikariDataSource
import eu.decentsoftware.holograms.api.utils.scheduler.S
import kotlinx.serialization.Contextual
import kotlinx.serialization.SerialName
import kotlinx.serialization.Serializable
import org.jetbrains.exposed.dao.id.UUIDTable import org.jetbrains.exposed.dao.id.UUIDTable
import org.jetbrains.exposed.sql.Column
import org.jetbrains.exposed.sql.Database import org.jetbrains.exposed.sql.Database
import org.jetbrains.exposed.sql.SchemaUtils import org.jetbrains.exposed.sql.SchemaUtils
import org.jetbrains.exposed.sql.Table
import org.jetbrains.exposed.sql.Table.Dual.decimal
import org.jetbrains.exposed.sql.Table.Dual.double
import org.jetbrains.exposed.sql.Table.Dual.varchar
import org.jetbrains.exposed.sql.and
import org.jetbrains.exposed.sql.deleteWhere
import org.jetbrains.exposed.sql.insert
import org.jetbrains.exposed.sql.select import org.jetbrains.exposed.sql.select
import org.jetbrains.exposed.sql.selectAll import org.jetbrains.exposed.sql.selectAll
import org.jetbrains.exposed.sql.transactions.transaction import org.jetbrains.exposed.sql.transactions.transaction
@@ -35,7 +23,7 @@ import java.util.UUID
class LegacyMySQLPersistentDataHandler( class LegacyMySQLPersistentDataHandler(
plugin: EcoSpigotPlugin, plugin: EcoSpigotPlugin,
config: Config config: Config
) : PersistentDataHandler("mysql_legacy") { ) : PersistentDataHandler("legacy_mysql") {
private val dataSource = HikariDataSource(HikariConfig().apply { private val dataSource = HikariDataSource(HikariConfig().apply {
driverClassName = "com.mysql.cj.jdbc.Driver" driverClassName = "com.mysql.cj.jdbc.Driver"
username = config.getString("user") username = config.getString("user")
@@ -110,4 +98,10 @@ class LegacyMySQLPersistentDataHandler(
throw UnsupportedOperationException("Legacy MySQL does not support writing") throw UnsupportedOperationException("Legacy MySQL does not support writing")
} }
} }
object Factory: PersistentDataHandlerFactory("legacy_mysql") {
override fun create(plugin: EcoSpigotPlugin): PersistentDataHandler {
return LegacyMySQLPersistentDataHandler(plugin, plugin.configYml.getSubsection("mysql"))
}
}
} }

View File

@@ -1,4 +1,4 @@
package com.willfp.eco.internal.spigot.data.handlers package com.willfp.eco.internal.spigot.data.handlers.impl
import com.mongodb.client.model.Filters import com.mongodb.client.model.Filters
import com.mongodb.client.model.ReplaceOptions import com.mongodb.client.model.ReplaceOptions
@@ -35,16 +35,7 @@ class MongoPersistentDataHandler(
PersistentDataKeyType.INT.registerSerializer(this, MongoSerializer<Int>()) PersistentDataKeyType.INT.registerSerializer(this, MongoSerializer<Int>())
PersistentDataKeyType.DOUBLE.registerSerializer(this, MongoSerializer<Double>()) PersistentDataKeyType.DOUBLE.registerSerializer(this, MongoSerializer<Double>())
PersistentDataKeyType.STRING_LIST.registerSerializer(this, MongoSerializer<List<String>>()) PersistentDataKeyType.STRING_LIST.registerSerializer(this, MongoSerializer<List<String>>())
PersistentDataKeyType.BIG_DECIMAL.registerSerializer(this, MongoSerializer<BigDecimal>())
PersistentDataKeyType.BIG_DECIMAL.registerSerializer(this, object : MongoSerializer<BigDecimal>() {
override fun convertToMongo(value: BigDecimal): Any {
return value.toString()
}
override fun convertFromMongo(value: Any): BigDecimal {
return BigDecimal(value.toString())
}
})
PersistentDataKeyType.CONFIG.registerSerializer(this, object : MongoSerializer<Config>() { PersistentDataKeyType.CONFIG.registerSerializer(this, object : MongoSerializer<Config>() {
override fun convertToMongo(value: Config): Any { override fun convertToMongo(value: Config): Any {

View File

@@ -1,4 +1,4 @@
package com.willfp.eco.internal.spigot.data.handlers package com.willfp.eco.internal.spigot.data.handlers.impl
import com.willfp.eco.core.config.ConfigType import com.willfp.eco.core.config.ConfigType
import com.willfp.eco.core.config.Configs import com.willfp.eco.core.config.Configs
@@ -11,19 +11,10 @@ import com.willfp.eco.core.data.keys.PersistentDataKeyType
import com.willfp.eco.internal.spigot.EcoSpigotPlugin import com.willfp.eco.internal.spigot.EcoSpigotPlugin
import com.zaxxer.hikari.HikariConfig import com.zaxxer.hikari.HikariConfig
import com.zaxxer.hikari.HikariDataSource import com.zaxxer.hikari.HikariDataSource
import eu.decentsoftware.holograms.api.utils.scheduler.S
import kotlinx.coroutines.flow.toList
import kotlinx.coroutines.runBlocking
import kotlinx.serialization.Contextual
import kotlinx.serialization.SerialName
import kotlinx.serialization.Serializable
import org.jetbrains.exposed.dao.id.UUIDTable
import org.jetbrains.exposed.sql.Column import org.jetbrains.exposed.sql.Column
import org.jetbrains.exposed.sql.Database import org.jetbrains.exposed.sql.Database
import org.jetbrains.exposed.sql.SchemaUtils import org.jetbrains.exposed.sql.SchemaUtils
import org.jetbrains.exposed.sql.SqlExpressionBuilder.eq
import org.jetbrains.exposed.sql.Table import org.jetbrains.exposed.sql.Table
import org.jetbrains.exposed.sql.TextColumnType
import org.jetbrains.exposed.sql.and import org.jetbrains.exposed.sql.and
import org.jetbrains.exposed.sql.deleteWhere import org.jetbrains.exposed.sql.deleteWhere
import org.jetbrains.exposed.sql.insert import org.jetbrains.exposed.sql.insert

View File

@@ -1,9 +1,8 @@
package com.willfp.eco.internal.spigot.data.handlers package com.willfp.eco.internal.spigot.data.handlers.impl
import com.willfp.eco.core.config.interfaces.Config import com.willfp.eco.core.config.interfaces.Config
import com.willfp.eco.core.data.handlers.DataTypeSerializer import com.willfp.eco.core.data.handlers.DataTypeSerializer
import com.willfp.eco.core.data.handlers.PersistentDataHandler import com.willfp.eco.core.data.handlers.PersistentDataHandler
import com.willfp.eco.core.data.handlers.SerializedProfile
import com.willfp.eco.core.data.keys.PersistentDataKey import com.willfp.eco.core.data.keys.PersistentDataKey
import com.willfp.eco.core.data.keys.PersistentDataKeyType import com.willfp.eco.core.data.keys.PersistentDataKeyType
import com.willfp.eco.internal.spigot.EcoSpigotPlugin import com.willfp.eco.internal.spigot.EcoSpigotPlugin

View File

@@ -0,0 +1,126 @@
package com.willfp.eco.internal.spigot.data.profiles
import com.willfp.eco.internal.spigot.EcoSpigotPlugin
import com.willfp.eco.internal.spigot.ServerLocking
import com.willfp.eco.internal.spigot.data.KeyRegistry
import com.willfp.eco.internal.spigot.data.handlers.PersistentDataHandlerFactory
import com.willfp.eco.internal.spigot.data.handlers.PersistentDataHandlers
import com.willfp.eco.internal.spigot.data.handlers.impl.LegacyMySQLPersistentDataHandler
import com.willfp.eco.internal.spigot.data.handlers.impl.YamlPersistentDataHandler
import com.willfp.eco.internal.spigot.data.profiles.impl.EcoPlayerProfile
import com.willfp.eco.internal.spigot.data.profiles.impl.EcoProfile
import com.willfp.eco.internal.spigot.data.profiles.impl.EcoServerProfile
import com.willfp.eco.internal.spigot.data.profiles.impl.serverProfileUUID
import java.util.UUID
import java.util.concurrent.ConcurrentHashMap
/**
 * Owns the profile lifecycle: caches player/server profiles, coordinates the
 * local (yaml) and configured default data handlers, and migrates data between
 * handlers when the configured handler changes.
 */
class ProfileHandler(
    private val plugin: EcoSpigotPlugin
) {
    // ID of the configured default data handler, read from data.yml.
    private val handlerId = plugin.dataYml.getString("data-handler")

    // Handler for keys that are saved locally; always yaml-backed.
    val localHandler = YamlPersistentDataHandler(plugin)

    // Handler for all other keys, resolved from the registry by id.
    val defaultHandler = PersistentDataHandlers[handlerId]
        ?.create(plugin) ?: throw IllegalArgumentException("Invalid data handler ($handlerId)")

    // Batches value writes to the handlers (see ProfileWriter).
    val profileWriter = ProfileWriter(plugin, this)

    // Loaded profiles keyed by UUID; the server profile lives under serverProfileUUID.
    private val loaded = ConcurrentHashMap<UUID, EcoProfile>()

    /**
     * Get (or lazily create) a player's profile.
     *
     * @param uuid The player UUID.
     * @return The profile.
     */
    fun getPlayerProfile(uuid: UUID): EcoPlayerProfile {
        return loaded.computeIfAbsent(uuid) {
            EcoPlayerProfile(it, this)
        } as EcoPlayerProfile
    }

    /**
     * Get (or lazily create) the server profile.
     *
     * @return The server profile.
     */
    fun getServerProfile(): EcoServerProfile {
        return loaded.computeIfAbsent(serverProfileUUID) {
            EcoServerProfile(this)
        } as EcoServerProfile
    }

    /**
     * Drop a profile from the cache; later reads re-load values from the handlers.
     *
     * @param uuid The profile UUID.
     */
    fun unloadProfile(uuid: UUID) {
        loaded.remove(uuid)
    }

    /**
     * Save both handlers and block until all outstanding writes have completed.
     */
    fun save() {
        localHandler.save()
        defaultHandler.save()

        localHandler.awaitOutstandingWrites()
        defaultHandler.awaitOutstandingWrites()
    }

    /**
     * Schedule a data migration if the configured handler has changed.
     *
     * @return true if a migration was scheduled (the server restarts afterwards).
     */
    fun migrateIfNecessary(): Boolean {
        if (!plugin.configYml.getBool("perform-data-migration")) {
            return false
        }

        // First boot with this system: just record the current handler.
        if (!plugin.dataYml.has("previous-handler")) {
            plugin.dataYml.set("previous-handler", defaultHandler.id)
            plugin.dataYml.save()
            return false
        }

        if (defaultHandler.id == "mysql" && !plugin.dataYml.getBool("legacy-mysql-migrated")) {
            plugin.logger.info("eco has detected a legacy MySQL database. Migrating to new MySQL database...")

            // NOTE(review): the migrated flag is persisted before the scheduled
            // migration actually runs — a crash mid-migration would skip a retry.
            scheduleMigration(LegacyMySQLPersistentDataHandler.Factory)
            plugin.dataYml.set("legacy-mysql-migrated", true)
            plugin.dataYml.save()

            return true
        }

        val previousHandlerId = plugin.dataYml.getString("previous-handler")

        if (previousHandlerId != defaultHandler.id) {
            // If the previous handler id is unknown we cannot read from it; do nothing.
            val fromFactory = PersistentDataHandlers[previousHandlerId] ?: return false
            scheduleMigration(fromFactory)
            return true
        }

        return false
    }

    // Locks the server against joins and runs the migration shortly after startup.
    private fun scheduleMigration(fromFactory: PersistentDataHandlerFactory) {
        ServerLocking.lock("Migrating player data! Check console for more information.")

        // Run after 5 ticks to allow plugins to load their data keys
        plugin.scheduler.runLater(5) {
            doMigrate(fromFactory)
        }
    }

    // Copies all registered keys' data from the previous handler into the default
    // handler, records the new handler id, then shuts the server down.
    private fun doMigrate(fromFactory: PersistentDataHandlerFactory) {
        plugin.logger.info("eco has detected a change in data handler")
        plugin.logger.info("${fromFactory.id} --> $handlerId")
        plugin.logger.info("This will take a while! Players will not be able to join during this time.")

        val fromHandler = fromFactory.create(plugin)
        val toHandler = defaultHandler

        plugin.logger.info("Loading data from ${fromFactory.id}...")
        val serialized = fromHandler.serializeData(KeyRegistry.getRegisteredKeys())

        plugin.logger.info("Found ${serialized.size} profiles to migrate")

        for ((index, profile) in serialized.withIndex()) {
            plugin.logger.info("(${index + 1}/${serialized.size}) Migrating ${profile.uuid}")
            toHandler.loadSerializedProfile(profile)
        }

        plugin.logger.info("Profile writes submitted! Waiting for completion...")
        toHandler.awaitOutstandingWrites()

        plugin.logger.info("Updating previous handler...")
        plugin.dataYml.set("previous-handler", handlerId)
        plugin.dataYml.save()

        plugin.logger.info("The server will now automatically be restarted...")
        plugin.server.shutdown()
    }
}

View File

@@ -1,4 +1,4 @@
package com.willfp.eco.internal.spigot.data package com.willfp.eco.internal.spigot.data.profiles
import com.willfp.eco.core.EcoPlugin import com.willfp.eco.core.EcoPlugin
import com.willfp.eco.util.PlayerUtils import com.willfp.eco.util.PlayerUtils
@@ -9,15 +9,18 @@ import org.bukkit.event.player.PlayerJoinEvent
import org.bukkit.event.player.PlayerLoginEvent import org.bukkit.event.player.PlayerLoginEvent
import org.bukkit.event.player.PlayerQuitEvent import org.bukkit.event.player.PlayerQuitEvent
class DataListener( class ProfileLoadListener(
private val plugin: EcoPlugin, private val plugin: EcoPlugin,
private val handler: ProfileHandler private val handler: ProfileHandler
) : Listener { ) : Listener {
@EventHandler(priority = EventPriority.LOWEST)
fun onLogin(event: PlayerLoginEvent) {
handler.unloadProfile(event.player.uniqueId)
}
@EventHandler(priority = EventPriority.HIGHEST) @EventHandler(priority = EventPriority.HIGHEST)
fun onLeave(event: PlayerQuitEvent) { fun onLeave(event: PlayerQuitEvent) {
val profile = handler.accessLoadedProfile(event.player.uniqueId) ?: return handler.unloadProfile(event.player.uniqueId)
handler.saveKeysFor(event.player.uniqueId, profile.data.keys)
handler.unloadPlayer(event.player.uniqueId)
} }
@EventHandler @EventHandler
@@ -26,9 +29,4 @@ class DataListener(
PlayerUtils.updateSavedDisplayName(event.player) PlayerUtils.updateSavedDisplayName(event.player)
} }
} }
@EventHandler(priority = EventPriority.LOWEST)
fun onLogin(event: PlayerLoginEvent) {
handler.unloadPlayer(event.player.uniqueId)
}
} }

View File

@@ -0,0 +1,60 @@
package com.willfp.eco.internal.spigot.data.profiles
import com.willfp.eco.core.EcoPlugin
import com.willfp.eco.core.data.keys.PersistentDataKey
import com.willfp.eco.internal.spigot.data.profiles.impl.localServerIDKey
import java.util.UUID
import java.util.concurrent.ConcurrentHashMap
/*
The profile writer exists as an optimization to batch writes to the database.
This is necessary because values frequently change multiple times per tick,
and we don't want to write to the database every time a value changes.
Instead, we only commit the last value that was set every interval (default 1 tick).
*/
/**
 * Batches value writes to the data handlers.
 *
 * Values frequently change several times per tick; instead of hitting the
 * database on every change, only the latest value per (uuid, key) is committed
 * each save interval.
 */
class ProfileWriter(
    private val plugin: EcoPlugin,
    private val handler: ProfileHandler
) {
    // Ticks between flushes of pending writes to the data handlers.
    private val saveInterval = plugin.configYml.getInt("save-interval").toLong()

    // Ticks between autosaves of the local (yaml) handler.
    private val autosaveInterval = plugin.configYml.getInt("autosave-interval").toLong()

    // Latest pending value per (uuid, key); overwritten by repeated writes.
    private val valuesToWrite = ConcurrentHashMap<WriteRequest<*>, Any>()

    /**
     * Queue a value to be committed on the next flush.
     *
     * @param uuid The profile UUID.
     * @param key The key.
     * @param value The value.
     */
    fun <T : Any> write(uuid: UUID, key: PersistentDataKey<T>, value: T) {
        valuesToWrite[WriteRequest(uuid, key)] = value
    }

    /**
     * Start the repeating task that flushes pending writes to the handlers.
     */
    fun startTickingSaves() {
        plugin.scheduler.runTimer(20, saveInterval) {
            val iterator = valuesToWrite.keys.iterator()
            while (iterator.hasNext()) {
                val request = iterator.next()

                // Claim the latest value atomically via remove(key). Capturing the
                // entry's value and then removing it separately (as the previous
                // implementation did) could delete a newer value written between
                // the two calls and commit the stale one — a lost update.
                val value = valuesToWrite.remove(request) ?: continue

                val dataHandler = if (request.key.isSavedLocally) handler.localHandler else handler.defaultHandler

                // Pass the value to the data handler
                @Suppress("UNCHECKED_CAST")
                dataHandler.write(request.uuid, request.key as PersistentDataKey<Any>, value)
            }
        }
    }

    /**
     * Start the repeating task that autosaves the local (yaml) handler.
     */
    fun startTickingAutosave() {
        plugin.scheduler.runTimer(autosaveInterval, autosaveInterval) {
            if (handler.localHandler.shouldAutosave()) {
                handler.localHandler.save()
            }
        }
    }

    // Identity of a pending write: one slot per (uuid, key) pair.
    private data class WriteRequest<T>(val uuid: UUID, val key: PersistentDataKey<T>)
}
/**
 * Whether a key should be stored by the local (yaml) handler rather than the
 * configured default handler: either the key itself is marked local, or the
 * plugin owning the key's namespace has opted in to local storage.
 */
val PersistentDataKey<*>.isSavedLocally: Boolean
    get() = this.isLocal || EcoPlugin.getPlugin(this.key.namespace)?.isUsingLocalStorage == true

View File

@@ -0,0 +1,14 @@
package com.willfp.eco.internal.spigot.data.profiles.impl
import com.willfp.eco.core.data.PlayerProfile
import com.willfp.eco.internal.spigot.data.profiles.ProfileHandler
import java.util.UUID
/**
 * A single player's persistent data profile.
 */
class EcoPlayerProfile(
    uuid: UUID,
    handler: ProfileHandler
) : EcoProfile(uuid, handler), PlayerProfile {
    override fun toString() = "EcoPlayerProfile{uuid=$uuid}"
}

View File

@@ -0,0 +1,48 @@
package com.willfp.eco.internal.spigot.data.profiles.impl
import com.willfp.eco.core.data.Profile
import com.willfp.eco.core.data.keys.PersistentDataKey
import com.willfp.eco.internal.spigot.data.profiles.ProfileHandler
import com.willfp.eco.internal.spigot.data.profiles.isSavedLocally
import java.util.UUID
import java.util.concurrent.ConcurrentHashMap
/**
 * Base profile implementation backed by an in-memory cache.
 *
 * Reads fall through to the appropriate data handler once per key; writes go
 * into the cache immediately and are queued on the profile writer for batched
 * persistence. Profiles are equal iff their UUIDs are equal.
 */
abstract class EcoProfile(
    val uuid: UUID,
    private val handler: ProfileHandler
) : Profile {
    // Cached key values; ConcurrentHashMap never holds nulls.
    private val data = ConcurrentHashMap<PersistentDataKey<*>, Any>()

    override fun <T : Any> write(key: PersistentDataKey<T>, value: T) {
        data[key] = value
        handler.profileWriter.write(uuid, key, value)
    }

    @Suppress("UNCHECKED_CAST")
    override fun <T : Any> read(key: PersistentDataKey<T>): T {
        val cached = data[key]
        if (cached != null) {
            return cached as T
        }

        // Cache miss: pull from the handler that owns this key, falling back
        // to the key's default value, then remember the result.
        val stored = if (key.isSavedLocally) {
            handler.localHandler.read(uuid, key)
        } else {
            handler.defaultHandler.read(uuid, key)
        } ?: key.defaultValue

        data[key] = stored
        return stored as T
    }

    override fun equals(other: Any?): Boolean =
        other is EcoProfile && this.uuid == other.uuid

    override fun hashCode(): Int = uuid.hashCode()
}

View File

@@ -0,0 +1,47 @@
package com.willfp.eco.internal.spigot.data.profiles.impl
import com.willfp.eco.core.data.ServerProfile
import com.willfp.eco.core.data.keys.PersistentDataKey
import com.willfp.eco.core.data.keys.PersistentDataKeyType
import com.willfp.eco.internal.spigot.data.profiles.ProfileHandler
import com.willfp.eco.util.namespacedKeyOf
import java.util.UUID
// Server ID stored in the default (shared) data handler.
val serverIDKey = PersistentDataKey(
    namespacedKeyOf("eco", "server_id"),
    PersistentDataKeyType.STRING,
    ""
)

// Per-installation server ID; the trailing `true` marks the key as local,
// so it is stored by the yaml handler rather than the shared backend.
val localServerIDKey = PersistentDataKey(
    namespacedKeyOf("eco", "local_server_id"),
    PersistentDataKeyType.STRING,
    "",
    true
)

// Sentinel UUID under which the server profile is cached.
val serverProfileUUID = UUID(0, 0)
/**
 * The server-wide persistent data profile.
 *
 * Server IDs are generated lazily: the first access to a blank ID writes a
 * fresh random UUID before returning it.
 */
class EcoServerProfile(
    handler: ProfileHandler
) : EcoProfile(serverProfileUUID, handler), ServerProfile {
    override fun getServerID(): String = readOrGenerateID(serverIDKey)

    override fun getLocalServerID(): String = readOrGenerateID(localServerIDKey)

    // Returns the stored ID for the key, first generating and persisting a
    // random UUID if the stored value is blank.
    private fun readOrGenerateID(key: PersistentDataKey<String>): String {
        if (this.read(key).isBlank()) {
            this.write(key, UUID.randomUUID().toString())
        }
        return this.read(key)
    }

    override fun toString(): String = "EcoServerProfile"
}

View File

@@ -1,37 +0,0 @@
package com.willfp.eco.internal.spigot.data.storage
import com.willfp.eco.core.data.keys.PersistentDataKey
import java.util.UUID
/**
 * Base class for the legacy storage backends, identified by a [HandlerType].
 */
abstract class DataHandler(
    val type: HandlerType
) {
    /**
     * Read value from a key.
     */
    abstract fun <T : Any> read(uuid: UUID, key: PersistentDataKey<T>): T?

    /**
     * Write value to a key.
     */
    abstract fun <T : Any> write(uuid: UUID, key: PersistentDataKey<T>, value: T)

    /**
     * Save a set of keys for a given UUID.
     */
    abstract fun saveKeysFor(uuid: UUID, keys: Map<PersistentDataKey<*>, Any>)

    // Everything below this are methods that are only needed for certain implementations.

    /** Flush buffered data synchronously. No-op by default. */
    open fun save() {

    }

    /** Flush buffered data off-thread. No-op by default. */
    open fun saveAsync() {

    }

    /** One-time setup (e.g. schema creation). No-op by default. */
    open fun initialize() {

    }
}

View File

@@ -1,7 +0,0 @@
package com.willfp.eco.internal.spigot.data.storage
/**
 * The storage backends supported by the legacy data handlers.
 */
enum class HandlerType {
    YAML,
    MYSQL,
    MONGO
}

View File

@@ -1,131 +0,0 @@
package com.willfp.eco.internal.spigot.data.storage
import com.mongodb.client.model.Filters
import com.mongodb.client.model.ReplaceOptions
import com.mongodb.client.model.Updates
import com.mongodb.kotlin.client.coroutine.MongoClient
import com.mongodb.kotlin.client.coroutine.MongoCollection
import com.willfp.eco.core.data.keys.PersistentDataKey
import com.willfp.eco.internal.spigot.EcoSpigotPlugin
import com.willfp.eco.internal.spigot.data.ProfileHandler
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.flow.firstOrNull
import kotlinx.coroutines.launch
import kotlinx.coroutines.runBlocking
import kotlinx.serialization.Contextual
import kotlinx.serialization.SerialName
import kotlinx.serialization.Serializable
import java.util.UUID
/**
 * Legacy MongoDB storage backend.
 *
 * One document per profile UUID (see [UUIDProfile]). Writes are fire-and-forget
 * on an IO coroutine scope; reads block the calling thread via runBlocking.
 */
@Suppress("UNCHECKED_CAST")
class MongoDataHandler(
    plugin: EcoSpigotPlugin,
    private val handler: ProfileHandler
) : DataHandler(HandlerType.MONGO) {
    private val client: MongoClient
    private val collection: MongoCollection<UUIDProfile>

    // Unsupervised IO scope for async writes; failures are not surfaced to callers.
    private val scope = CoroutineScope(Dispatchers.IO)

    init {
        System.setProperty(
            "org.litote.mongo.mapping.service",
            "org.litote.kmongo.jackson.JacksonClassMappingTypeService"
        )

        val url = plugin.configYml.getString("mongodb.url")

        client = MongoClient.create(url)
        collection = client.getDatabase(plugin.configYml.getString("mongodb.database"))
            .getCollection<UUIDProfile>("uuidprofile") // Compat with jackson mapping
    }

    // Blocking single-key read.
    override fun <T : Any> read(uuid: UUID, key: PersistentDataKey<T>): T? {
        return runBlocking {
            doRead(uuid, key)
        }
    }

    // Asynchronous single-key write.
    override fun <T : Any> write(uuid: UUID, key: PersistentDataKey<T>, value: T) {
        scope.launch {
            doWrite(uuid, key, value)
        }
    }

    // Asynchronous bulk write of a profile's keys.
    override fun saveKeysFor(uuid: UUID, keys: Map<PersistentDataKey<*>, Any>) {
        scope.launch {
            for ((key, value) in keys) {
                saveKey(uuid, key, value)
            }
        }
    }

    // Bridges the wildcard key type to doWrite via an unchecked cast.
    private suspend fun <T : Any> saveKey(uuid: UUID, key: PersistentDataKey<T>, value: Any) {
        val data = value as T
        doWrite(uuid, key, data)
    }

    // Upserts the value into the profile document; null removes the entry.
    private suspend fun <T> doWrite(uuid: UUID, key: PersistentDataKey<T>, value: T) {
        val profile = getOrCreateDocument(uuid)

        profile.data.run {
            if (value == null) {
                this.remove(key.key.toString())
            } else {
                this[key.key.toString()] = value
            }
        }

        collection.updateOne(
            Filters.eq(UUIDProfile::uuid.name, uuid.toString()),
            Updates.set(UUIDProfile::data.name, profile.data)
        )
    }

    // NOTE(review): asymmetric contract — returns the key's default when no
    // document exists at all, but may return null when a document exists
    // without this key. Confirm callers expect this.
    private suspend fun <T> doRead(uuid: UUID, key: PersistentDataKey<T>): T? {
        val profile = collection.find<UUIDProfile>(Filters.eq(UUIDProfile::uuid.name, uuid.toString()))
            .firstOrNull() ?: return key.defaultValue
        return profile.data[key.key.toString()] as? T?
    }

    // Fetches the document for this UUID, upserting an empty one if absent.
    private suspend fun getOrCreateDocument(uuid: UUID): UUIDProfile {
        val profile = collection.find<UUIDProfile>(Filters.eq(UUIDProfile::uuid.name, uuid.toString()))
            .firstOrNull()

        return if (profile == null) {
            val toInsert = UUIDProfile(
                uuid.toString(),
                mutableMapOf()
            )

            collection.replaceOne(
                Filters.eq(UUIDProfile::uuid.name, uuid.toString()),
                toInsert,
                ReplaceOptions().upsert(true)
            )

            toInsert
        } else {
            profile
        }
    }

    // All MongoDataHandler instances compare equal (handler identity by type).
    override fun equals(other: Any?): Boolean {
        if (this === other) {
            return true
        }

        return other is MongoDataHandler
    }

    override fun hashCode(): Int {
        return type.hashCode()
    }
}
/**
 * Mongo document shape: one document per profile, keyed by UUID string.
 */
@Serializable
internal data class UUIDProfile(
    // Storing UUID as strings for serialization
    @SerialName("_id") val uuid: String,
    // Storing NamespacedKeys as strings for serialization
    val data: MutableMap<String, @Contextual Any>
)

View File

@@ -1,169 +0,0 @@
package com.willfp.eco.internal.spigot.data.storage
import com.github.benmanes.caffeine.cache.Caffeine
import com.google.common.util.concurrent.ThreadFactoryBuilder
import com.willfp.eco.core.config.ConfigType
import com.willfp.eco.core.config.interfaces.Config
import com.willfp.eco.core.config.readConfig
import com.willfp.eco.core.data.keys.PersistentDataKey
import com.willfp.eco.core.data.keys.PersistentDataKeyType
import com.willfp.eco.internal.spigot.EcoSpigotPlugin
import com.willfp.eco.internal.spigot.data.ProfileHandler
import com.zaxxer.hikari.HikariConfig
import com.zaxxer.hikari.HikariDataSource
import org.jetbrains.exposed.dao.id.UUIDTable
import org.jetbrains.exposed.sql.Column
import org.jetbrains.exposed.sql.Database
import org.jetbrains.exposed.sql.ResultRow
import org.jetbrains.exposed.sql.SchemaUtils
import org.jetbrains.exposed.sql.TextColumnType
import org.jetbrains.exposed.sql.insert
import org.jetbrains.exposed.sql.select
import org.jetbrains.exposed.sql.transactions.transaction
import org.jetbrains.exposed.sql.update
import java.util.UUID
import java.util.concurrent.Executors
import java.util.concurrent.TimeUnit
/*
Better than old MySQL data handler, but that's only because it's literally just dumping all the
data into a single text column, containing the contents of the players profile as a Config.
Whatever. At least it works.
*/
/**
 * Legacy MySQL storage backend.
 *
 * Stores each profile as a single JSON text column ("json_data") in the
 * eco_data table, read/written through a Hikari pool via Exposed.
 */
@Suppress("UNCHECKED_CAST")
class MySQLDataHandler(
    plugin: EcoSpigotPlugin,
    private val handler: ProfileHandler
) : DataHandler(HandlerType.MYSQL) {
    private val database: Database
    private val table = UUIDTable("eco_data")

    // Short-lived row cache to avoid re-querying on rapid consecutive reads.
    private val rows = Caffeine.newBuilder()
        .expireAfterWrite(3, TimeUnit.SECONDS)
        .build<UUID, ResultRow>()

    // Dedicated pool for write work so the main thread is never blocked.
    private val threadFactory = ThreadFactoryBuilder().setNameFormat("eco-mysql-thread-%d").build()
    private val executor = Executors.newFixedThreadPool(plugin.configYml.getInt("mysql.threads"), threadFactory)

    // The json_data column, looked up by name because it is registered dynamically in init.
    private val dataColumn: Column<String>
        get() = table.columns.first { it.name == "json_data" } as Column<String>

    init {
        val config = HikariConfig()
        config.driverClassName = "com.mysql.cj.jdbc.Driver"
        config.username = plugin.configYml.getString("mysql.user")
        config.password = plugin.configYml.getString("mysql.password")
        config.jdbcUrl = "jdbc:mysql://" +
                "${plugin.configYml.getString("mysql.host")}:" +
                "${plugin.configYml.getString("mysql.port")}/" +
                plugin.configYml.getString("mysql.database")
        config.maximumPoolSize = plugin.configYml.getInt("mysql.connections")

        database = Database.connect(HikariDataSource(config))

        transaction(database) {
            SchemaUtils.create(table)

            table.apply {
                registerColumn<String>("json_data", TextColumnType())
            }

            SchemaUtils.createMissingTablesAndColumns(table, withLogs = false)
        }
    }

    // Reads one key by deserializing the profile JSON and dispatching on key type.
    override fun <T : Any> read(uuid: UUID, key: PersistentDataKey<T>): T? {
        val data = getData(uuid)

        val value: Any? = when (key.type) {
            PersistentDataKeyType.INT -> data.getIntOrNull(key.key.toString())
            PersistentDataKeyType.DOUBLE -> data.getDoubleOrNull(key.key.toString())
            PersistentDataKeyType.STRING -> data.getStringOrNull(key.key.toString())
            PersistentDataKeyType.BOOLEAN -> data.getBoolOrNull(key.key.toString())
            PersistentDataKeyType.STRING_LIST -> data.getStringsOrNull(key.key.toString())
            PersistentDataKeyType.CONFIG -> data.getSubsectionOrNull(key.key.toString())
            PersistentDataKeyType.BIG_DECIMAL -> data.getBigDecimalOrNull(key.key.toString())

            else -> null
        }

        return value as? T?
    }

    // Read-modify-write of the whole profile JSON; persistence is async.
    override fun <T : Any> write(uuid: UUID, key: PersistentDataKey<T>, value: T) {
        val data = getData(uuid)
        data.set(key.key.toString(), value)
        setData(uuid, data)
    }

    // Bulk variant of write, executed on the worker pool.
    override fun saveKeysFor(uuid: UUID, keys: Map<PersistentDataKey<*>, Any>) {
        executor.submit {
            val data = getData(uuid)

            for ((key, value) in keys) {
                data.set(key.key.toString(), value)
            }

            doSetData(uuid, data)
        }
    }

    // Loads (or lazily creates) the profile row and parses its JSON payload.
    private fun getData(uuid: UUID): Config {
        val plaintext = transaction(database) {
            val row = rows.get(uuid) {
                val row = table.select { table.id eq uuid }.limit(1).singleOrNull()

                if (row != null) {
                    row
                } else {
                    transaction(database) {
                        table.insert {
                            it[id] = uuid
                            it[dataColumn] = "{}"
                        }
                    }

                    table.select { table.id eq uuid }.limit(1).singleOrNull()
                }
            }

            // NOTE(review): Caffeine's get can yield null if the loader returned
            // null (e.g. the re-select after insert found nothing); `row.getOrNull`
            // would then NPE. Looks unguarded — verify against the driver behavior.
            row.getOrNull(dataColumn) ?: "{}"
        }

        return readConfig(plaintext, ConfigType.JSON)
    }

    // Queues an async persist of the profile JSON.
    private fun setData(uuid: UUID, config: Config) {
        executor.submit {
            doSetData(uuid, config)
        }
    }

    // Synchronously writes the profile JSON back to the row.
    private fun doSetData(uuid: UUID, config: Config) {
        transaction(database) {
            table.update({ table.id eq uuid }) {
                it[dataColumn] = config.toPlaintext()
            }
        }
    }

    // Re-runs schema sync after plugins have registered their keys.
    override fun initialize() {
        transaction(database) {
            SchemaUtils.createMissingTablesAndColumns(table, withLogs = false)
        }
    }

    // All MySQLDataHandler instances compare equal (handler identity by type).
    override fun equals(other: Any?): Boolean {
        if (this === other) {
            return true
        }

        return other is MySQLDataHandler
    }

    override fun hashCode(): Int {
        return type.hashCode()
    }
}

View File

@@ -1,27 +0,0 @@
package com.willfp.eco.internal.spigot.data.storage
import com.willfp.eco.core.EcoPlugin
import com.willfp.eco.internal.spigot.data.EcoProfile
import com.willfp.eco.internal.spigot.data.ProfileHandler
/**
 * Legacy periodic saver: every save-interval ticks, flushes the keys of every
 * profile marked dirty in EcoProfile.CHANGE_MAP to the data handler.
 */
class ProfileSaver(
    private val plugin: EcoPlugin,
    private val handler: ProfileHandler
) {
    /**
     * Start the repeating save task.
     */
    fun startTicking() {
        val interval = plugin.configYml.getInt("save-interval").toLong()

        plugin.scheduler.runTimer(20, interval) {
            val iterator = EcoProfile.CHANGE_MAP.iterator()
            while (iterator.hasNext()) {
                val uuid = iterator.next()
                iterator.remove()

                // Profile may have been unloaded since it was marked dirty.
                val profile = handler.accessLoadedProfile(uuid) ?: continue
                handler.saveKeysFor(uuid, profile.data.keys)
            }
        }
    }
}

View File

@@ -1,67 +0,0 @@
package com.willfp.eco.internal.spigot.data.storage
import com.willfp.eco.core.data.keys.PersistentDataKey
import com.willfp.eco.core.data.keys.PersistentDataKeyType
import com.willfp.eco.internal.spigot.EcoSpigotPlugin
import com.willfp.eco.internal.spigot.data.ProfileHandler
import org.bukkit.NamespacedKey
import java.util.UUID
/**
 * Legacy yaml storage backend: persists profile data inside data.yml under
 * "player.&lt;uuid&gt;.&lt;key&gt;". Writes mutate the in-memory config; persistence
 * happens on save()/saveAsync().
 */
@Suppress("UNCHECKED_CAST")
class YamlDataHandler(
    plugin: EcoSpigotPlugin,
    private val handler: ProfileHandler
) : DataHandler(HandlerType.YAML) {
    private val dataYml = plugin.dataYml

    /** Synchronous flush to disk. */
    override fun save() {
        dataYml.save()
    }

    /** Asynchronous flush to disk. */
    override fun saveAsync() {
        dataYml.saveAsync()
    }

    // Reads one key, dispatching on the key's declared type.
    override fun <T : Any> read(uuid: UUID, key: PersistentDataKey<T>): T? {
        // Separate `as T?` for each branch to prevent compiler warnings.
        val value = when (key.type) {
            PersistentDataKeyType.INT -> dataYml.getIntOrNull("player.$uuid.${key.key}") as T?
            PersistentDataKeyType.DOUBLE -> dataYml.getDoubleOrNull("player.$uuid.${key.key}") as T?
            PersistentDataKeyType.STRING -> dataYml.getStringOrNull("player.$uuid.${key.key}") as T?
            PersistentDataKeyType.BOOLEAN -> dataYml.getBoolOrNull("player.$uuid.${key.key}") as T?
            PersistentDataKeyType.STRING_LIST -> dataYml.getStringsOrNull("player.$uuid.${key.key}") as T?
            PersistentDataKeyType.CONFIG -> dataYml.getSubsectionOrNull("player.$uuid.${key.key}") as T?
            PersistentDataKeyType.BIG_DECIMAL -> dataYml.getBigDecimalOrNull("player.$uuid.${key.key}") as T?

            else -> null
        }

        return value
    }

    override fun <T : Any> write(uuid: UUID, key: PersistentDataKey<T>, value: T) {
        doWrite(uuid, key.key, value)
    }

    override fun saveKeysFor(uuid: UUID, keys: Map<PersistentDataKey<*>, Any>) {
        for ((key, value) in keys) {
            doWrite(uuid, key.key, value)
        }
    }

    // In-memory set only; not persisted until save()/saveAsync().
    private fun doWrite(uuid: UUID, key: NamespacedKey, value: Any) {
        dataYml.set("player.$uuid.$key", value)
    }

    // All YamlDataHandler instances compare equal (handler identity by type).
    override fun equals(other: Any?): Boolean {
        if (this === other) {
            return true
        }

        return other is YamlDataHandler
    }

    override fun hashCode(): Int {
        return type.hashCode()
    }
}

View File

@@ -33,16 +33,15 @@ mysql:
user: username user: username
password: passy password: passy
yaml:
autosave: true # If data should be saved automatically
autosave-interval: 1800 # How often data should be saved (in seconds)
# How many ticks to wait between committing data to a database. This doesn't # How many ticks to wait between committing data to a database. This doesn't
# affect yaml storage, only MySQL and MongoDB. By default, data is committed # affect yaml storage, only MySQL and MongoDB. By default, data is committed
# every tick, but you can increase this to be every x ticks, for example 20 # every tick, but you can increase this to be every x ticks, for example 20
# would be committing once a second. # would be committing once a second.
save-interval: 1 save-interval: 1
# How many ticks to wait between autosaves for data.yml.
autosave-interval: 36000 # 30 minutes
# Options to manage the conflict finder # Options to manage the conflict finder
conflicts: conflicts:
whitelist: # Plugins that should never be marked as conflicts whitelist: # Plugins that should never be marked as conflicts