Added legacy mongodb handler for migration

This commit is contained in:
Auxilor
2024-08-26 16:16:31 +01:00
parent a11815af82
commit 19fc168034
7 changed files with 198 additions and 38 deletions

View File

@@ -1,14 +1,11 @@
package com.willfp.eco.core.data.handlers;
import com.willfp.eco.core.Eco;
import com.willfp.eco.core.data.keys.PersistentDataKey;
import com.willfp.eco.core.registry.Registrable;
import org.bukkit.Bukkit;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
@@ -43,10 +40,12 @@ public abstract class PersistentDataHandler implements Registrable {
/**
* Get all UUIDs with saved data.
* <p>
* This is a blocking operation.
*
* @return All saved UUIDs.
*/
protected abstract Set<UUID> getSavedUUIDs();
public abstract Set<UUID> getSavedUUIDs();
/**
* Save to disk.
@@ -113,30 +112,26 @@ public abstract class PersistentDataHandler implements Registrable {
}
/**
* Serialize data.
* Serialize profile.
*
* @param uuid The uuid to serialize.
* @param keys The keys to serialize.
* @return The serialized data.
*/
@NotNull
public final Set<SerializedProfile> serializeData(@NotNull final Set<PersistentDataKey<?>> keys) {
Set<SerializedProfile> profiles = new HashSet<>();
public final SerializedProfile serializeProfile(@NotNull final UUID uuid,
@NotNull final Set<PersistentDataKey<?>> keys) {
Map<PersistentDataKey<?>, Object> data = new HashMap<>();
for (UUID uuid : getSavedUUIDs()) {
Map<PersistentDataKey<?>, Object> data = new HashMap<>();
for (PersistentDataKey<?> key : keys) {
Object value = read(uuid, key);
for (PersistentDataKey<?> key : keys) {
Object value = read(uuid, key);
if (value != null) {
data.put(key, value);
}
if (value != null) {
data.put(key, value);
}
profiles.add(new SerializedProfile(uuid, data));
}
return profiles;
return new SerializedProfile(uuid, data);
}
/**

View File

@@ -4,7 +4,7 @@ import com.willfp.eco.core.data.handlers.PersistentDataHandler
import com.willfp.eco.core.registry.KRegistrable
import com.willfp.eco.core.registry.Registry
import com.willfp.eco.internal.spigot.EcoSpigotPlugin
import com.willfp.eco.internal.spigot.data.handlers.impl.MongoPersistentDataHandler
import com.willfp.eco.internal.spigot.data.handlers.impl.MongoDBPersistentDataHandler
import com.willfp.eco.internal.spigot.data.handlers.impl.MySQLPersistentDataHandler
import com.willfp.eco.internal.spigot.data.handlers.impl.YamlPersistentDataHandler
@@ -26,9 +26,15 @@ object PersistentDataHandlers: Registry<PersistentDataHandlerFactory>() {
MySQLPersistentDataHandler(plugin.configYml.getSubsection("mysql"))
})
register(object : PersistentDataHandlerFactory("mongodb") {
override fun create(plugin: EcoSpigotPlugin) =
MongoDBPersistentDataHandler(plugin.configYml.getSubsection("mongodb"))
})
// Configs should also accept "mongo"
register(object : PersistentDataHandlerFactory("mongo") {
override fun create(plugin: EcoSpigotPlugin) =
MongoPersistentDataHandler(plugin.configYml.getSubsection("mongodb"))
MongoDBPersistentDataHandler(plugin.configYml.getSubsection("mongodb"))
})
}
}

View File

@@ -0,0 +1,142 @@
package com.willfp.eco.internal.spigot.data.handlers.impl
import com.mongodb.MongoClientSettings
import com.mongodb.client.model.Filters
import com.mongodb.kotlin.client.coroutine.MongoClient
import com.willfp.eco.core.config.Configs
import com.willfp.eco.core.config.interfaces.Config
import com.willfp.eco.core.data.handlers.DataTypeSerializer
import com.willfp.eco.core.data.handlers.PersistentDataHandler
import com.willfp.eco.core.data.keys.PersistentDataKey
import com.willfp.eco.core.data.keys.PersistentDataKeyType
import com.willfp.eco.internal.spigot.EcoSpigotPlugin
import com.willfp.eco.internal.spigot.data.handlers.PersistentDataHandlerFactory
import kotlinx.coroutines.flow.firstOrNull
import kotlinx.coroutines.flow.toList
import kotlinx.coroutines.runBlocking
import org.bson.BsonArray
import org.bson.BsonBoolean
import org.bson.BsonDecimal128
import org.bson.BsonDocument
import org.bson.BsonDouble
import org.bson.BsonInt32
import org.bson.BsonString
import org.bson.BsonValue
import org.bson.codecs.configuration.CodecRegistries
import org.bson.codecs.pojo.PojoCodecProvider
import java.math.BigDecimal
import java.util.UUID
/**
 * Read-only handler for the legacy MongoDB schema (one document per player in the
 * "uuidprofile" collection), used only to migrate old data into the current handler.
 *
 * Writing is unsupported: [writeAsync] always throws.
 */
class LegacyMongoDBPersistentDataHandler(
    config: Config
) : PersistentDataHandler("legacy_mongodb") {
    // Default codecs plus automatic POJO mapping so legacy documents can be decoded.
    private val codecRegistry = CodecRegistries.fromRegistries(
        MongoClientSettings.getDefaultCodecRegistry(),
        CodecRegistries.fromProviders(PojoCodecProvider.builder().automatic(true).build())
    )

    private val client = MongoClient.create(config.getString("url"))
    private val database = client.getDatabase(config.getString("database"))

    // Legacy collection name; documents are keyed by the player UUID string in "_id".
    private val collection = database.getCollection<BsonDocument>("uuidprofile")
        .withCodecRegistry(codecRegistry)

    init {
        // Register a deserializer per key type; each unwraps the matching BSON value.
        PersistentDataKeyType.STRING.registerSerializer(this, object : LegacyMongoSerializer<String>() {
            override fun deserialize(value: BsonValue): String =
                value.asString().value
        })

        PersistentDataKeyType.BOOLEAN.registerSerializer(this, object : LegacyMongoSerializer<Boolean>() {
            override fun deserialize(value: BsonValue): Boolean =
                value.asBoolean().value
        })

        PersistentDataKeyType.INT.registerSerializer(this, object : LegacyMongoSerializer<Int>() {
            override fun deserialize(value: BsonValue): Int =
                value.asInt32().value
        })

        PersistentDataKeyType.DOUBLE.registerSerializer(this, object : LegacyMongoSerializer<Double>() {
            override fun deserialize(value: BsonValue): Double =
                value.asDouble().value
        })

        PersistentDataKeyType.STRING_LIST.registerSerializer(this, object : LegacyMongoSerializer<List<String>>() {
            override fun deserialize(value: BsonValue): List<String> =
                value.asArray().values.map { element -> element.asString().value }
        })

        PersistentDataKeyType.BIG_DECIMAL.registerSerializer(this, object : LegacyMongoSerializer<BigDecimal>() {
            override fun deserialize(value: BsonValue): BigDecimal =
                value.asDecimal128().value.bigDecimalValue()
        })

        PersistentDataKeyType.CONFIG.registerSerializer(this, object : LegacyMongoSerializer<Config>() {
            // Recursively convert a BSON tree into plain Kotlin values for Configs.fromMap.
            private fun fromBson(value: BsonValue): Any = when (value) {
                is BsonString -> value.value
                is BsonInt32 -> value.value
                is BsonDouble -> value.value
                is BsonBoolean -> value.value
                is BsonDecimal128 -> value.value.bigDecimalValue()
                is BsonArray -> value.values.map { element -> fromBson(element) }
                is BsonDocument -> value.mapValues { (_, v) -> fromBson(v) }
                else -> throw IllegalArgumentException("Could not deserialize config value type ${value::class.simpleName}")
            }

            override fun deserialize(value: BsonValue): Config {
                @Suppress("UNCHECKED_CAST")
                return Configs.fromMap(fromBson(value.asDocument()) as Map<String, Any>)
            }
        })
    }

    /**
     * Fetch every stored profile UUID. Blocking: drains the whole collection.
     */
    override fun getSavedUUIDs(): Set<UUID> = runBlocking {
        collection.find()
            .toList()
            .mapTo(mutableSetOf()) { document -> UUID.fromString(document.getString("_id").value) }
    }

    /**
     * Base serializer for legacy values: looks up the key inside the profile
     * document's "data" sub-document and delegates BSON unwrapping to [deserialize].
     */
    private abstract inner class LegacyMongoSerializer<T : Any> : DataTypeSerializer<T>() {
        override fun readAsync(uuid: UUID, key: PersistentDataKey<T>): T? = runBlocking {
            val document = collection.find(Filters.eq("_id", uuid.toString()))
                .firstOrNull() ?: return@runBlocking null

            val stored = document.getDocument("data")[key.key.toString()]
                ?: return@runBlocking null

            try {
                deserialize(stored)
            } catch (e: Exception) {
                // Best-effort migration: an unreadable legacy value is treated as absent.
                null
            }
        }

        override fun writeAsync(uuid: UUID, key: PersistentDataKey<T>, value: T) {
            throw UnsupportedOperationException("Legacy Mongo does not support writing")
        }

        protected abstract fun deserialize(value: BsonValue): T
    }

    object Factory : PersistentDataHandlerFactory("legacy_mongo") {
        override fun create(plugin: EcoSpigotPlugin): PersistentDataHandler =
            LegacyMongoDBPersistentDataHandler(plugin.configYml.getSubsection("mongodb"))
    }
}

View File

@@ -44,13 +44,13 @@ class LegacyMySQLPersistentDataHandler(
SchemaUtils.create(table)
}
PersistentDataKeyType.STRING.registerSerializer(this, LegacySerializer<String>())
PersistentDataKeyType.BOOLEAN.registerSerializer(this, LegacySerializer<Boolean>())
PersistentDataKeyType.INT.registerSerializer(this, LegacySerializer<Int>())
PersistentDataKeyType.DOUBLE.registerSerializer(this, LegacySerializer<Double>())
PersistentDataKeyType.BIG_DECIMAL.registerSerializer(this, LegacySerializer<BigDecimal>())
PersistentDataKeyType.CONFIG.registerSerializer(this, LegacySerializer<Config>())
PersistentDataKeyType.STRING_LIST.registerSerializer(this, LegacySerializer<List<String>>())
PersistentDataKeyType.STRING.registerSerializer(this, LegacyMySQLSerializer<String>())
PersistentDataKeyType.BOOLEAN.registerSerializer(this, LegacyMySQLSerializer<Boolean>())
PersistentDataKeyType.INT.registerSerializer(this, LegacyMySQLSerializer<Int>())
PersistentDataKeyType.DOUBLE.registerSerializer(this, LegacyMySQLSerializer<Double>())
PersistentDataKeyType.BIG_DECIMAL.registerSerializer(this, LegacyMySQLSerializer<BigDecimal>())
PersistentDataKeyType.CONFIG.registerSerializer(this, LegacyMySQLSerializer<Config>())
PersistentDataKeyType.STRING_LIST.registerSerializer(this, LegacyMySQLSerializer<List<String>>())
}
override fun getSavedUUIDs(): Set<UUID> {
@@ -61,7 +61,7 @@ class LegacyMySQLPersistentDataHandler(
}.map { it.value }.toSet()
}
private inner class LegacySerializer<T : Any> : DataTypeSerializer<T>() {
private inner class LegacyMySQLSerializer<T : Any> : DataTypeSerializer<T>() {
override fun readAsync(uuid: UUID, key: PersistentDataKey<T>): T? {
val json = transaction(database) {
table.selectAll()

View File

@@ -28,7 +28,7 @@ import org.bson.types.Decimal128
import java.math.BigDecimal
import java.util.UUID
class MongoPersistentDataHandler(
class MongoDBPersistentDataHandler(
config: Config
) : PersistentDataHandler("mongo") {
private val codecRegistry = CodecRegistries.fromRegistries(

View File

@@ -5,7 +5,9 @@ import com.willfp.eco.internal.spigot.ServerLocking
import com.willfp.eco.internal.spigot.data.KeyRegistry
import com.willfp.eco.internal.spigot.data.handlers.PersistentDataHandlerFactory
import com.willfp.eco.internal.spigot.data.handlers.PersistentDataHandlers
import com.willfp.eco.internal.spigot.data.handlers.impl.LegacyMongoDBPersistentDataHandler
import com.willfp.eco.internal.spigot.data.handlers.impl.LegacyMySQLPersistentDataHandler
import com.willfp.eco.internal.spigot.data.handlers.impl.MongoDBPersistentDataHandler
import com.willfp.eco.internal.spigot.data.handlers.impl.MySQLPersistentDataHandler
import com.willfp.eco.internal.spigot.data.handlers.impl.YamlPersistentDataHandler
import com.willfp.eco.internal.spigot.data.profiles.impl.EcoPlayerProfile
@@ -15,6 +17,8 @@ import com.willfp.eco.internal.spigot.data.profiles.impl.serverProfileUUID
import java.util.UUID
import java.util.concurrent.ConcurrentHashMap
const val LEGACY_MIGRATED_KEY = "legacy-data-migrated"
class ProfileHandler(
private val plugin: EcoSpigotPlugin
) {
@@ -57,7 +61,7 @@ class ProfileHandler(
// First install
if (!plugin.dataYml.has("previous-handler")) {
plugin.dataYml.set("previous-handler", defaultHandler.id)
plugin.dataYml.set("legacy-mysql-migrated", true)
plugin.dataYml.set(LEGACY_MIGRATED_KEY, true)
plugin.dataYml.save()
return false
}
@@ -70,13 +74,20 @@ class ProfileHandler(
return true
}
if (defaultHandler is MySQLPersistentDataHandler && !plugin.dataYml.getBool("legacy-mysql-migrated")) {
if (defaultHandler is MySQLPersistentDataHandler && !plugin.dataYml.getBool(LEGACY_MIGRATED_KEY)) {
plugin.logger.info("eco has detected a legacy MySQL database. Migrating to new MySQL database...")
scheduleMigration(LegacyMySQLPersistentDataHandler.Factory)
return true
}
if (defaultHandler is MongoDBPersistentDataHandler && !plugin.dataYml.getBool(LEGACY_MIGRATED_KEY)) {
plugin.logger.info("eco has detected a legacy MongoDB database. Migrating to new MongoDB database...")
scheduleMigration(LegacyMongoDBPersistentDataHandler.Factory)
return true
}
return false
}
@@ -87,7 +98,7 @@ class ProfileHandler(
plugin.scheduler.runLater(5) {
doMigrate(fromFactory)
plugin.dataYml.set("legacy-mysql-migrated", true)
plugin.dataYml.set(LEGACY_MIGRATED_KEY, true)
plugin.dataYml.save()
}
}
@@ -100,14 +111,20 @@ class ProfileHandler(
val fromHandler = fromFactory.create(plugin)
val toHandler = defaultHandler
plugin.logger.info("Loading data from ${fromFactory.id}...")
val keys = KeyRegistry.getRegisteredKeys()
val serialized = fromHandler.serializeData(KeyRegistry.getRegisteredKeys())
plugin.logger.info("Keys to migrate: ${keys.map { it.key }.joinToString(", ") }}")
plugin.logger.info("Found ${serialized.size} profiles to migrate")
plugin.logger.info("Loading profile UUIDs from ${fromFactory.id}...")
plugin.logger.info("This step may take a while depending on the size of your database.")
for ((index, profile) in serialized.withIndex()) {
plugin.logger.info("(${index + 1}/${serialized.size}) Migrating ${profile.uuid}")
val uuids = fromHandler.getSavedUUIDs()
plugin.logger.info("Found ${uuids.size} profiles to migrate")
for ((index, uuid) in uuids.withIndex()) {
plugin.logger.info("(${index + 1}/${uuids.size}) Migrating $uuid")
val profile = fromHandler.serializeProfile(uuid, keys)
toHandler.loadSerializedProfile(profile)
}

View File

@@ -7,7 +7,7 @@
# How player/server data is saved:
# yaml - Stored in data.yml: Good option for single-node servers (i.e. no BungeeCord/Velocity)
# mysql - Standard database, great option for multi-node servers (i.e. BungeeCord/Velocity)
# mongo - Alternative database, great option for multi-node servers (i.e. BungeeCord/Velocity)
# mongodb - Alternative database, great option for multi-node servers (i.e. BungeeCord/Velocity)
data-handler: yaml
# If data should be migrated automatically when changing data handler.