Added MongoDB data handler

This commit is contained in:
Auxilor
2022-05-25 20:08:24 +01:00
parent de9b961d83
commit 07c0e72564
9 changed files with 132 additions and 7 deletions

View File

@@ -14,6 +14,8 @@ dependencies {
implementation 'com.zaxxer:HikariCP:5.0.0'
implementation 'net.kyori:adventure-platform-bukkit:4.1.0'
implementation 'org.javassist:javassist:3.28.0-GA'
implementation 'org.mongodb:mongo-java-driver:2.12.3'
implementation 'org.litote.kmongo:kmongo-coroutine:4.6.0'
// Included in spigot jar
compileOnly 'com.google.code.gson:gson:2.8.8'

View File

@@ -29,6 +29,7 @@ import com.willfp.eco.internal.scheduling.EcoScheduler
import com.willfp.eco.internal.spigot.data.DataYml
import com.willfp.eco.internal.spigot.data.EcoKeyRegistry
import com.willfp.eco.internal.spigot.data.EcoProfileHandler
import com.willfp.eco.internal.spigot.data.storage.HandlerType
import com.willfp.eco.internal.spigot.integrations.bstats.MetricHandler
import com.willfp.eco.internal.spigot.proxy.CommonsInitializerProxy
import com.willfp.eco.internal.spigot.proxy.DummyEntityFactoryProxy
@@ -57,7 +58,7 @@ class EcoHandler : EcoSpigotPlugin(), Handler {
private var adventure: BukkitAudiences? = null
private val keyRegistry = EcoKeyRegistry()
private val playerProfileHandler = EcoProfileHandler(this.configYml.getBool("mysql.enabled"), this)
private val playerProfileHandler = EcoProfileHandler(HandlerType.valueOf(this.configYml.getString("data-handler").uppercase()), this)
@Suppress("RedundantNullableReturnType")
private val keyFactory: InternalNamespacedKeyFactory? =

View File

@@ -7,6 +7,8 @@ import com.willfp.eco.core.data.ServerProfile
import com.willfp.eco.core.data.keys.PersistentDataKey
import com.willfp.eco.internal.spigot.EcoSpigotPlugin
import com.willfp.eco.internal.spigot.data.storage.DataHandler
import com.willfp.eco.internal.spigot.data.storage.HandlerType
import com.willfp.eco.internal.spigot.data.storage.MongoDataHandler
import com.willfp.eco.internal.spigot.data.storage.MySQLDataHandler
import com.willfp.eco.internal.spigot.data.storage.YamlDataHandler
import java.util.UUID
@@ -14,12 +16,16 @@ import java.util.UUID
val serverProfileUUID = UUID(0, 0)
class EcoProfileHandler(
useSql: Boolean,
type: HandlerType,
plugin: EcoSpigotPlugin
) : ProfileHandler {
private val loaded = mutableMapOf<UUID, Profile>()
val handler: DataHandler = if (useSql) MySQLDataHandler(plugin, this) else
YamlDataHandler(plugin, this)
val handler: DataHandler = when(type) {
HandlerType.YAML -> YamlDataHandler(plugin, this)
HandlerType.MYSQL -> MySQLDataHandler(plugin, this)
HandlerType.MONGO -> MongoDataHandler(plugin)
}
fun loadGenericProfile(uuid: UUID): Profile {
val found = loaded[uuid]

View File

@@ -6,7 +6,10 @@ import org.bukkit.NamespacedKey
import java.util.UUID
interface DataHandler {
fun save()
fun save() {
}
fun saveAll(uuids: Iterable<UUID>)
fun categorize(key: PersistentDataKey<*>, category: KeyRegistry.KeyCategory) {

View File

@@ -0,0 +1,7 @@
package com.willfp.eco.internal.spigot.data.storage
/**
 * The persistent-data storage backends a server can select via config.
 *
 * Resolved from the config string with [valueOf] after uppercasing,
 * so the config value must be one of: yaml / mysql / mongo.
 */
enum class HandlerType {
    YAML,
    MYSQL,
    MONGO
}

View File

@@ -0,0 +1,101 @@
package com.willfp.eco.internal.spigot.data.storage
import com.willfp.eco.core.EcoPlugin
import com.willfp.eco.core.data.keys.PersistentDataKey
import kotlinx.coroutines.launch
import kotlinx.coroutines.runBlocking
import org.bson.codecs.pojo.annotations.BsonId
import org.bukkit.NamespacedKey
import org.litote.kmongo.coroutine.CoroutineClient
import org.litote.kmongo.coroutine.CoroutineCollection
import org.litote.kmongo.coroutine.coroutine
import org.litote.kmongo.eq
import org.litote.kmongo.reactivestreams.KMongo
import org.litote.kmongo.setValue
import java.util.UUID
@Suppress("UNCHECKED_CAST")
class MongoDataHandler(
    plugin: EcoPlugin
) : DataHandler {
    private val client: CoroutineClient
    private val collection: CoroutineCollection<SerializableProfile>

    init {
        // Connection URL comes straight from config; an empty/invalid URL will
        // fail here at plugin load rather than at first read/write.
        val url = plugin.configYml.getString("mongodb.url")
        client = KMongo.createClient(url).coroutine
        collection = client.getDatabase("eco").getCollection()
    }

    override fun saveAll(uuids: Iterable<UUID>) {
        for (uuid in uuids) {
            saveKeysFor(uuid, PersistentDataKey.values())
        }
    }

    override fun <T> write(uuid: UUID, key: NamespacedKey, value: T) {
        // runBlocking already waits for all child coroutines, so the previous
        // inner launch { } was redundant — call the suspend function directly.
        // NOTE(review): this still blocks the calling thread for a network
        // round-trip; confirm callers are off the main server thread.
        runBlocking {
            doWrite(uuid, key, value)
        }
    }

    /**
     * Writes (or removes, when [value] is null) a single key in the
     * profile document for [uuid], creating the document if absent.
     */
    private suspend fun <T> doWrite(uuid: UUID, key: NamespacedKey, value: T) {
        val profile = getOrCreateDocument(uuid)
        val newData = profile.data.apply {
            if (value == null) {
                this.remove(key)
            } else {
                this[key] = value
            }
        }
        collection.updateOne(SerializableProfile::uuid eq uuid, setValue(SerializableProfile::data, newData))
    }

    override fun saveKeysFor(uuid: UUID, keys: Set<PersistentDataKey<*>>) {
        // Same simplification as write(): no redundant launch inside runBlocking.
        runBlocking {
            for (key in keys) {
                doWrite(uuid, key.key, read(uuid, key))
            }
        }
    }

    override fun <T> read(uuid: UUID, key: PersistentDataKey<T>): T? {
        return runBlocking {
            doRead(uuid, key)
        }
    }

    /**
     * Reads one key from the profile document for [uuid].
     *
     * Returns [PersistentDataKey.defaultValue] when the whole document is
     * missing, but null when the document exists and merely lacks the key.
     * NOTE(review): that asymmetry looks unintentional — confirm whether a
     * missing key should also fall back to the default value.
     */
    private suspend fun <T> doRead(uuid: UUID, key: PersistentDataKey<T>): T? {
        val profile = collection.findOne(SerializableProfile::uuid eq uuid) ?: return key.defaultValue
        return profile.data[key.key] as? T?
    }

    /**
     * Fetches the profile document for [uuid], inserting an empty one first
     * if none exists.
     */
    private suspend fun getOrCreateDocument(uuid: UUID): SerializableProfile {
        val existing = collection.findOne(SerializableProfile::uuid eq uuid)
        if (existing != null) {
            return existing
        }

        // Insert an empty profile and return it directly: the previous
        // recursive re-fetch cost an extra round-trip and could in principle
        // recurse indefinitely if the insert was not yet visible.
        val fresh = SerializableProfile(uuid, mutableMapOf())
        collection.insertOne(fresh)
        return fresh
    }
}
/**
 * The per-player (or per-server) document shape stored in MongoDB:
 * one document keyed by [uuid], holding all persistent data values.
 *
 * NOTE(review): [data] uses [NamespacedKey] as a BSON map key — map keys in
 * BSON must serialize to strings; confirm the registered codec handles
 * NamespacedKey round-trips correctly.
 */
private data class SerializableProfile(
    @BsonId
    val uuid: UUID,
    val data: MutableMap<NamespacedKey, Any>
)

View File

@@ -45,7 +45,6 @@ class MySQLDataHandler(
private val serverHandler: ImplementedMySQLHandler
init {
val config = HikariConfig()
config.driverClassName = "com.mysql.cj.jdbc.Driver"
config.username = plugin.configYml.getString("mysql.user")

View File

@@ -3,8 +3,13 @@
# by Auxilor
#
handler-type: yaml # Pick from yaml/mongo/mysql - MongoDB is recommended over MySQL for networks. (NOTE: the plugin code in this commit reads the key "data-handler", not "handler-type" — these must be reconciled or the setting is ignored.)
mongodb:
# The full MongoDB connection URL.
url: ""
mysql:
enabled: false # Set to false, data.yml will be used instead.
# How many threads to execute statements on. Higher numbers can be faster however
# very high numbers can cause issues with OS configuration. If writes are taking
# too long, increase this value.