Mirror of https://github.com/WiIIiam278/HuskSync.git (synced 2025-12-28 19:19:13 +00:00)
Re-add MPDB migrator loading on redis listener
Use Libby to download SQL dependencies at runtime
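In outline, the Libby change boils down to building a Library descriptor for each JDBC driver and asking the library manager to resolve it from Maven Central at startup. A minimal sketch of that pattern, using only the Libby calls visible in the diff below (the helper class and method name are hypothetical, and the manager is assumed to be supplied by the caller, as it is via the injected constructor in the diff):

import net.byteflux.libby.Library;
import net.byteflux.libby.VelocityLibraryManager;

final class SqlDriverLoader {

    // Resolve the MySQL and SQLite JDBC drivers at runtime and add them to
    // the plugin's classpath via Libby.
    static void loadSqlDrivers(VelocityLibraryManager<?> manager) {
        Library mySqlLib = Library.builder()
                .groupId("mysql")
                .artifactId("mysql-connector-java")
                .version("8.0.25")
                .build();

        Library sqLiteLib = Library.builder()
                .groupId("org.xerial")
                .artifactId("sqlite-jdbc")
                .version("3.36.0.3")
                .build();

        manager.addMavenCentral();      // repository the artifacts are resolved from
        manager.loadLibrary(mySqlLib);  // downloads if absent, then adds to the classpath
        manager.loadLibrary(sqLiteLib);
    }
}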
@@ -1,6 +1,7 @@
 package me.william278.husksync;
 
 import com.google.inject.Inject;
+import com.google.inject.Provides;
 import com.velocitypowered.api.command.CommandManager;
 import com.velocitypowered.api.command.CommandMeta;
 import com.velocitypowered.api.event.Subscribe;

@@ -19,6 +20,8 @@ import me.william278.husksync.velocity.config.ConfigManager;
 import me.william278.husksync.velocity.listener.VelocityEventListener;
 import me.william278.husksync.velocity.listener.VelocityRedisListener;
 import me.william278.husksync.velocity.util.VelocityLogger;
+import net.byteflux.libby.Library;
+import net.byteflux.libby.VelocityLibraryManager;
 import org.bstats.velocity.Metrics;
 import org.slf4j.Logger;
 

@@ -72,6 +75,8 @@ public class HuskSyncVelocity {
     private final ProxyServer server;
     private final Path dataDirectory;
 
+    private final VelocityLibraryManager<HuskSyncVelocity> manager;
+
     // Get the data folder
     public File getDataFolder() {
         return dataDirectory.toFile();

@@ -90,11 +95,13 @@ public class HuskSyncVelocity {
     }
 
     @Inject
-    public HuskSyncVelocity(ProxyServer server, Logger logger, @DataDirectory Path dataDirectory, Metrics.Factory metricsFactory) {
+    public HuskSyncVelocity(ProxyServer server, Logger logger, @DataDirectory Path dataDirectory, Metrics.Factory metricsFactory, VelocityLibraryManager<HuskSyncVelocity> manager) {
         this.server = server;
         this.logger = logger;
         this.dataDirectory = dataDirectory;
         this.metricsFactory = metricsFactory;
+        this.manager = manager;
+        fetchDependencies();
     }
 
     @Subscribe

@@ -105,6 +112,9 @@ public class HuskSyncVelocity {
         // Setup logger
         velocityLogger = new VelocityLogger(logger);
 
+        // Prepare synchronised servers tracker
+        synchronisedServers = new HashSet<>();
+
         // Load config
         ConfigManager.loadConfig();
 

@@ -125,6 +135,12 @@ public class HuskSyncVelocity {
         // Setup data manager
         dataManager = new DataManager(getVelocityLogger(), getDataFolder());
 
+        // Ensure the data manager initialized correctly
+        if (dataManager.hasFailedInitialization) {
+            getVelocityLogger().severe("Failed to initialize the HuskSync database(s).\n" +
+                    "HuskSync will now abort loading itself (Velocity) v" + VERSION);
+        }
+
         // Setup player data cache
         for (Settings.SynchronisationCluster cluster : Settings.clusters) {
             dataManager.playerDataCache.put(cluster, new DataManager.PlayerDataCache());

@@ -186,4 +202,23 @@ public class HuskSyncVelocity {
         // Log to console
         getVelocityLogger().info("Disabled HuskSync (Velocity) v" + VERSION);
     }
+
+    // Load dependencies
+    private void fetchDependencies() {
+        Library mySqlLib = Library.builder()
+                .groupId("mysql")
+                .artifactId("mysql-connector-java")
+                .version("8.0.25")
+                .build();
+
+        Library sqLiteLib = Library.builder()
+                .groupId("org.xerial")
+                .artifactId("sqlite-jdbc")
+                .version("3.36.0.3")
+                .build();
+
+        manager.addMavenCentral();
+        manager.loadLibrary(mySqlLib);
+        manager.loadLibrary(sqLiteLib);
+    }
 }

@@ -71,7 +71,7 @@ public class VelocityCommand implements SimpleCommand, HuskSyncCommand {
                         }
                         sender.sendMessage(new MineDown("[•](white) [Download links:](#00fb9a) [[⏩ Spigot]](gray open_url=https://www.spigotmc.org/resources/husktowns.92672/updates) [•](#262626) [[⏩ Polymart]](gray open_url=https://polymart.org/resource/husktowns.1056/updates)").toComponent());
                     }
-                });
+                }).schedule();
             }
             case "invsee", "openinv", "inventory" -> {
                 if (!player.hasPermission("husksync.command.inventory")) {

@@ -294,7 +294,7 @@ public class VelocityCommand implements SimpleCommand, HuskSyncCommand {
                             HuskSyncVelocity.synchronisedServers)) {
                         plugin.getProxyServer().getScheduler().buildTask(plugin, () ->
                                 HuskSyncVelocity.mpdbMigrator.executeMigrationOperations(HuskSyncVelocity.dataManager,
-                                        HuskSyncVelocity.synchronisedServers));
+                                        HuskSyncVelocity.synchronisedServers)).schedule();
                     }
                 }
                 default -> sender.sendMessage(new MineDown("Error: Invalid argument for migration. Use \"husksync migrate\" to start the process").toComponent());

@@ -338,7 +338,7 @@ public class VelocityCommand implements SimpleCommand, HuskSyncCommand {
                         return;
                     }
                     viewer.sendMessage(new MineDown(MessageManager.getMessage("error_invalid_cluster")).toComponent());
-                });
+                }).schedule();
     }
 
     // View the ender chest of a player specified by their name

@@ -372,7 +372,7 @@ public class VelocityCommand implements SimpleCommand, HuskSyncCommand {
                         return;
                     }
                     viewer.sendMessage(new MineDown(MessageManager.getMessage("error_invalid_cluster")).toComponent());
-                });
+                }).schedule();
     }
 
     /**

@@ -406,6 +406,11 @@ public class VelocityCommand implements SimpleCommand, HuskSyncCommand {
             }
             subCommands.add(subCommand.command());
         }
+        // Return list of subcommands
+        if (args[0].length() == 0) {
+            return subCommands;
+        }
+
         // Automatically filter the sub commands' order in tab completion by what the player has typed
         return subCommands.stream().filter(val -> val.startsWith(args[0]))
                 .sorted().collect(Collectors.toList());

@@ -78,7 +78,7 @@ public class ConfigLoader {
         // Read cluster data
         ConfigurationNode clusterSection = config.getNode("clusters");
         final String settingDatabaseName = Settings.mySQLDatabase != null ? Settings.mySQLDatabase : "HuskSync";
-        for (ConfigurationNode cluster : clusterSection.getChildrenList()) {
+        for (ConfigurationNode cluster : clusterSection.getChildrenMap().values()) {
             final String clusterId = (String) cluster.getKey();
             final String playerTableName = getConfigString(config, "husksync_players", "clusters", clusterId, "player_table");
             final String dataTableName = getConfigString(config, "husksync_data", "clusters", clusterId, "data_table");

@@ -89,7 +89,7 @@ public class ConfigLoader {
 
     public static void loadMessageStrings(ConfigurationNode config) {
         final HashMap<String, String> messages = new HashMap<>();
-        for (ConfigurationNode message : config.getChildrenList()) {
+        for (ConfigurationNode message : config.getChildrenMap().values()) {
             final String messageId = (String) message.getKey();
             messages.put(messageId, getConfigString(config, "", messageId));
         }
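For context on the two getChildrenList() to getChildrenMap().values() fixes above: the clusters and messages sections are YAML mappings, and in the Configurate 3.x-style API used here, mapping nodes are iterated via getChildrenMap(), whose child nodes keep their string keys for getKey(); a mapping node has no list children, so the old getChildrenList() loops had nothing to iterate. A small illustrative sketch (the ninja.leaping import, the wrapper class, and the "main" cluster id are assumptions, not copied from HuskSync):

import ninja.leaping.configurate.ConfigurationNode;

final class ClusterSectionExample {

    // Assumed YAML shape (illustrative only):
    //   clusters:
    //     main:
    //       player_table: husksync_players
    //       data_table: husksync_data
    static void readClusterIds(ConfigurationNode config) {
        ConfigurationNode clusterSection = config.getNode("clusters");
        for (ConfigurationNode cluster : clusterSection.getChildrenMap().values()) {
            // Each child keeps its mapping key, so getKey() yields the cluster id
            // ("main" above); list children would only carry integer indices.
            String clusterId = (String) cluster.getKey();
            System.out.println("Found cluster: " + clusterId);
        }
    }
}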
@@ -82,6 +82,8 @@ public class ConfigManager {
             File configFile = new File(plugin.getDataFolder(), "messages_" + Settings.language + ".yml");
             return YAMLConfigurationLoader.builder()
                     .setPath(configFile.toPath())
+                    .setFlowStyle(DumperOptions.FlowStyle.BLOCK)
+                    .setIndent(2)
                     .build()
                     .load();
         } catch (IOException e) {

@@ -41,6 +41,6 @@ public class VelocityEventListener {
                     plugin.getVelocityLogger().log(Level.SEVERE, "Failed to serialize request data on join message data");
                     e.printStackTrace();
                 }
-            });
+            }).schedule();
     }
 }

@@ -6,6 +6,7 @@ import me.william278.husksync.HuskSyncVelocity;
 import me.william278.husksync.PlayerData;
 import me.william278.husksync.Server;
 import me.william278.husksync.Settings;
+import me.william278.husksync.migrator.MPDBMigrator;
 import me.william278.husksync.redis.RedisListener;
 import me.william278.husksync.redis.RedisMessage;
 import me.william278.husksync.util.MessageManager;

@@ -84,7 +85,7 @@ public class VelocityRedisListener extends RedisListener {
                         log(Level.SEVERE, "Failed to serialize data when replying to a data request");
                         e.printStackTrace();
                     }
-                });
+                }).schedule();
             }
             case PLAYER_DATA_UPDATE -> {
                 // Deserialize the PlayerData received

@@ -174,18 +175,20 @@ public class VelocityRedisListener extends RedisListener {
                     return;
                 }
 
-                //todo Migrator
-                /*// Add the incoming data to the data to be saved
-                MPDBMigrator.incomingPlayerData.put(playerData, playerName);
+                // Get the MPDB migrator
+                MPDBMigrator migrator = HuskSyncVelocity.mpdbMigrator;
+
+                // Add the incoming data to the data to be saved
+                migrator.incomingPlayerData.put(playerData, playerName);
 
                 // Increment players migrated
-                MPDBMigrator.playersMigrated++;
-                plugin.getBungeeLogger().log(Level.INFO, "Migrated " + MPDBMigrator.playersMigrated + "/" + MPDBMigrator.migratedDataSent + " players.");
+                migrator.playersMigrated++;
+                plugin.getVelocityLogger().log(Level.INFO, "Migrated " + migrator.playersMigrated + "/" + migrator.migratedDataSent + " players.");
 
                 // When all the data has been received, save it
-                if (MPDBMigrator.migratedDataSent == MPDBMigrator.playersMigrated) {
-                    MPDBMigrator.loadIncomingData(MPDBMigrator.incomingPlayerData);
-                }*/
+                if (migrator.migratedDataSent == migrator.playersMigrated) {
+                    migrator.loadIncomingData(migrator.incomingPlayerData, HuskSyncVelocity.dataManager);
+                }
             }
         }
     }
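One pattern worth calling out across the command, event listener, and Redis listener hunks above: every change from "});" to "}).schedule();" is the same fix. Velocity's Scheduler.buildTask(...) only returns a task builder; the task never runs unless schedule() is called on it. A minimal sketch of the corrected pattern (the server, plugin, and helper names here are assumed, not taken from the diff):

import com.velocitypowered.api.proxy.ProxyServer;

final class SchedulerExample {

    // Builds and actually schedules an asynchronous task on the Velocity scheduler.
    static void runAsync(ProxyServer server, Object plugin, Runnable work) {
        server.getScheduler()
                .buildTask(plugin, work) // returns a TaskBuilder; nothing runs yet
                .schedule();             // without this call the task is never executed
    }
}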