Mirror of https://github.com/WiIIiam278/HuskSync.git (synced 2025-12-28 11:09:11 +00:00)
Refactor; consolidate Logger and ResourceReader, simplify certain method arguments
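The pattern behind most of the hunks below: rather than injecting separate Logger and ResourceReader helpers, the plugin object itself exposes log(...) and getResource(...), so call sites shrink from plugin.getLoggingAdapter().log(...) to plugin.log(...) and from plugin.getResourceReader().getResource(...) to plugin.getResource(...). A minimal, self-contained sketch of that shape follows; the ConsolidatedPlugin class and its fields are illustrative stand-ins, not HuskSync's actual API.

    import java.io.InputStream;
    import java.util.Objects;
    import java.util.logging.Level;
    import java.util.logging.Logger;

    // Illustrative stand-in (not HuskSync's actual class): logging and resource
    // access live directly on the plugin object, replacing the separate
    // Logger / ResourceReader wrappers this commit deletes.
    final class ConsolidatedPlugin {

        private final Logger logger = Logger.getLogger("HuskSyncSketch");

        // Replaces calls such as plugin.getLoggingAdapter().log(level, message)
        void log(Level level, String message, Throwable... exceptions) {
            if (exceptions.length > 0) {
                logger.log(level, message, exceptions[0]);
            } else {
                logger.log(level, message);
            }
        }

        // Replaces plugin.getResourceReader().getResource(fileName); reads a file
        // bundled on the classpath (on Bukkit, JavaPlugin#getResource plays this role)
        InputStream getResource(String fileName) {
            return Objects.requireNonNull(
                    getClass().getClassLoader().getResourceAsStream(fileName),
                    "Missing bundled resource: " + fileName);
        }

        public static void main(String[] args) {
            ConsolidatedPlugin plugin = new ConsolidatedPlugin();
            // Call sites shrink from plugin.getLoggingAdapter().log(...) to plugin.log(...)
            plugin.log(Level.INFO, "Attempting to establish connection to the database...");
        }
    }

In the diff itself, BukkitHuskSync implements log(Level, String, Throwable...) by delegating to getLogger().log(...), and callers obtain bundled files via the plugin's getResource, which is why the BukkitLogger and BukkitResourceReader classes can be deleted outright.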
@@ -24,10 +24,6 @@ import net.william278.husksync.migrator.MpdbMigrator;
import net.william278.husksync.player.BukkitPlayer;
import net.william278.husksync.player.OnlineUser;
import net.william278.husksync.redis.RedisManager;
import net.william278.husksync.util.BukkitLogger;
import net.william278.husksync.util.BukkitResourceReader;
import net.william278.husksync.util.Logger;
import net.william278.husksync.util.ResourceReader;
import org.bstats.bukkit.Metrics;
import org.bukkit.Bukkit;
import org.bukkit.command.PluginCommand;
@@ -54,8 +50,6 @@ public class BukkitHuskSync extends JavaPlugin implements HuskSync {
private static final int METRICS_ID = 13140;
private Database database;
private RedisManager redisManager;
private Logger logger;
private ResourceReader resourceReader;
private EventListener eventListener;
private DataAdapter dataAdapter;
private EventCannon eventCannon;
@@ -85,19 +79,14 @@ public class BukkitHuskSync extends JavaPlugin implements HuskSync {
// Initialize HuskSync
final AtomicBoolean initialized = new AtomicBoolean(true);
try {
// Set the logging adapter and resource reader
this.logger = new BukkitLogger(this.getLogger());
this.resourceReader = new BukkitResourceReader(this);

// Create adventure audience
this.audiences = BukkitAudiences.create(this);

// Load settings and locales
getLoggingAdapter().log(Level.INFO, "Loading plugin configuration settings & locales...");
log(Level.INFO, "Loading plugin configuration settings & locales...");
initialized.set(reload().join());
if (initialized.get()) {
logger.showDebugLogs(settings.debugLogging);
getLoggingAdapter().log(Level.INFO, "Successfully loaded plugin configuration settings & locales");
log(Level.INFO, "Successfully loaded plugin configuration settings & locales");
} else {
throw new HuskSyncInitializationException("Failed to load plugin configuration settings and/or locales");
}
@@ -121,11 +110,11 @@ public class BukkitHuskSync extends JavaPlugin implements HuskSync {
}

// Prepare database connection
this.database = new MySqlDatabase(settings, resourceReader, logger, dataAdapter, eventCannon);
getLoggingAdapter().log(Level.INFO, "Attempting to establish connection to the database...");
this.database = new MySqlDatabase(this);
log(Level.INFO, "Attempting to establish connection to the database...");
initialized.set(this.database.initialize());
if (initialized.get()) {
getLoggingAdapter().log(Level.INFO, "Successfully established a connection to the database");
log(Level.INFO, "Successfully established a connection to the database");
} else {
throw new HuskSyncInitializationException("Failed to establish a connection to the database. " +
"Please check the supplied database credentials in the config file");
@@ -133,22 +122,22 @@ public class BukkitHuskSync extends JavaPlugin implements HuskSync {

// Prepare redis connection
this.redisManager = new RedisManager(this);
getLoggingAdapter().log(Level.INFO, "Attempting to establish connection to the Redis server...");
log(Level.INFO, "Attempting to establish connection to the Redis server...");
initialized.set(this.redisManager.initialize());
if (initialized.get()) {
getLoggingAdapter().log(Level.INFO, "Successfully established a connection to the Redis server");
log(Level.INFO, "Successfully established a connection to the Redis server");
} else {
throw new HuskSyncInitializationException("Failed to establish a connection to the Redis server. " +
"Please check the supplied Redis credentials in the config file");
}

// Register events
getLoggingAdapter().log(Level.INFO, "Registering events...");
log(Level.INFO, "Registering events...");
this.eventListener = new BukkitEventListener(this);
getLoggingAdapter().log(Level.INFO, "Successfully registered events listener");
log(Level.INFO, "Successfully registered events listener");

// Register permissions
getLoggingAdapter().log(Level.INFO, "Registering permissions & commands...");
log(Level.INFO, "Registering permissions & commands...");
Arrays.stream(Permission.values()).forEach(permission -> getServer().getPluginManager()
.addPermission(new org.bukkit.permissions.Permission(permission.node, switch (permission.defaultAccess) {
case EVERYONE -> PermissionDefault.TRUE;
@@ -163,32 +152,32 @@ public class BukkitHuskSync extends JavaPlugin implements HuskSync {
new BukkitCommand(bukkitCommandType.commandBase, this).register(pluginCommand);
}
}
getLoggingAdapter().log(Level.INFO, "Successfully registered permissions & commands");
log(Level.INFO, "Successfully registered permissions & commands");

// Hook into plan
if (Bukkit.getPluginManager().getPlugin("Plan") != null) {
getLoggingAdapter().log(Level.INFO, "Enabling Plan integration...");
new PlanHook(database, logger).hookIntoPlan();
getLoggingAdapter().log(Level.INFO, "Plan integration enabled!");
log(Level.INFO, "Enabling Plan integration...");
new PlanHook(this).hookIntoPlan();
log(Level.INFO, "Plan integration enabled!");
}

// Hook into bStats metrics
try {
new Metrics(this, METRICS_ID);
} catch (final Exception e) {
getLoggingAdapter().log(Level.WARNING, "Skipped bStats metrics initialization due to an exception.");
log(Level.WARNING, "Skipped bStats metrics initialization due to an exception.");
}

// Check for updates
if (settings.checkForUpdates) {
getLoggingAdapter().log(Level.INFO, "Checking for updates...");
log(Level.INFO, "Checking for updates...");
getLatestVersionIfOutdated().thenAccept(newestVersion ->
newestVersion.ifPresent(newVersion -> getLoggingAdapter().log(Level.WARNING,
newestVersion.ifPresent(newVersion -> log(Level.WARNING,
"An update is available for HuskSync, v" + newVersion
+ " (Currently running v" + getPluginVersion() + ")")));
}
} catch (HuskSyncInitializationException exception) {
getLoggingAdapter().log(Level.SEVERE, """
log(Level.SEVERE, """
***************************************************

Failed to initialize HuskSync!
@@ -203,14 +192,14 @@ public class BukkitHuskSync extends JavaPlugin implements HuskSync {
.replaceAll("%error_message%", exception.getMessage()));
initialized.set(false);
} catch (Exception exception) {
getLoggingAdapter().log(Level.SEVERE, "An unhandled exception occurred initializing HuskSync!", exception);
log(Level.SEVERE, "An unhandled exception occurred initializing HuskSync!", exception);
initialized.set(false);
} finally {
// Validate initialization
if (initialized.get()) {
getLoggingAdapter().log(Level.INFO, "Successfully enabled HuskSync v" + getPluginVersion());
log(Level.INFO, "Successfully enabled HuskSync v" + getPluginVersion());
} else {
getLoggingAdapter().log(Level.SEVERE, "Failed to initialize HuskSync. The plugin will now be disabled");
log(Level.SEVERE, "Failed to initialize HuskSync. The plugin will now be disabled");
getServer().getPluginManager().disablePlugin(this);
}
}
@@ -221,7 +210,7 @@ public class BukkitHuskSync extends JavaPlugin implements HuskSync {
if (this.eventListener != null) {
this.eventListener.handlePluginDisable();
}
getLoggingAdapter().log(Level.INFO, "Successfully disabled HuskSync v" + getPluginVersion());
log(Level.INFO, "Successfully disabled HuskSync v" + getPluginVersion());
}

@Override
@@ -275,14 +264,8 @@ public class BukkitHuskSync extends JavaPlugin implements HuskSync {
}

@Override
public @NotNull Logger getLoggingAdapter() {
return logger;
}

@NotNull
@Override
public ResourceReader getResourceReader() {
return resourceReader;
public void log(@NotNull Level level, @NotNull String message, @NotNull Throwable... throwable) {
getLogger().log(level, message, throwable);
}

@NotNull
@@ -327,7 +310,7 @@ public class BukkitHuskSync extends JavaPlugin implements HuskSync {
return true;
} catch (IOException | NullPointerException | InvocationTargetException | IllegalAccessException |
InstantiationException e) {
getLoggingAdapter().log(Level.SEVERE, "Failed to load data from the config", e);
log(Level.SEVERE, "Failed to load data from the config", e);
return false;
}
});

@@ -18,13 +18,12 @@ public class BrigadierUtil {
protected static void registerCommodore(@NotNull BukkitHuskSync plugin, @NotNull PluginCommand pluginCommand,
@NotNull CommandBase command) {
// Register command descriptions via commodore (brigadier wrapper)
try (InputStream pluginFile = plugin.getResourceReader()
.getResource("commodore/" + command.command + ".commodore")) {
try (InputStream pluginFile = plugin.getResource("commodore/" + command.command + ".commodore")) {
CommodoreProvider.getCommodore(plugin).register(pluginCommand,
CommodoreFileReader.INSTANCE.parse(pluginFile),
player -> player.hasPermission(command.permission));
} catch (IOException e) {
plugin.getLoggingAdapter().log(Level.SEVERE,
plugin.log(Level.SEVERE,
"Failed to load " + command.command + ".commodore command definitions", e);
}
}

@@ -48,7 +48,7 @@ public class BukkitMapHandler {
}

// Get the map data
plugin.getLoggingAdapter().debug("Rendering map view onto canvas for locked map");
plugin.debug("Rendering map view onto canvas for locked map");
final LockedMapCanvas canvas = new LockedMapCanvas(mapView);
for (MapRenderer renderer : mapView.getRenderers()) {
renderer.render(mapView, canvas, Bukkit.getServer()
@@ -58,7 +58,7 @@ public class BukkitMapHandler {
}

// Save the extracted rendered map data
plugin.getLoggingAdapter().debug("Saving pixel canvas data for locked map");
plugin.debug("Saving pixel canvas data for locked map");
if (!mapMeta.getPersistentDataContainer().has(MAP_DATA_KEY, PersistentDataType.BYTE_ARRAY)) {
mapMeta.getPersistentDataContainer().set(MAP_DATA_KEY, PersistentDataType.BYTE_ARRAY,
canvas.extractMapData().toBytes());
@@ -89,7 +89,7 @@ public class BukkitMapHandler {
final byte[] serializedData = itemStack.getItemMeta().getPersistentDataContainer()
.get(MAP_DATA_KEY, PersistentDataType.BYTE_ARRAY);
final MapData mapData = MapData.fromByteArray(Objects.requireNonNull(serializedData));
plugin.getLoggingAdapter().debug("Setting deserialized map data for an item stack");
plugin.debug("Setting deserialized map data for an item stack");

// Create a new map view renderer with the map data color at each pixel
final MapView view = Bukkit.createMap(Bukkit.getWorlds().get(0));
@@ -101,7 +101,7 @@ public class BukkitMapHandler {
view.setUnlimitedTracking(false);
mapMeta.setMapView(view);
itemStack.setItemMeta(mapMeta);
plugin.getLoggingAdapter().debug("Successfully applied renderer to map item stack");
plugin.debug("Successfully applied renderer to map item stack");
} catch (IOException | NullPointerException e) {
plugin.getLogger().log(Level.WARNING, "Failed to deserialize map data for a player", e);
}

@@ -52,7 +52,7 @@ public class BukkitSerializer {
// Return encoded data, using the encoder from SnakeYaml to get a ByteArray conversion
return Base64Coder.encodeLines(byteOutputStream.toByteArray());
} catch (IOException e) {
BukkitHuskSync.getInstance().getLoggingAdapter().log(Level.SEVERE, "Failed to serialize item stack data", e);
BukkitHuskSync.getInstance().log(Level.SEVERE, "Failed to serialize item stack data", e);
throw new DataSerializationException("Failed to serialize item stack data", e);
}
});
@@ -108,7 +108,7 @@ public class BukkitSerializer {
return inventoryContents;
}
} catch (IOException | ClassNotFoundException e) {
BukkitHuskSync.getInstance().getLoggingAdapter().log(Level.SEVERE, "Failed to deserialize item stack data", e);
BukkitHuskSync.getInstance().log(Level.SEVERE, "Failed to deserialize item stack data", e);
throw new DataSerializationException("Failed to deserialize item stack data", e);
}
});
@@ -165,7 +165,7 @@ public class BukkitSerializer {
// Return encoded data, using the encoder from SnakeYaml to get a ByteArray conversion
return Base64Coder.encodeLines(byteOutputStream.toByteArray());
} catch (IOException e) {
BukkitHuskSync.getInstance().getLoggingAdapter().log(Level.SEVERE, "Failed to serialize potion effect data", e);
BukkitHuskSync.getInstance().log(Level.SEVERE, "Failed to serialize potion effect data", e);
throw new DataSerializationException("Failed to serialize potion effect data", e);
}
});
@@ -201,7 +201,7 @@ public class BukkitSerializer {
return potionEffects;
}
} catch (IOException | ClassNotFoundException e) {
BukkitHuskSync.getInstance().getLoggingAdapter().log(Level.SEVERE, "Failed to deserialize potion effect data", e);
BukkitHuskSync.getInstance().log(Level.SEVERE, "Failed to deserialize potion effect data", e);
throw new DataSerializationException("Failed to deserialize potion effects", e);
}
});

@@ -49,26 +49,26 @@ public class LegacyMigrator extends Migrator {

@Override
public CompletableFuture<Boolean> start() {
plugin.getLoggingAdapter().log(Level.INFO, "Starting migration of legacy HuskSync v1.x data...");
plugin.log(Level.INFO, "Starting migration of legacy HuskSync v1.x data...");
final long startTime = System.currentTimeMillis();
return CompletableFuture.supplyAsync(() -> {
// Wipe the existing database, preparing it for data import
plugin.getLoggingAdapter().log(Level.INFO, "Preparing existing database (wiping)...");
plugin.log(Level.INFO, "Preparing existing database (wiping)...");
plugin.getDatabase().wipeDatabase().join();
plugin.getLoggingAdapter().log(Level.INFO, "Successfully wiped user data database (took " + (System.currentTimeMillis() - startTime) + "ms)");
plugin.log(Level.INFO, "Successfully wiped user data database (took " + (System.currentTimeMillis() - startTime) + "ms)");

// Create jdbc driver connection url
final String jdbcUrl = "jdbc:mysql://" + sourceHost + ":" + sourcePort + "/" + sourceDatabase;

// Create a new data source for the mpdb converter
try (final HikariDataSource connectionPool = new HikariDataSource()) {
plugin.getLoggingAdapter().log(Level.INFO, "Establishing connection to legacy database...");
plugin.log(Level.INFO, "Establishing connection to legacy database...");
connectionPool.setJdbcUrl(jdbcUrl);
connectionPool.setUsername(sourceUsername);
connectionPool.setPassword(sourcePassword);
connectionPool.setPoolName((getIdentifier() + "_migrator_pool").toUpperCase());

plugin.getLoggingAdapter().log(Level.INFO, "Downloading raw data from the legacy database (this might take a while)...");
plugin.log(Level.INFO, "Downloading raw data from the legacy database (this might take a while)...");
final List<LegacyData> dataToMigrate = new ArrayList<>();
try (final Connection connection = connectionPool.getConnection()) {
try (final PreparedStatement statement = connection.prepareStatement("""
@@ -106,33 +106,33 @@ public class LegacyMigrator extends Migrator {
));
playersMigrated++;
if (playersMigrated % 50 == 0) {
plugin.getLoggingAdapter().log(Level.INFO, "Downloaded legacy data for " + playersMigrated + " players...");
plugin.log(Level.INFO, "Downloaded legacy data for " + playersMigrated + " players...");
}
}
}
}
}
plugin.getLoggingAdapter().log(Level.INFO, "Completed download of " + dataToMigrate.size() + " entries from the legacy database!");
plugin.getLoggingAdapter().log(Level.INFO, "Converting HuskSync 1.x data to the new user data format (this might take a while)...");
plugin.log(Level.INFO, "Completed download of " + dataToMigrate.size() + " entries from the legacy database!");
plugin.log(Level.INFO, "Converting HuskSync 1.x data to the new user data format (this might take a while)...");

final AtomicInteger playersConverted = new AtomicInteger();
dataToMigrate.forEach(data -> data.toUserData(hslConverter, minecraftVersion).thenAccept(convertedData -> {
plugin.getDatabase().ensureUser(data.user()).thenRun(() ->
plugin.getDatabase().setUserData(data.user(), convertedData, DataSaveCause.LEGACY_MIGRATION)
.exceptionally(exception -> {
plugin.getLoggingAdapter().log(Level.SEVERE, "Failed to migrate legacy data for " + data.user().username + ": " + exception.getMessage());
plugin.log(Level.SEVERE, "Failed to migrate legacy data for " + data.user().username + ": " + exception.getMessage());
return null;
})).join();

playersConverted.getAndIncrement();
if (playersConverted.get() % 50 == 0) {
plugin.getLoggingAdapter().log(Level.INFO, "Converted legacy data for " + playersConverted + " players...");
plugin.log(Level.INFO, "Converted legacy data for " + playersConverted + " players...");
}
}).join());
plugin.getLoggingAdapter().log(Level.INFO, "Migration complete for " + dataToMigrate.size() + " users in " + ((System.currentTimeMillis() - startTime) / 1000) + " seconds!");
plugin.log(Level.INFO, "Migration complete for " + dataToMigrate.size() + " users in " + ((System.currentTimeMillis() - startTime) / 1000) + " seconds!");
return true;
} catch (Exception e) {
plugin.getLoggingAdapter().log(Level.SEVERE, "Error while migrating legacy data: " + e.getMessage() + " - are your source database credentials correct?");
plugin.log(Level.SEVERE, "Error while migrating legacy data: " + e.getMessage() + " - are your source database credentials correct?");
return false;
}
});
@@ -176,15 +176,15 @@ public class LegacyMigrator extends Migrator {
}
default -> false;
}) {
plugin.getLoggingAdapter().log(Level.INFO, getHelpMenu());
plugin.getLoggingAdapter().log(Level.INFO, "Successfully set " + args[0] + " to " +
plugin.log(Level.INFO, getHelpMenu());
plugin.log(Level.INFO, "Successfully set " + args[0] + " to " +
obfuscateDataString(args[1]));
} else {
plugin.getLoggingAdapter().log(Level.INFO, "Invalid operation, could not set " + args[0] + " to " +
plugin.log(Level.INFO, "Invalid operation, could not set " + args[0] + " to " +
obfuscateDataString(args[1]) + " (is it a valid option?)");
}
} else {
plugin.getLoggingAdapter().log(Level.INFO, getHelpMenu());
plugin.log(Level.INFO, getHelpMenu());
}
}

@@ -56,26 +56,26 @@ public class MpdbMigrator extends Migrator {

@Override
public CompletableFuture<Boolean> start() {
plugin.getLoggingAdapter().log(Level.INFO, "Starting migration from MySQLPlayerDataBridge to HuskSync...");
plugin.log(Level.INFO, "Starting migration from MySQLPlayerDataBridge to HuskSync...");
final long startTime = System.currentTimeMillis();
return CompletableFuture.supplyAsync(() -> {
// Wipe the existing database, preparing it for data import
plugin.getLoggingAdapter().log(Level.INFO, "Preparing existing database (wiping)...");
plugin.log(Level.INFO, "Preparing existing database (wiping)...");
plugin.getDatabase().wipeDatabase().join();
plugin.getLoggingAdapter().log(Level.INFO, "Successfully wiped user data database (took " + (System.currentTimeMillis() - startTime) + "ms)");
plugin.log(Level.INFO, "Successfully wiped user data database (took " + (System.currentTimeMillis() - startTime) + "ms)");

// Create jdbc driver connection url
final String jdbcUrl = "jdbc:mysql://" + sourceHost + ":" + sourcePort + "/" + sourceDatabase;

// Create a new data source for the mpdb converter
try (final HikariDataSource connectionPool = new HikariDataSource()) {
plugin.getLoggingAdapter().log(Level.INFO, "Establishing connection to MySQLPlayerDataBridge database...");
plugin.log(Level.INFO, "Establishing connection to MySQLPlayerDataBridge database...");
connectionPool.setJdbcUrl(jdbcUrl);
connectionPool.setUsername(sourceUsername);
connectionPool.setPassword(sourcePassword);
connectionPool.setPoolName((getIdentifier() + "_migrator_pool").toUpperCase());

plugin.getLoggingAdapter().log(Level.INFO, "Downloading raw data from the MySQLPlayerDataBridge database (this might take a while)...");
plugin.log(Level.INFO, "Downloading raw data from the MySQLPlayerDataBridge database (this might take a while)...");
final List<MpdbData> dataToMigrate = new ArrayList<>();
try (final Connection connection = connectionPool.getConnection()) {
try (final PreparedStatement statement = connection.prepareStatement("""
@@ -103,32 +103,32 @@ public class MpdbMigrator extends Migrator {
));
playersMigrated++;
if (playersMigrated % 25 == 0) {
plugin.getLoggingAdapter().log(Level.INFO, "Downloaded MySQLPlayerDataBridge data for " + playersMigrated + " players...");
plugin.log(Level.INFO, "Downloaded MySQLPlayerDataBridge data for " + playersMigrated + " players...");
}
}
}
}
}
plugin.getLoggingAdapter().log(Level.INFO, "Completed download of " + dataToMigrate.size() + " entries from the MySQLPlayerDataBridge database!");
plugin.getLoggingAdapter().log(Level.INFO, "Converting raw MySQLPlayerDataBridge data to HuskSync user data (this might take a while)...");
plugin.log(Level.INFO, "Completed download of " + dataToMigrate.size() + " entries from the MySQLPlayerDataBridge database!");
plugin.log(Level.INFO, "Converting raw MySQLPlayerDataBridge data to HuskSync user data (this might take a while)...");

final AtomicInteger playersConverted = new AtomicInteger();
dataToMigrate.forEach(data -> data.toUserData(mpdbConverter, minecraftVersion).thenAccept(convertedData -> {
plugin.getDatabase().ensureUser(data.user()).thenRun(() ->
plugin.getDatabase().setUserData(data.user(), convertedData, DataSaveCause.MPDB_MIGRATION))
.exceptionally(exception -> {
plugin.getLoggingAdapter().log(Level.SEVERE, "Failed to migrate MySQLPlayerDataBridge data for " + data.user().username + ": " + exception.getMessage());
plugin.log(Level.SEVERE, "Failed to migrate MySQLPlayerDataBridge data for " + data.user().username + ": " + exception.getMessage());
return null;
}).join();
playersConverted.getAndIncrement();
if (playersConverted.get() % 50 == 0) {
plugin.getLoggingAdapter().log(Level.INFO, "Converted MySQLPlayerDataBridge data for " + playersConverted + " players...");
plugin.log(Level.INFO, "Converted MySQLPlayerDataBridge data for " + playersConverted + " players...");
}
}).join());
plugin.getLoggingAdapter().log(Level.INFO, "Migration complete for " + dataToMigrate.size() + " users in " + ((System.currentTimeMillis() - startTime) / 1000) + " seconds!");
plugin.log(Level.INFO, "Migration complete for " + dataToMigrate.size() + " users in " + ((System.currentTimeMillis() - startTime) / 1000) + " seconds!");
return true;
} catch (Exception e) {
plugin.getLoggingAdapter().log(Level.SEVERE, "Error while migrating data: " + e.getMessage() + " - are your source database credentials correct?");
plugin.log(Level.SEVERE, "Error while migrating data: " + e.getMessage() + " - are your source database credentials correct?");
return false;
}
});
@@ -176,15 +176,15 @@ public class MpdbMigrator extends Migrator {
}
default -> false;
}) {
plugin.getLoggingAdapter().log(Level.INFO, getHelpMenu());
plugin.getLoggingAdapter().log(Level.INFO, "Successfully set " + args[0] + " to " +
plugin.log(Level.INFO, getHelpMenu());
plugin.log(Level.INFO, "Successfully set " + args[0] + " to " +
obfuscateDataString(args[1]));
} else {
plugin.getLoggingAdapter().log(Level.INFO, "Invalid operation, could not set " + args[0] + " to " +
plugin.log(Level.INFO, "Invalid operation, could not set " + args[0] + " to " +
obfuscateDataString(args[1]) + " (is it a valid option?)");
}
} else {
plugin.getLoggingAdapter().log(Level.INFO, getHelpMenu());
plugin.log(Level.INFO, getHelpMenu());
}
}

@@ -60,6 +60,7 @@ public class BukkitPlayer extends OnlineUser {
this.audience = BukkitHuskSync.getInstance().getAudiences().player(player);
}

@NotNull
public static BukkitPlayer adapt(@NotNull Player player) {
return new BukkitPlayer(player);
}
@@ -507,7 +508,7 @@ public class BukkitPlayer extends OnlineUser {
}
return new PersistentDataContainerData(persistentDataMap);
}).exceptionally(throwable -> {
BukkitHuskSync.getInstance().getLoggingAdapter().log(Level.WARNING,
BukkitHuskSync.getInstance().log(Level.WARNING,
"Could not read " + player.getName() + "'s persistent data map, skipping!");
throwable.printStackTrace();
return new PersistentDataContainerData(new HashMap<>());
@@ -515,65 +516,62 @@ public class BukkitPlayer extends OnlineUser {
}

@Override
public CompletableFuture<Void> setPersistentDataContainer(@NotNull PersistentDataContainerData persistentDataContainerData) {
public CompletableFuture<Void> setPersistentDataContainer(@NotNull PersistentDataContainerData container) {
return CompletableFuture.runAsync(() -> {
player.getPersistentDataContainer().getKeys().forEach(namespacedKey ->
player.getPersistentDataContainer().remove(namespacedKey));
persistentDataContainerData.getTags().forEach(keyString -> {
container.getTags().forEach(keyString -> {
final NamespacedKey key = NamespacedKey.fromString(keyString);
if (key != null) {
// Set a tag with the given key and value. This is crying out for a refactor.
persistentDataContainerData.getTagType(keyString).ifPresentOrElse(dataType -> {
container.getTagType(keyString).ifPresentOrElse(dataType -> {
switch (dataType) {
case BYTE -> persistentDataContainerData.getTagValue(keyString, byte.class).ifPresent(
case BYTE -> container.getTagValue(keyString, byte.class).ifPresent(
value -> player.getPersistentDataContainer().set(key,
PersistentDataType.BYTE, value));
case SHORT -> persistentDataContainerData.getTagValue(keyString, short.class).ifPresent(
case SHORT -> container.getTagValue(keyString, short.class).ifPresent(
value -> player.getPersistentDataContainer().set(key,
PersistentDataType.SHORT, value));
case INTEGER -> persistentDataContainerData.getTagValue(keyString, int.class).ifPresent(
case INTEGER -> container.getTagValue(keyString, int.class).ifPresent(
value -> player.getPersistentDataContainer().set(key,
PersistentDataType.INTEGER, value));
case LONG -> persistentDataContainerData.getTagValue(keyString, long.class).ifPresent(
case LONG -> container.getTagValue(keyString, long.class).ifPresent(
value -> player.getPersistentDataContainer().set(key,
PersistentDataType.LONG, value));
case FLOAT -> persistentDataContainerData.getTagValue(keyString, float.class).ifPresent(
case FLOAT -> container.getTagValue(keyString, float.class).ifPresent(
value -> player.getPersistentDataContainer().set(key,
PersistentDataType.FLOAT, value));
case DOUBLE -> persistentDataContainerData.getTagValue(keyString, double.class).ifPresent(
case DOUBLE -> container.getTagValue(keyString, double.class).ifPresent(
value -> player.getPersistentDataContainer().set(key,
PersistentDataType.DOUBLE, value));
case STRING -> persistentDataContainerData.getTagValue(keyString, String.class).ifPresent(
case STRING -> container.getTagValue(keyString, String.class).ifPresent(
value -> player.getPersistentDataContainer().set(key,
PersistentDataType.STRING, value));
case BYTE_ARRAY ->
persistentDataContainerData.getTagValue(keyString, byte[].class).ifPresent(
value -> player.getPersistentDataContainer().set(key,
PersistentDataType.BYTE_ARRAY, value));
case INTEGER_ARRAY ->
persistentDataContainerData.getTagValue(keyString, int[].class).ifPresent(
value -> player.getPersistentDataContainer().set(key,
PersistentDataType.INTEGER_ARRAY, value));
case LONG_ARRAY ->
persistentDataContainerData.getTagValue(keyString, long[].class).ifPresent(
value -> player.getPersistentDataContainer().set(key,
PersistentDataType.LONG_ARRAY, value));
case BYTE_ARRAY -> container.getTagValue(keyString, byte[].class).ifPresent(
value -> player.getPersistentDataContainer().set(key,
PersistentDataType.BYTE_ARRAY, value));
case INTEGER_ARRAY -> container.getTagValue(keyString, int[].class).ifPresent(
value -> player.getPersistentDataContainer().set(key,
PersistentDataType.INTEGER_ARRAY, value));
case LONG_ARRAY -> container.getTagValue(keyString, long[].class).ifPresent(
value -> player.getPersistentDataContainer().set(key,
PersistentDataType.LONG_ARRAY, value));
case TAG_CONTAINER ->
persistentDataContainerData.getTagValue(keyString, PersistentDataContainer.class).ifPresent(
container.getTagValue(keyString, PersistentDataContainer.class).ifPresent(
value -> player.getPersistentDataContainer().set(key,
PersistentDataType.TAG_CONTAINER, value));
case TAG_CONTAINER_ARRAY ->
persistentDataContainerData.getTagValue(keyString, PersistentDataContainer[].class).ifPresent(
container.getTagValue(keyString, PersistentDataContainer[].class).ifPresent(
value -> player.getPersistentDataContainer().set(key,
PersistentDataType.TAG_CONTAINER_ARRAY, value));
}
}, () -> BukkitHuskSync.getInstance().getLoggingAdapter().log(Level.WARNING,
}, () -> BukkitHuskSync.getInstance().log(Level.WARNING,
"Could not set " + player.getName() + "'s persistent data key " + keyString +
" as it has an invalid type. Skipping!"));
}
});
}).exceptionally(throwable -> {
BukkitHuskSync.getInstance().getLoggingAdapter().log(Level.WARNING,
BukkitHuskSync.getInstance().log(Level.WARNING,
"Could not write " + player.getName() + "'s persistent data map, skipping!");
throwable.printStackTrace();
return null;

@@ -1,40 +0,0 @@
package net.william278.husksync.util;

import org.jetbrains.annotations.NotNull;

import java.util.logging.Level;

public class BukkitLogger extends Logger {

private final java.util.logging.Logger logger;

public BukkitLogger(@NotNull java.util.logging.Logger logger) {
this.logger = logger;
}

@Override
public void log(@NotNull Level level, @NotNull String message, @NotNull Throwable e) {
logger.log(level, message, e);
}

@Override
public void log(@NotNull Level level, @NotNull String message) {
logger.log(level, message);
}

@Override
public void info(@NotNull String message) {
logger.info(message);
}

@Override
public void severe(@NotNull String message) {
logger.severe(message);
}

@Override
public void config(@NotNull String message) {
logger.config(message);
}

}
@@ -1,22 +0,0 @@
package net.william278.husksync.util;

import net.william278.husksync.BukkitHuskSync;
import org.jetbrains.annotations.NotNull;

import java.io.InputStream;
import java.util.Objects;

public class BukkitResourceReader implements ResourceReader {

private final BukkitHuskSync plugin;

public BukkitResourceReader(BukkitHuskSync plugin) {
this.plugin = plugin;
}

@Override
public @NotNull InputStream getResource(String fileName) {
return Objects.requireNonNull(plugin.getResource(fileName));
}

}