From ef507b147a8a0c7d209b62303de50fb895851c82 Mon Sep 17 00:00:00 2001 From: Luck Date: Sat, 26 Oct 2024 11:08:49 +0100 Subject: [PATCH] Improve error logging in various places --- .../lucko/spark/bukkit/BukkitSparkPlugin.java | 5 ++ .../bungeecord/BungeeCordSparkPlugin.java | 5 ++ .../me/lucko/spark/common/SparkPlatform.java | 7 +- .../me/lucko/spark/common/SparkPlugin.java | 12 +-- .../common/command/modules/HealthModule.java | 3 +- .../command/modules/HeapAnalysisModule.java | 11 +-- .../common/command/modules/SamplerModule.java | 11 +-- .../platform/PlatformStatisticsProvider.java | 2 +- .../spark/common/platform/SparkMetadata.java | 8 +- .../spark/common/sampler/AbstractSampler.java | 3 +- .../sampler/BackgroundSamplerManager.java | 2 +- .../common/sampler/async/AsyncSampler.java | 7 +- .../sampler/source/ClassSourceLookup.java | 3 +- .../window/WindowStatisticsCollector.java | 2 +- .../spark/common/util/SparkThreadFactory.java | 5 +- .../lucko/spark/common/util/log/Logger.java | 80 +++++++++++++++++++ .../util/{ => log}/SparkStaticLogger.java | 22 ++--- .../lucko/spark/common/ws/ViewerSocket.java | 3 +- .../common/ws/ViewerSocketConnection.java | 9 +-- .../spark/test/plugin/TestSparkPlugin.java | 5 ++ .../fabric/plugin/FabricSparkPlugin.java | 21 +++-- .../spark/forge/plugin/ForgeSparkPlugin.java | 21 +++-- .../neoforge/plugin/NeoForgeSparkPlugin.java | 21 +++-- .../lucko/spark/paper/PaperSparkPlugin.java | 5 ++ .../lucko/spark/sponge/SpongeSparkPlugin.java | 21 +++-- .../spark/velocity/VelocitySparkPlugin.java | 21 +++-- 26 files changed, 222 insertions(+), 93 deletions(-) create mode 100644 spark-common/src/main/java/me/lucko/spark/common/util/log/Logger.java rename spark-common/src/main/java/me/lucko/spark/common/util/{ => log}/SparkStaticLogger.java (76%) diff --git a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java index afab5554..affb5436 100644 --- 
a/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java +++ b/spark-bukkit/src/main/java/me/lucko/spark/bukkit/BukkitSparkPlugin.java @@ -153,6 +153,11 @@ public void log(Level level, String msg) { getLogger().log(level, msg); } + @Override + public void log(Level level, String msg, Throwable throwable) { + getLogger().log(level, msg, throwable); + } + @Override public ThreadDumper getDefaultThreadDumper() { return this.gameThreadDumper; diff --git a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordSparkPlugin.java b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordSparkPlugin.java index 85d72c77..c8cafb32 100644 --- a/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordSparkPlugin.java +++ b/spark-bungeecord/src/main/java/me/lucko/spark/bungeecord/BungeeCordSparkPlugin.java @@ -87,6 +87,11 @@ public void log(Level level, String msg) { getLogger().log(level, msg); } + @Override + public void log(Level level, String msg, Throwable throwable) { + getLogger().log(level, msg, throwable); + } + @Override public ClassSourceLookup createClassSourceLookup() { return new BungeeCordClassSourceLookup(); diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java index bedc605f..0466e75d 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java +++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlatform.java @@ -53,12 +53,12 @@ import me.lucko.spark.common.tick.TickHook; import me.lucko.spark.common.tick.TickReporter; import me.lucko.spark.common.util.BytebinClient; -import me.lucko.spark.common.util.SparkStaticLogger; import me.lucko.spark.common.util.TemporaryFiles; import me.lucko.spark.common.util.classfinder.ClassFinder; import me.lucko.spark.common.util.config.Configuration; import me.lucko.spark.common.util.config.FileConfiguration; import 
me.lucko.spark.common.util.config.RuntimeConfiguration; +import me.lucko.spark.common.util.log.SparkStaticLogger; import me.lucko.spark.common.ws.TrustedKeyStore; import net.kyori.adventure.text.Component; import net.kyori.adventure.text.event.ClickEvent; @@ -124,7 +124,7 @@ public class SparkPlatform { public SparkPlatform(SparkPlugin plugin) { this.plugin = plugin; - SparkStaticLogger.setLogger(plugin::log); + SparkStaticLogger.setLogger(plugin); this.temporaryFiles = new TemporaryFiles(this.plugin.getPlatformInfo().getType() == PlatformInfo.Type.CLIENT ? this.plugin.getPluginDirectory().resolve("tmp") @@ -370,8 +370,7 @@ public CompletableFuture executeCommand(CommandSender sender, String[] arg executeCommand0(sender, args); future.complete(null); } catch (Exception e) { - this.plugin.log(Level.SEVERE, "Exception occurred whilst executing a spark command"); - e.printStackTrace(); + this.plugin.log(Level.SEVERE, "Exception occurred whilst executing a spark command", e); future.completeExceptionally(e); } finally { this.commandExecuteLock.unlock(); diff --git a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java index bf745dfd..9901bdb9 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java +++ b/spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java @@ -36,17 +36,17 @@ import me.lucko.spark.common.util.classfinder.ClassFinder; import me.lucko.spark.common.util.classfinder.FallbackClassFinder; import me.lucko.spark.common.util.classfinder.InstrumentationClassFinder; +import me.lucko.spark.common.util.log.Logger; import java.nio.file.Path; import java.util.Collection; import java.util.Collections; -import java.util.logging.Level; import java.util.stream.Stream; /** * Spark plugin interface */ -public interface SparkPlugin { +public interface SparkPlugin extends Logger { /** * Gets the version of the plugin. 
@@ -92,14 +92,6 @@ default void executeSync(Runnable task) { throw new UnsupportedOperationException(); } - /** - * Print to the plugin logger. - * - * @param level the log level - * @param msg the message - */ - void log(Level level, String msg); - /** * Gets the default {@link ThreadDumper} to be used by the plugin. * diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java index fbf79ef6..6bfdd0ef 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HealthModule.java @@ -57,6 +57,7 @@ import java.util.Map; import java.util.Set; import java.util.function.Consumer; +import java.util.logging.Level; import static net.kyori.adventure.text.Component.empty; import static net.kyori.adventure.text.Component.space; @@ -249,7 +250,7 @@ private static void uploadHealthReport(SparkPlatform platform, CommandSender sen platform.getActivityLog().addToLog(Activity.urlActivity(resp.senderData(), System.currentTimeMillis(), "Health report", url)); } catch (Exception e) { resp.broadcastPrefixed(text("An error occurred whilst uploading the data.", RED)); - e.printStackTrace(); + platform.getPlugin().log(Level.SEVERE, "An error occurred whilst uploading data", e); } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java index 54f7df1f..9a97ca7e 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/HeapAnalysisModule.java @@ -44,6 +44,7 @@ import java.util.concurrent.atomic.AtomicLong; import java.util.function.Consumer; import java.util.function.LongConsumer; +import java.util.logging.Level; 
import static net.kyori.adventure.text.Component.text; import static net.kyori.adventure.text.format.NamedTextColor.GOLD; @@ -85,7 +86,7 @@ private static void heapSummary(SparkPlatform platform, CommandSender sender, Co heapDump = HeapDumpSummary.createNew(); } catch (Exception e) { resp.broadcastPrefixed(text("An error occurred whilst inspecting the heap.", RED)); - e.printStackTrace(); + platform.getPlugin().log(Level.SEVERE, "An error occurred whilst inspecting the heap.", e); return; } @@ -110,7 +111,7 @@ private static void heapSummary(SparkPlatform platform, CommandSender sender, Co platform.getActivityLog().addToLog(Activity.urlActivity(resp.senderData(), System.currentTimeMillis(), "Heap dump summary", url)); } catch (Exception e) { resp.broadcastPrefixed(text("An error occurred whilst uploading the data. Attempting to save to disk instead.", RED)); - e.printStackTrace(); + platform.getPlugin().log(Level.SEVERE, "An error occurred whilst uploading the data.", e); saveToFile = true; } } @@ -131,7 +132,7 @@ private static void heapSummary(SparkPlatform platform, CommandSender sender, Co platform.getActivityLog().addToLog(Activity.fileActivity(resp.senderData(), System.currentTimeMillis(), "Heap dump summary", file.toString())); } catch (IOException e) { resp.broadcastPrefixed(text("An error occurred whilst saving the data.", RED)); - e.printStackTrace(); + platform.getPlugin().log(Level.SEVERE, "An error occurred whilst saving the data.", e); } } @@ -153,7 +154,7 @@ private static void heapDump(SparkPlatform platform, CommandSender sender, Comma HeapDump.dumpHeap(file, liveOnly); } catch (Exception e) { resp.broadcastPrefixed(text("An error occurred whilst creating a heap dump.", RED)); - e.printStackTrace(); + platform.getPlugin().log(Level.SEVERE, "An error occurred whilst creating a heap dump.", e); return; } @@ -180,7 +181,7 @@ private static void heapDump(SparkPlatform platform, CommandSender sender, Comma try { heapDumpCompress(platform, resp, file, 
compressionMethod); } catch (IOException e) { - e.printStackTrace(); + platform.getPlugin().log(Level.SEVERE, "An error occurred whilst compressing the heap dump.", e); } } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java index cc7d56b0..d65172b5 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java +++ b/spark-common/src/main/java/me/lucko/spark/common/command/modules/SamplerModule.java @@ -59,6 +59,7 @@ import java.util.concurrent.TimeUnit; import java.util.function.Consumer; import java.util.function.Supplier; +import java.util.logging.Level; import static net.kyori.adventure.text.Component.empty; import static net.kyori.adventure.text.Component.space; @@ -277,8 +278,8 @@ private void profilerStart(SparkPlatform platform, CommandSender sender, Command // send message if profiling fails future.whenCompleteAsync((s, throwable) -> { if (throwable != null) { - resp.broadcastPrefixed(text("Profiler operation failed unexpectedly. Error: " + throwable.toString(), RED)); - throwable.printStackTrace(); + resp.broadcastPrefixed(text("Profiler operation failed unexpectedly. Error: " + throwable, RED)); + platform.getPlugin().log(Level.SEVERE, "Profiler operation failed unexpectedly", throwable); } }); @@ -439,7 +440,7 @@ private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, S platform.getActivityLog().addToLog(Activity.urlActivity(resp.senderData(), System.currentTimeMillis(), "Profiler", url)); } catch (Exception e) { resp.broadcastPrefixed(text("An error occurred whilst uploading the results. 
Attempting to save to disk instead.", RED)); - e.printStackTrace(); + platform.getPlugin().log(Level.WARNING, "Error whilst uploading profiler results", e); saveToFile = true; } } @@ -456,7 +457,7 @@ private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, S platform.getActivityLog().addToLog(Activity.fileActivity(resp.senderData(), System.currentTimeMillis(), "Profiler", file.toString())); } catch (IOException e) { resp.broadcastPrefixed(text("An error occurred whilst saving the data.", RED)); - e.printStackTrace(); + platform.getPlugin().log(Level.WARNING, "Error whilst saving profiler results", e); } } } @@ -498,7 +499,7 @@ private void handleOpen(SparkPlatform platform, BytesocksClient bytesocksClient, platform.getActivityLog().addToLog(Activity.urlActivity(resp.senderData(), System.currentTimeMillis(), "Profiler (live)", url)); } catch (Exception e) { resp.replyPrefixed(text("An error occurred whilst opening the live profiler.", RED)); - e.printStackTrace(); + platform.getPlugin().log(Level.WARNING, "Error whilst opening live profiler", e); } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java index e8401fcc..93bd59d6 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java +++ b/spark-common/src/main/java/me/lucko/spark/common/platform/PlatformStatisticsProvider.java @@ -248,7 +248,7 @@ public PlatformStatistics getPlatformStatistics(Map sources = platform.getPlugin().getKnownSources(); @@ -77,7 +77,7 @@ public static SparkMetadata gather(SparkPlatform platform, CommandSender.Data cr extraPlatformMetadata = extraMetadataProvider.export(); } } catch (Exception e) { - platform.getPlugin().log(Level.WARNING, "Failed to gather extra platform metadata - " + e); + platform.getPlugin().log(Level.WARNING, "Failed to gather extra platform metadata", e); } 
return new SparkMetadata(creator, platformMetadata, platformStatistics, systemStatistics, generatedTime, serverConfigurations, sources, extraPlatformMetadata); diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java index aecdc713..17e0e64d 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/AbstractSampler.java @@ -44,6 +44,7 @@ import java.util.concurrent.CopyOnWriteArrayList; import java.util.function.Function; import java.util.function.Supplier; +import java.util.logging.Level; /** * Base implementation class for {@link Sampler}s. @@ -173,7 +174,7 @@ protected void sendStatisticsToSocket() { viewerSocket.sendUpdatedStatistics(platform, system); } } catch (Exception e) { - e.printStackTrace(); + platform.getPlugin().log(Level.WARNING, "Exception occurred while sending statistics to viewer", e); } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java index 18893049..25cdc6bb 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/BackgroundSamplerManager.java @@ -89,7 +89,7 @@ public void initialise() { } } catch (Throwable e) { - e.printStackTrace(); + this.platform.getPlugin().log(Level.WARNING, "Failed to start background profiler.", e); } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java index 994c03bc..3d179489 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java +++ 
b/spark-common/src/main/java/me/lucko/spark/common/sampler/async/AsyncSampler.java @@ -37,6 +37,7 @@ import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import java.util.function.IntPredicate; +import java.util.logging.Level; /** * A sampler implementation using async-profiler. @@ -124,7 +125,7 @@ private void rotateProfilerJob() { // stop the previous job previousJob.stop(); } catch (Exception e) { - e.printStackTrace(); + this.platform.getPlugin().log(Level.WARNING, "Failed to stop previous profiler job", e); } // start a new job @@ -139,7 +140,7 @@ private void rotateProfilerJob() { try { this.windowStatisticsCollector.measureNow(previousJob.getWindow()); } catch (Exception e) { - e.printStackTrace(); + this.platform.getPlugin().log(Level.WARNING, "Failed to measure window statistics", e); } // aggregate the output of the previous job @@ -153,7 +154,7 @@ private void rotateProfilerJob() { this.scheduler.execute(this::processWindowRotate); } } catch (Throwable e) { - e.printStackTrace(); + this.platform.getPlugin().log(Level.WARNING, "Exception occurred while rotating profiler job", e); } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/source/ClassSourceLookup.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/source/ClassSourceLookup.java index a3b4f024..5397a0e3 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/source/ClassSourceLookup.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/source/ClassSourceLookup.java @@ -43,6 +43,7 @@ import java.util.Queue; import java.util.function.Function; import java.util.function.Supplier; +import java.util.logging.Level; import java.util.stream.Collectors; /** @@ -92,7 +93,7 @@ static ClassSourceLookup create(SparkPlatform platform) { try { return platform.createClassSourceLookup(); } catch (Exception e) { - e.printStackTrace(); + platform.getPlugin().log(Level.WARNING, "Failed to create ClassSourceLookup", e); return 
NO_OP; } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java index 86c0b20d..0ef2eb3b 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java +++ b/spark-common/src/main/java/me/lucko/spark/common/sampler/window/WindowStatisticsCollector.java @@ -190,7 +190,7 @@ private SparkProtos.WindowStatistics measure(int window) { builder.setChunks(counts.chunks()); } } catch (Exception e) { - e.printStackTrace(); + this.platform.getPlugin().log(Level.WARNING, "Exception occurred while getting world info", e); } return builder.build(); diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/SparkThreadFactory.java b/spark-common/src/main/java/me/lucko/spark/common/util/SparkThreadFactory.java index 1d6971be..99e50ff9 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/util/SparkThreadFactory.java +++ b/spark-common/src/main/java/me/lucko/spark/common/util/SparkThreadFactory.java @@ -20,6 +20,8 @@ package me.lucko.spark.common.util; +import me.lucko.spark.common.util.log.SparkStaticLogger; + import java.util.concurrent.ThreadFactory; import java.util.concurrent.atomic.AtomicInteger; import java.util.logging.Level; @@ -27,8 +29,7 @@ public class SparkThreadFactory implements ThreadFactory { public static final Thread.UncaughtExceptionHandler EXCEPTION_HANDLER = (t, e) -> { - SparkStaticLogger.log(Level.SEVERE, "Uncaught exception thrown by thread " + t.getName()); - e.printStackTrace(); + SparkStaticLogger.log(Level.SEVERE, "Uncaught exception thrown by thread " + t.getName(), e); }; private static final AtomicInteger poolNumber = new AtomicInteger(1); diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/log/Logger.java b/spark-common/src/main/java/me/lucko/spark/common/util/log/Logger.java new file mode 100644 index 00000000..1f147877 --- 
/dev/null +++ b/spark-common/src/main/java/me/lucko/spark/common/util/log/Logger.java @@ -0,0 +1,80 @@ +/* + * This file is part of spark. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +package me.lucko.spark.common.util.log; + +import java.util.logging.Level; + +public interface Logger { + + /** + * Print to the plugin logger. + * + * @param level the log level + * @param msg the message + */ + void log(Level level, String msg); + + /** + * Print to the plugin logger. 
+ * + * @param level the log level + * @param msg the message + * @param throwable the throwable + */ + void log(Level level, String msg, Throwable throwable); + + /** + * A fallback logger + */ + Logger FALLBACK = new Logger() { + @Override + public void log(Level level, String msg) { + if (isSevere(level)) { + System.err.println(msg); + } else { + System.out.println(msg); + } + } + + @Override + public void log(Level level, String msg, Throwable throwable) { + if (isSevere(level)) { + System.err.println(msg); + if (throwable != null) { + throwable.printStackTrace(System.err); + } + } else { + System.out.println(msg); + if (throwable != null) { + throwable.printStackTrace(System.out); + } + } + } + }; + + static boolean isSevere(Level level) { + return level.intValue() >= 1000; + } + + static boolean isWarning(Level level) { + return level.intValue() >= 900; + } +} diff --git a/spark-common/src/main/java/me/lucko/spark/common/util/SparkStaticLogger.java b/spark-common/src/main/java/me/lucko/spark/common/util/log/SparkStaticLogger.java similarity index 76% rename from spark-common/src/main/java/me/lucko/spark/common/util/SparkStaticLogger.java rename to spark-common/src/main/java/me/lucko/spark/common/util/log/SparkStaticLogger.java index eb5f3163..3c118fa3 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/util/SparkStaticLogger.java +++ b/spark-common/src/main/java/me/lucko/spark/common/util/log/SparkStaticLogger.java @@ -18,7 +18,7 @@ * along with this program. If not, see . 
*/ -package me.lucko.spark.common.util; +package me.lucko.spark.common.util.log; import java.util.logging.Level; @@ -32,7 +32,7 @@ public enum SparkStaticLogger { ; - private static Logger logger = null; + private static Logger logger = Logger.FALLBACK; public synchronized static void setLogger(Logger logger) { - if (SparkStaticLogger.logger == null) { + if (SparkStaticLogger.logger == Logger.FALLBACK) { @@ -40,22 +40,12 @@ } - public static void log(Level level, String msg) { - Logger logger = SparkStaticLogger.logger; - if (logger == null) { - if (level.intValue() >= 1000) { - System.err.println(msg); - } else { - System.out.println(msg); - } - return; - } - - logger.log(level, msg); + public static void log(Level level, String msg, Throwable throwable) { + logger.log(level, msg, throwable); } - public interface Logger { - void log(Level level, String msg); + public static void log(Level level, String msg) { + logger.log(level, msg); } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/ws/ViewerSocket.java b/spark-common/src/main/java/me/lucko/spark/common/ws/ViewerSocket.java index 0356b7dc..49b2837d 100644 --- a/spark-common/src/main/java/me/lucko/spark/common/ws/ViewerSocket.java +++ b/spark-common/src/main/java/me/lucko/spark/common/ws/ViewerSocket.java @@ -117,8 +117,7 @@ public void processWindowRotate(AbstractSampler sampler) { String key = this.platform.getBytebinClient().postContent(samplerData, MediaTypes.SPARK_SAMPLER_MEDIA_TYPE, "live").key(); sendUpdatedSamplerData(key); } catch (Exception e) { - this.platform.getPlugin().log(Level.WARNING, "Error whilst sending updated sampler data to the socket"); - e.printStackTrace(); + this.platform.getPlugin().log(Level.WARNING, "Error whilst sending updated sampler data to the socket", e); } } diff --git a/spark-common/src/main/java/me/lucko/spark/common/ws/ViewerSocketConnection.java b/spark-common/src/main/java/me/lucko/spark/common/ws/ViewerSocketConnection.java index 2173f53d..5ec72aac 
100644 --- a/spark-common/src/main/java/me/lucko/spark/common/ws/ViewerSocketConnection.java +++ b/spark-common/src/main/java/me/lucko/spark/common/ws/ViewerSocketConnection.java @@ -104,15 +104,13 @@ public void onText(String data) { RawPacket packet = decodeRawPacket(data); handleRawPacket(packet); } catch (Exception e) { - this.platform.getPlugin().log(Level.WARNING, "Exception occurred while reading data from the socket"); - e.printStackTrace(); + this.platform.getPlugin().log(Level.WARNING, "Exception occurred while reading data from the socket", e); } } @Override public void onError(Throwable error) { - this.platform.getPlugin().log(Level.INFO, "Socket error: " + error.getClass().getName() + " " + error.getMessage()); - error.printStackTrace(); + this.platform.getPlugin().log(Level.INFO, "Socket error: " + error.getClass().getName() + " " + error.getMessage(), error); } @Override @@ -133,8 +131,7 @@ public void sendPacket(Consumer packetBuilder) { try { sendPacket(wrapper); } catch (Exception e) { - this.platform.getPlugin().log(Level.WARNING, "Exception occurred while sending data to the socket"); - e.printStackTrace(); + this.platform.getPlugin().log(Level.WARNING, "Exception occurred while sending data to the socket", e); } } diff --git a/spark-common/src/test/java/me/lucko/spark/test/plugin/TestSparkPlugin.java b/spark-common/src/test/java/me/lucko/spark/test/plugin/TestSparkPlugin.java index 866a2b66..77dffd26 100644 --- a/spark-common/src/test/java/me/lucko/spark/test/plugin/TestSparkPlugin.java +++ b/spark-common/src/test/java/me/lucko/spark/test/plugin/TestSparkPlugin.java @@ -101,6 +101,11 @@ public void log(Level level, String msg) { LOGGER.log(level, msg); } + @Override + public void log(Level level, String msg, Throwable throwable) { + LOGGER.log(level, msg, throwable); + } + @Override public PlatformInfo getPlatformInfo() { return new PlatformInfo() { diff --git a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkPlugin.java 
b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkPlugin.java index 0bdbd30f..348284bf 100644 --- a/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkPlugin.java +++ b/spark-fabric/src/main/java/me/lucko/spark/fabric/plugin/FabricSparkPlugin.java @@ -93,14 +93,23 @@ public void executeAsync(Runnable task) { @Override public void log(Level level, String msg) { - if (level == Level.INFO) { - this.logger.info(msg); - } else if (level == Level.WARNING) { - this.logger.warn(msg); - } else if (level == Level.SEVERE) { + if (level.intValue() >= 1000) { // severe this.logger.error(msg); + } else if (level.intValue() >= 900) { // warning + this.logger.warn(msg); + } else { + this.logger.info(msg); + } + } + + @Override + public void log(Level level, String msg, Throwable throwable) { + if (level.intValue() >= 1000) { // severe + this.logger.error(msg, throwable); + } else if (level.intValue() >= 900) { // warning + this.logger.warn(msg, throwable); } else { - throw new IllegalArgumentException(level.getName()); + this.logger.info(msg, throwable); } } diff --git a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java index 28773269..a543bfe1 100644 --- a/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java +++ b/spark-forge/src/main/java/me/lucko/spark/forge/plugin/ForgeSparkPlugin.java @@ -92,14 +92,23 @@ public void executeAsync(Runnable task) { @Override public void log(Level level, String msg) { - if (level == Level.INFO) { - this.logger.info(msg); - } else if (level == Level.WARNING) { - this.logger.warn(msg); - } else if (level == Level.SEVERE) { + if (level.intValue() >= 1000) { // severe this.logger.error(msg); + } else if (level.intValue() >= 900) { // warning + this.logger.warn(msg); + } else { + this.logger.info(msg); + } + } + + @Override + public void log(Level level, String msg, Throwable throwable) { + 
if (level.intValue() >= 1000) { // severe + this.logger.error(msg, throwable); + } else if (level.intValue() >= 900) { // warning + this.logger.warn(msg, throwable); } else { - throw new IllegalArgumentException(level.getName()); + this.logger.info(msg, throwable); } } diff --git a/spark-neoforge/src/main/java/me/lucko/spark/neoforge/plugin/NeoForgeSparkPlugin.java b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/plugin/NeoForgeSparkPlugin.java index 10c7a683..667a12d6 100644 --- a/spark-neoforge/src/main/java/me/lucko/spark/neoforge/plugin/NeoForgeSparkPlugin.java +++ b/spark-neoforge/src/main/java/me/lucko/spark/neoforge/plugin/NeoForgeSparkPlugin.java @@ -92,14 +92,23 @@ public void executeAsync(Runnable task) { @Override public void log(Level level, String msg) { - if (level == Level.INFO) { - this.logger.info(msg); - } else if (level == Level.WARNING) { - this.logger.warn(msg); - } else if (level == Level.SEVERE) { + if (level.intValue() >= 1000) { // severe this.logger.error(msg); + } else if (level.intValue() >= 900) { // warning + this.logger.warn(msg); + } else { + this.logger.info(msg); + } + } + + @Override + public void log(Level level, String msg, Throwable throwable) { + if (level.intValue() >= 1000) { // severe + this.logger.error(msg, throwable); + } else if (level.intValue() >= 900) { // warning + this.logger.warn(msg, throwable); } else { - throw new IllegalArgumentException(level.getName()); + this.logger.info(msg, throwable); } } diff --git a/spark-paper/src/main/java/me/lucko/spark/paper/PaperSparkPlugin.java b/spark-paper/src/main/java/me/lucko/spark/paper/PaperSparkPlugin.java index 73694d27..3969d51f 100644 --- a/spark-paper/src/main/java/me/lucko/spark/paper/PaperSparkPlugin.java +++ b/spark-paper/src/main/java/me/lucko/spark/paper/PaperSparkPlugin.java @@ -148,6 +148,11 @@ public void log(Level level, String msg) { this.logger.log(level, msg); } + @Override + public void log(Level level, String msg, Throwable throwable) { + 
this.logger.log(level, msg, throwable); + } + @Override public ThreadDumper getDefaultThreadDumper() { return this.gameThreadDumper; diff --git a/spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeSparkPlugin.java b/spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeSparkPlugin.java index 3542daed..411c6cef 100644 --- a/spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeSparkPlugin.java +++ b/spark-sponge/src/main/java/me/lucko/spark/sponge/SpongeSparkPlugin.java @@ -151,14 +151,23 @@ public void executeSync(Runnable task) { @Override public void log(Level level, String msg) { - if (level == Level.INFO) { - this.logger.info(msg); - } else if (level == Level.WARNING) { - this.logger.warn(msg); - } else if (level == Level.SEVERE) { + if (level.intValue() >= 1000) { // severe this.logger.error(msg); + } else if (level.intValue() >= 900) { // warning + this.logger.warn(msg); + } else { + this.logger.info(msg); + } + } + + @Override + public void log(Level level, String msg, Throwable throwable) { + if (level.intValue() >= 1000) { // severe + this.logger.error(msg, throwable); + } else if (level.intValue() >= 900) { // warning + this.logger.warn(msg, throwable); } else { - throw new IllegalArgumentException(level.getName()); + this.logger.info(msg, throwable); } } diff --git a/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocitySparkPlugin.java b/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocitySparkPlugin.java index f31963de..88bbf1d8 100644 --- a/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocitySparkPlugin.java +++ b/spark-velocity/src/main/java/me/lucko/spark/velocity/VelocitySparkPlugin.java @@ -122,14 +122,23 @@ public void executeAsync(Runnable task) { @Override public void log(Level level, String msg) { - if (level == Level.INFO) { - this.logger.info(msg); - } else if (level == Level.WARNING) { - this.logger.warn(msg); - } else if (level == Level.SEVERE) { + if (level.intValue() >= 1000) { // severe 
this.logger.error(msg); + } else if (level.intValue() >= 900) { // warning + this.logger.warn(msg); + } else { + this.logger.info(msg); + } + } + + @Override + public void log(Level level, String msg, Throwable throwable) { + if (level.intValue() >= 1000) { // severe + this.logger.error(msg, throwable); + } else if (level.intValue() >= 900) { // warning + this.logger.warn(msg, throwable); } else { - throw new IllegalArgumentException(level.getName()); + this.logger.info(msg, throwable); } }