Skip to content

Commit

Permalink
Improve error logging in various places
Browse files Browse the repository at this point in the history
  • Loading branch information
lucko committed Oct 26, 2024
1 parent c58ddcf commit ef507b1
Show file tree
Hide file tree
Showing 26 changed files with 222 additions and 93 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -153,6 +153,11 @@ public void log(Level level, String msg) {
getLogger().log(level, msg);
}

/**
 * Logs a message together with an associated {@link Throwable}.
 *
 * <p>Delegates to the underlying platform logger via {@code getLogger()},
 * passing the throwable through so the logging framework records the
 * stack trace alongside the message.</p>
 *
 * @param level the log level
 * @param msg the message
 * @param throwable the throwable to record with the message
 */
@Override
public void log(Level level, String msg, Throwable throwable) {
getLogger().log(level, msg, throwable);
}

@Override
public ThreadDumper getDefaultThreadDumper() {
return this.gameThreadDumper;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -87,6 +87,11 @@ public void log(Level level, String msg) {
getLogger().log(level, msg);
}

/**
 * Logs a message together with an associated {@link Throwable}.
 *
 * <p>Delegates to the underlying platform logger via {@code getLogger()},
 * passing the throwable through so the logging framework records the
 * stack trace alongside the message.</p>
 *
 * @param level the log level
 * @param msg the message
 * @param throwable the throwable to record with the message
 */
@Override
public void log(Level level, String msg, Throwable throwable) {
getLogger().log(level, msg, throwable);
}

@Override
public ClassSourceLookup createClassSourceLookup() {
return new BungeeCordClassSourceLookup();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -53,12 +53,12 @@
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.tick.TickReporter;
import me.lucko.spark.common.util.BytebinClient;
import me.lucko.spark.common.util.SparkStaticLogger;
import me.lucko.spark.common.util.TemporaryFiles;
import me.lucko.spark.common.util.classfinder.ClassFinder;
import me.lucko.spark.common.util.config.Configuration;
import me.lucko.spark.common.util.config.FileConfiguration;
import me.lucko.spark.common.util.config.RuntimeConfiguration;
import me.lucko.spark.common.util.log.SparkStaticLogger;
import me.lucko.spark.common.ws.TrustedKeyStore;
import net.kyori.adventure.text.Component;
import net.kyori.adventure.text.event.ClickEvent;
Expand Down Expand Up @@ -124,7 +124,7 @@ public class SparkPlatform {

public SparkPlatform(SparkPlugin plugin) {
this.plugin = plugin;
SparkStaticLogger.setLogger(plugin::log);
SparkStaticLogger.setLogger(plugin);

this.temporaryFiles = new TemporaryFiles(this.plugin.getPlatformInfo().getType() == PlatformInfo.Type.CLIENT
? this.plugin.getPluginDirectory().resolve("tmp")
Expand Down Expand Up @@ -370,8 +370,7 @@ public CompletableFuture<Void> executeCommand(CommandSender sender, String[] arg
executeCommand0(sender, args);
future.complete(null);
} catch (Exception e) {
this.plugin.log(Level.SEVERE, "Exception occurred whilst executing a spark command");
e.printStackTrace();
this.plugin.log(Level.SEVERE, "Exception occurred whilst executing a spark command", e);
future.completeExceptionally(e);
} finally {
this.commandExecuteLock.unlock();
Expand Down
12 changes: 2 additions & 10 deletions spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
Original file line number Diff line number Diff line change
Expand Up @@ -36,17 +36,17 @@
import me.lucko.spark.common.util.classfinder.ClassFinder;
import me.lucko.spark.common.util.classfinder.FallbackClassFinder;
import me.lucko.spark.common.util.classfinder.InstrumentationClassFinder;
import me.lucko.spark.common.util.log.Logger;

import java.nio.file.Path;
import java.util.Collection;
import java.util.Collections;
import java.util.logging.Level;
import java.util.stream.Stream;

/**
* Spark plugin interface
*/
public interface SparkPlugin {
public interface SparkPlugin extends Logger {

/**
* Gets the version of the plugin.
Expand Down Expand Up @@ -92,14 +92,6 @@ default void executeSync(Runnable task) {
throw new UnsupportedOperationException();
}

/**
* Print to the plugin logger.
*
* @param level the log level
* @param msg the message
*/
void log(Level level, String msg);

/**
* Gets the default {@link ThreadDumper} to be used by the plugin.
*
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,7 @@
import java.util.Map;
import java.util.Set;
import java.util.function.Consumer;
import java.util.logging.Level;

import static net.kyori.adventure.text.Component.empty;
import static net.kyori.adventure.text.Component.space;
Expand Down Expand Up @@ -249,7 +250,7 @@ private static void uploadHealthReport(SparkPlatform platform, CommandSender sen
platform.getActivityLog().addToLog(Activity.urlActivity(resp.senderData(), System.currentTimeMillis(), "Health report", url));
} catch (Exception e) {
resp.broadcastPrefixed(text("An error occurred whilst uploading the data.", RED));
e.printStackTrace();
platform.getPlugin().log(Level.SEVERE, "An error occurred whilst uploading data", e);
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Consumer;
import java.util.function.LongConsumer;
import java.util.logging.Level;

import static net.kyori.adventure.text.Component.text;
import static net.kyori.adventure.text.format.NamedTextColor.GOLD;
Expand Down Expand Up @@ -85,7 +86,7 @@ private static void heapSummary(SparkPlatform platform, CommandSender sender, Co
heapDump = HeapDumpSummary.createNew();
} catch (Exception e) {
resp.broadcastPrefixed(text("An error occurred whilst inspecting the heap.", RED));
e.printStackTrace();
platform.getPlugin().log(Level.SEVERE, "An error occurred whilst inspecting the heap.", e);
return;
}

Expand All @@ -110,7 +111,7 @@ private static void heapSummary(SparkPlatform platform, CommandSender sender, Co
platform.getActivityLog().addToLog(Activity.urlActivity(resp.senderData(), System.currentTimeMillis(), "Heap dump summary", url));
} catch (Exception e) {
resp.broadcastPrefixed(text("An error occurred whilst uploading the data. Attempting to save to disk instead.", RED));
e.printStackTrace();
platform.getPlugin().log(Level.SEVERE, "An error occurred whilst uploading the data.", e);
saveToFile = true;
}
}
Expand All @@ -131,7 +132,7 @@ private static void heapSummary(SparkPlatform platform, CommandSender sender, Co
platform.getActivityLog().addToLog(Activity.fileActivity(resp.senderData(), System.currentTimeMillis(), "Heap dump summary", file.toString()));
} catch (IOException e) {
resp.broadcastPrefixed(text("An error occurred whilst saving the data.", RED));
e.printStackTrace();
platform.getPlugin().log(Level.SEVERE, "An error occurred whilst saving the data.", e);
}
}

Expand All @@ -153,7 +154,7 @@ private static void heapDump(SparkPlatform platform, CommandSender sender, Comma
HeapDump.dumpHeap(file, liveOnly);
} catch (Exception e) {
resp.broadcastPrefixed(text("An error occurred whilst creating a heap dump.", RED));
e.printStackTrace();
platform.getPlugin().log(Level.SEVERE, "An error occurred whilst creating a heap dump.", e);
return;
}

Expand All @@ -180,7 +181,7 @@ private static void heapDump(SparkPlatform platform, CommandSender sender, Comma
try {
heapDumpCompress(platform, resp, file, compressionMethod);
} catch (IOException e) {
e.printStackTrace();
platform.getPlugin().log(Level.SEVERE, "An error occurred whilst compressing the heap dump.", e);
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
import java.util.function.Supplier;
import java.util.logging.Level;

import static net.kyori.adventure.text.Component.empty;
import static net.kyori.adventure.text.Component.space;
Expand Down Expand Up @@ -277,8 +278,8 @@ private void profilerStart(SparkPlatform platform, CommandSender sender, Command
// send message if profiling fails
future.whenCompleteAsync((s, throwable) -> {
if (throwable != null) {
resp.broadcastPrefixed(text("Profiler operation failed unexpectedly. Error: " + throwable.toString(), RED));
throwable.printStackTrace();
resp.broadcastPrefixed(text("Profiler operation failed unexpectedly. Error: " + throwable, RED));
platform.getPlugin().log(Level.SEVERE, "Profiler operation failed unexpectedly", throwable);
}
});

Expand Down Expand Up @@ -439,7 +440,7 @@ private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, S
platform.getActivityLog().addToLog(Activity.urlActivity(resp.senderData(), System.currentTimeMillis(), "Profiler", url));
} catch (Exception e) {
resp.broadcastPrefixed(text("An error occurred whilst uploading the results. Attempting to save to disk instead.", RED));
e.printStackTrace();
platform.getPlugin().log(Level.WARNING, "Error whilst uploading profiler results", e);
saveToFile = true;
}
}
Expand All @@ -456,7 +457,7 @@ private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, S
platform.getActivityLog().addToLog(Activity.fileActivity(resp.senderData(), System.currentTimeMillis(), "Profiler", file.toString()));
} catch (IOException e) {
resp.broadcastPrefixed(text("An error occurred whilst saving the data.", RED));
e.printStackTrace();
platform.getPlugin().log(Level.WARNING, "Error whilst saving profiler results", e);
}
}
}
Expand Down Expand Up @@ -498,7 +499,7 @@ private void handleOpen(SparkPlatform platform, BytesocksClient bytesocksClient,
platform.getActivityLog().addToLog(Activity.urlActivity(resp.senderData(), System.currentTimeMillis(), "Profiler (live)", url));
} catch (Exception e) {
resp.replyPrefixed(text("An error occurred whilst opening the live profiler.", RED));
e.printStackTrace();
platform.getPlugin().log(Level.WARNING, "Error whilst opening live profiler", e);
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -248,7 +248,7 @@ public PlatformStatistics getPlatformStatistics(Map<String, GarbageCollectorStat
builder.setWorld(worldStatistics);
}
} catch (Exception e) {
this.platform.getPlugin().log(Level.WARNING, "Failed to gather world statistics - " + e);
this.platform.getPlugin().log(Level.WARNING, "Failed to gather world statistics", e);
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -46,14 +46,14 @@ public static SparkMetadata gather(SparkPlatform platform, CommandSender.Data cr
try {
platformStatistics = platform.getStatisticsProvider().getPlatformStatistics(initialGcStats, true);
} catch (Exception e) {
platform.getPlugin().log(Level.WARNING, "Failed to gather platform statistics - " + e);
platform.getPlugin().log(Level.WARNING, "Failed to gather platform statistics", e);
}

SystemStatistics systemStatistics = null;
try {
systemStatistics = platform.getStatisticsProvider().getSystemStatistics();
} catch (Exception e) {
platform.getPlugin().log(Level.WARNING, "Failed to gather system statistics - " + e);
platform.getPlugin().log(Level.WARNING, "Failed to gather system statistics", e);
}

long generatedTime = System.currentTimeMillis();
Expand All @@ -65,7 +65,7 @@ public static SparkMetadata gather(SparkPlatform platform, CommandSender.Data cr
serverConfigurations = serverConfigProvider.export();
}
} catch (Exception e) {
platform.getPlugin().log(Level.WARNING, "Failed to gather server configurations - " + e);
platform.getPlugin().log(Level.WARNING, "Failed to gather server configurations", e);
}

Collection<SourceMetadata> sources = platform.getPlugin().getKnownSources();
Expand All @@ -77,7 +77,7 @@ public static SparkMetadata gather(SparkPlatform platform, CommandSender.Data cr
extraPlatformMetadata = extraMetadataProvider.export();
}
} catch (Exception e) {
platform.getPlugin().log(Level.WARNING, "Failed to gather extra platform metadata - " + e);
platform.getPlugin().log(Level.WARNING, "Failed to gather extra platform metadata", e);
}

return new SparkMetadata(creator, platformMetadata, platformStatistics, systemStatistics, generatedTime, serverConfigurations, sources, extraPlatformMetadata);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.logging.Level;

/**
* Base implementation class for {@link Sampler}s.
Expand Down Expand Up @@ -173,7 +174,7 @@ protected void sendStatisticsToSocket() {
viewerSocket.sendUpdatedStatistics(platform, system);
}
} catch (Exception e) {
e.printStackTrace();
platform.getPlugin().log(Level.WARNING, "Exception occurred while sending statistics to viewer", e);
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -89,7 +89,7 @@ public void initialise() {
}

} catch (Throwable e) {
e.printStackTrace();
this.platform.getPlugin().log(Level.WARNING, "Failed to start background profiler.");
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.function.IntPredicate;
import java.util.logging.Level;

/**
* A sampler implementation using async-profiler.
Expand Down Expand Up @@ -124,7 +125,7 @@ private void rotateProfilerJob() {
// stop the previous job
previousJob.stop();
} catch (Exception e) {
e.printStackTrace();
this.platform.getPlugin().log(Level.WARNING, "Failed to stop previous profiler job", e);
}

// start a new job
Expand All @@ -139,7 +140,7 @@ private void rotateProfilerJob() {
try {
this.windowStatisticsCollector.measureNow(previousJob.getWindow());
} catch (Exception e) {
e.printStackTrace();
this.platform.getPlugin().log(Level.WARNING, "Failed to measure window statistics", e);
}

// aggregate the output of the previous job
Expand All @@ -153,7 +154,7 @@ private void rotateProfilerJob() {
this.scheduler.execute(this::processWindowRotate);
}
} catch (Throwable e) {
e.printStackTrace();
this.platform.getPlugin().log(Level.WARNING, "Exception occurred while rotating profiler job", e);
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,7 @@
import java.util.Queue;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.logging.Level;
import java.util.stream.Collectors;

/**
Expand Down Expand Up @@ -92,7 +93,7 @@ static ClassSourceLookup create(SparkPlatform platform) {
try {
return platform.createClassSourceLookup();
} catch (Exception e) {
e.printStackTrace();
platform.getPlugin().log(Level.WARNING, "Failed to create ClassSourceLookup", e);
return NO_OP;
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -190,7 +190,7 @@ private SparkProtos.WindowStatistics measure(int window) {
builder.setChunks(counts.chunks());
}
} catch (Exception e) {
e.printStackTrace();
this.platform.getPlugin().log(Level.WARNING, "Exception occurred while getting world info", e);
}

return builder.build();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,15 +20,16 @@

package me.lucko.spark.common.util;

import me.lucko.spark.common.util.log.SparkStaticLogger;

import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.logging.Level;

public class SparkThreadFactory implements ThreadFactory {

public static final Thread.UncaughtExceptionHandler EXCEPTION_HANDLER = (t, e) -> {
SparkStaticLogger.log(Level.SEVERE, "Uncaught exception thrown by thread " + t.getName());
e.printStackTrace();
SparkStaticLogger.log(Level.SEVERE, "Uncaught exception thrown by thread " + t.getName(), e);
};

private static final AtomicInteger poolNumber = new AtomicInteger(1);
Expand Down
Loading

0 comments on commit ef507b1

Please sign in to comment.