diff --git a/src/main/resources/reference.conf b/src/main/resources/reference.conf index cbb24de..ed08c2c 100644 --- a/src/main/resources/reference.conf +++ b/src/main/resources/reference.conf @@ -4,7 +4,8 @@ kamon { statsd { - # Hostname and port in which your dogstatsd is running (if not using the API). Remember that Datadog packets are sent using UDP and + + # Hostname and port in which your StatsD is running. Remember that StatsD packets are sent using UDP and # setting unreachable hosts and/or not open ports wont be warned by the Kamon, your data wont go anywhere. hostname = "127.0.0.1" port = 8125 @@ -13,7 +14,7 @@ kamon { max-packet-size = 1024 bytes # All time values are collected in nanoseconds, - # to scale before sending to statsd set "time-units" to "s" or "ms" or "µs". + # to scale before sending to StatsD set "time-units" to "s" or "ms" or "µs". # Value "n" is equivalent to omitting the setting time-unit = "ms" @@ -27,9 +28,10 @@ kamon { metric-key-generator = kamon.statsd.SimpleMetricKeyGenerator simple-metric-key-generator { - # Includes the name of the hostname in the generated metric. When set to false, the scheme for the metrics - # will look as follows: - # application.entity.entity-name.metric-name + + # Indicates whether to include the hostname in the generated metric name. The generated metric names follow this + # format: + # service-name[.host-name].metric-name[.tag1-key.tag1-value][.tag2-key.tag2-value]... 
include-hostname = true # When the sections that make up the metric names have special characters like dots (very common in dispatcher @@ -44,11 +46,11 @@ kamon { } modules { - statsd { + statsd-reporter { enabled = true name = "StatsD Reporter" description = "StatsD Reporter" - factory = "kamon.statsd.StatsDReporterFactory" + factory = "kamon.statsd.StatsDReporter$Factory" } } } diff --git a/src/main/scala/kamon/statsd/SimpleMetricKeyGenerator.scala b/src/main/scala/kamon/statsd/SimpleMetricKeyGenerator.scala index c60ca9b..a869247 100644 --- a/src/main/scala/kamon/statsd/SimpleMetricKeyGenerator.scala +++ b/src/main/scala/kamon/statsd/SimpleMetricKeyGenerator.scala @@ -28,15 +28,15 @@ class SimpleMetricKeyGenerator(statsDConfig: Config) extends MetricKeyGenerator type Normalizer = String => String val configSettings: Config = statsDConfig.getConfig("simple-metric-key-generator") - val application: String = Kamon.environment.service + val serviceName: String = Kamon.environment.service val includeHostname: Boolean = configSettings.getBoolean("include-hostname") val hostname: String = Kamon.environment.host val normalizer: Normalizer = createNormalizer(configSettings.getString("metric-name-normalization-strategy")) val normalizedHostname: String = normalizer(hostname) val baseName: String = - if (includeHostname) s"$application.$normalizedHostname" - else application + if (includeHostname) s"$serviceName.$normalizedHostname" + else serviceName private def createNormalizer(strategy: String): Normalizer = strategy match { case "percent-encode" => PercentEncoder.encode diff --git a/src/main/scala/kamon/statsd/StatsDReporter.scala b/src/main/scala/kamon/statsd/StatsDReporter.scala index 1a991e6..c47ca1d 100644 --- a/src/main/scala/kamon/statsd/StatsDReporter.scala +++ b/src/main/scala/kamon/statsd/StatsDReporter.scala @@ -31,10 +31,6 @@ import kamon.statsd.StatsDReporter.MetricDataPacketBuffer import kamon.util.DynamicAccess import org.slf4j.LoggerFactory -class 
StatsDReporterFactory extends ModuleFactory { - override def create(settings: ModuleFactory.Settings): StatsDReporter = new StatsDReporter() - } - class StatsDReporter(configPath: String) extends MetricReporter { private val logger = LoggerFactory.getLogger(classOf[StatsDReporter]) @volatile private var reporterConfiguration = StatsDReporter.Settings.readSettings(Kamon.config().getConfig(configPath)) @@ -42,7 +38,7 @@ class StatsDReporter(configPath: String) extends MetricReporter { val symbols: DecimalFormatSymbols = DecimalFormatSymbols.getInstance(Locale.US) symbols.setDecimalSeparator('.') // Just in case there is some weird locale config we are not aware of. - // Absurdly high number of decimal digits, let the other end lose precision if it needs to. + // Absurdly high number of decimal digits, let the other end lose precision if it needs to. val samplingRateFormat = new DecimalFormat("#.################################################################", symbols) val clientChannel: DatagramChannel = DatagramChannel.open() @@ -61,25 +57,29 @@ class StatsDReporter(configPath: String) extends MetricReporter { val keyGenerator = reporterConfiguration.keyGenerator val packetBuffer = new MetricDataPacketBuffer(reporterConfiguration.maxPacketSize, clientChannel, reporterConfiguration.agentAddress) - for ( - counter <- snapshot.counters; - instrument <- counter.instruments - ) { - packetBuffer.appendMeasurement(keyGenerator.generateKey(counter.name, instrument.tags), encodeStatsDCounter(reporterConfiguration, instrument.value, counter.settings.unit)) + for { + counter <- snapshot.counters + instrument <- counter.instruments + } { + packetBuffer.appendMeasurement( + key = keyGenerator.generateKey(counter.name, instrument.tags), + measurementData = encodeStatsDCounter(reporterConfiguration, instrument.value, counter.settings.unit)) } - for ( - gauge <- snapshot.gauges; - instrument <- gauge.instruments - ) { - 
packetBuffer.appendMeasurement(keyGenerator.generateKey(gauge.name, instrument.tags), encodeStatsDGauge(reporterConfiguration, instrument.value, gauge.settings.unit)) + for { + gauge <- snapshot.gauges + instrument <- gauge.instruments + } { + packetBuffer.appendMeasurement( + key = keyGenerator.generateKey(gauge.name, instrument.tags), + measurementData = encodeStatsDGauge(reporterConfiguration, instrument.value, gauge.settings.unit)) } - for ( - metric <- snapshot.histograms ++ snapshot.rangeSamplers ++ snapshot.timers; - instrument <- metric.instruments; - bucket <- instrument.value.bucketsIterator - ) { + for { + metric <- snapshot.histograms ++ snapshot.rangeSamplers ++ snapshot.timers + instrument <- metric.instruments + bucket <- instrument.value.bucketsIterator + } { val bucketData = encodeStatsDTimer(reporterConfiguration, bucket.value, bucket.frequency, metric.settings.unit) packetBuffer.appendMeasurement(keyGenerator.generateKey(metric.name, instrument.tags), bucketData) } @@ -87,9 +87,11 @@ class StatsDReporter(configPath: String) extends MetricReporter { packetBuffer.flush() } - private def encodeStatsDCounter(config: StatsDReporter.Settings, count: Long, unit: MeasurementUnit): String = s"${scale(config, count, unit)}|c" + private def encodeStatsDCounter(config: StatsDReporter.Settings, count: Long, unit: MeasurementUnit): String = + s"${scale(config, count, unit)}|c" - private def encodeStatsDGauge(config: StatsDReporter.Settings, value: Double, unit: MeasurementUnit): String = s"${scale(config, value.toLong, unit)}|g" + private def encodeStatsDGauge(config: StatsDReporter.Settings, value: Double, unit: MeasurementUnit): String = + s"${scale(config, value.toLong, unit)}|g" private def encodeStatsDTimer(config: StatsDReporter.Settings, level: Long, count: Long, unit: MeasurementUnit): String = { val samplingRate: Double = 1D / count @@ -105,6 +107,12 @@ class StatsDReporter(configPath: String) extends MetricReporter { } object StatsDReporter { + + 
class Factory extends ModuleFactory { + override def create(settings: ModuleFactory.Settings): StatsDReporter = + new StatsDReporter() + } + case class Settings( agentAddress: InetSocketAddress, maxPacketSize: Long,