Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Instrument the Kafka bootstrap nodes #1130

Merged
merged 2 commits into from
Feb 28, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,8 @@

import com.newrelic.agent.bridge.AgentBridge;
import com.newrelic.api.agent.NewRelic;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
Expand All @@ -34,6 +36,16 @@ public class NewRelicMetricsReporter implements MetricsReporter {

private final Map<String, KafkaMetric> metrics = new ConcurrentHashMap<>();

private final List<String> nodes;

public NewRelicMetricsReporter() {
// No bootstrap nodes supplied; node metrics will not be reported for this instance.
this.nodes = Collections.emptyList();
}

/**
 * Creates a reporter that will additionally emit a presence metric/event for each
 * of the given Kafka bootstrap nodes on every harvest.
 *
 * @param nodes bootstrap node identifiers in {@code host:port} form; may be null
 *              or empty, in which case no node metrics are reported
 */
public NewRelicMetricsReporter(List<String> nodes) {
// Defensive copy so later mutation of the caller's list cannot change what we report;
// a null argument degrades gracefully to "no nodes" instead of a harvest-time NPE.
// Per review, the per-node metric names are precomputed in init() so that Strings
// are not re-concatenated on every harvest.
this.nodes = (nodes == null) ? Collections.<String>emptyList() : new ArrayList<>(nodes);
}

@Override
public void init(final List<KafkaMetric> initMetrics) {
for (KafkaMetric kafkaMetric : initMetrics) {
Expand All @@ -45,6 +57,16 @@ public void init(final List<KafkaMetric> initMetrics) {
}

final String metricPrefix = "MessageBroker/Kafka/Internal/";

final String nodePrefix = "MessageBroker/Kafka/Nodes/";
final List<String> nodeMetricNames = new ArrayList<String>(nodes.size());
final List<String> nodeMetricAsEventsNames = new ArrayList<String>(nodes.size());
for (String node : nodes) {
String metricName = nodePrefix + node;
nodeMetricNames.add(metricName);
nodeMetricAsEventsNames.add(metricName.replace('/', '.'));
}

executor.scheduleAtFixedRate(new Runnable() {
@Override
public void run() {
Expand All @@ -66,7 +88,15 @@ public void run() {
}
}
}

if (metricsAsEvents) {
for (String nodeMetric : nodeMetricAsEventsNames) {
eventData.put(nodeMetric, 1f);
}
} else {
for (String nodeMetric : nodeMetricNames) {
NewRelic.recordMetric(nodeMetric, 1f);
}
}
if (metricsAsEvents) {
NewRelic.getAgent().getInsights().recordCustomEvent("KafkaMetrics", eventData);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,14 @@
package org.apache.kafka.clients.consumer;

import java.time.Duration;
import java.util.List;
import java.util.ArrayList;

import org.apache.kafka.clients.consumer.internals.ConsumerMetadata;
import org.apache.kafka.common.errors.WakeupException;
import org.apache.kafka.common.metrics.Metrics;
import org.apache.kafka.common.utils.Timer;
import org.apache.kafka.common.Node;

import com.newrelic.agent.bridge.AgentBridge;
import com.newrelic.api.agent.DestinationType;
Expand All @@ -20,20 +26,26 @@
import com.newrelic.api.agent.weaver.WeaveAllConstructors;
import com.newrelic.api.agent.weaver.Weaver;
import com.nr.instrumentation.kafka.NewRelicMetricsReporter;
import org.apache.kafka.common.utils.Timer;

@Weave(originalName = "org.apache.kafka.clients.consumer.KafkaConsumer")
public class KafkaConsumer_Instrumentation<K, V> {

private final Metrics metrics = Weaver.callOriginal();

private final ConsumerMetadata metadata = Weaver.callOriginal();

@NewField
private boolean initialized;

@WeaveAllConstructors
public KafkaConsumer_Instrumentation() {
// Guard so the reporter is attached exactly once even though this weave matches
// every constructor (chained constructors would otherwise re-register it).
if (!initialized) {
    // Snapshot the bootstrap nodes known to the consumer metadata as "host:port"
    // strings and hand them to the reporter so each node is surfaced as a metric.
    List<Node> nodes = metadata.fetch().nodes();
    List<String> nodeNames = new ArrayList<>(nodes.size());
    for (Node node : nodes) {
        nodeNames.add(node.host() + ":" + node.port());
    }
    // Single registration with the node list; the no-arg reporter call was
    // replaced by this one (registering both would double-report metrics).
    metrics.addReporter(new NewRelicMetricsReporter(nodeNames));
    initialized = true;
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,10 @@

package org.apache.kafka.clients.producer;

import org.apache.kafka.clients.producer.internals.ProducerMetadata;
import org.apache.kafka.common.metrics.Metrics;
import org.apache.kafka.common.Node;

import com.newrelic.agent.bridge.AgentBridge;
import com.newrelic.api.agent.DestinationType;
import com.newrelic.api.agent.MessageProduceParameters;
Expand All @@ -18,24 +22,32 @@
import com.newrelic.api.agent.weaver.Weaver;
import com.nr.instrumentation.kafka.CallbackWrapper;
import com.nr.instrumentation.kafka.NewRelicMetricsReporter;
import org.apache.kafka.common.metrics.Metrics;

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Future;
import java.util.List;
import java.util.ArrayList;

@Weave(originalName = "org.apache.kafka.clients.producer.KafkaProducer")
public class KafkaProducer_Instrumentation<K, V> {

private final Metrics metrics = Weaver.callOriginal();

private final ProducerMetadata metadata = Weaver.callOriginal();

@NewField
private boolean initialized;

@WeaveAllConstructors
public KafkaProducer_Instrumentation() {
// Guard so the reporter is attached exactly once even though this weave matches
// every constructor (chained constructors would otherwise re-register it).
if (!initialized) {
    // Snapshot the bootstrap nodes known to the producer metadata as "host:port"
    // strings and hand them to the reporter so each node is surfaced as a metric.
    List<Node> nodes = metadata.fetch().nodes();
    List<String> nodeNames = new ArrayList<>(nodes.size());
    for (Node node : nodes) {
        nodeNames.add(node.host() + ":" + node.port());
    }
    // Single registration with the node list; the no-arg reporter call was
    // replaced by this one (registering both would double-report metrics).
    metrics.addReporter(new NewRelicMetricsReporter(nodeNames));
    initialized = true;
}
}
Expand Down