Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -250,6 +250,11 @@ public static void start(
setSystemPropertyDefault(
propertyNameToSystemPropertyName("integration.kafka.enabled"), "true");

if (Config.get().isDataJobsOpenLineageEnabled()) {
setSystemPropertyDefault(
propertyNameToSystemPropertyName("integration.spark-openlineage.enabled"), "true");
}

String javaCommand = System.getProperty("sun.java.command");
String dataJobsCommandPattern = Config.get().getDataJobsCommandPattern();
if (!isDataJobsSupported(javaCommand, dataJobsCommandPattern)) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,11 @@

import com.google.auto.service.AutoService;
import datadog.trace.agent.tooling.InstrumenterModule;
import datadog.trace.api.Config;
import net.bytebuddy.asm.Advice;
import org.apache.spark.SparkContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@AutoService(InstrumenterModule.class)
public class Spark212Instrumentation extends AbstractSparkInstrumentation {
Expand All @@ -17,6 +20,7 @@ public String[] helperClassNames() {
packageName + ".DatabricksParentContext",
packageName + ".OpenlineageParentContext",
packageName + ".DatadogSpark212Listener",
packageName + ".PredeterminedTraceIdContext",
packageName + ".RemoveEldestHashMap",
packageName + ".SparkAggregatedTaskMetrics",
packageName + ".SparkConfAllowList",
Expand All @@ -41,6 +45,33 @@ public void methodAdvice(MethodTransformer transformer) {
public static class InjectListener {
@Advice.OnMethodEnter(suppress = Throwable.class)
public static void enter(@Advice.This SparkContext sparkContext) {
Logger log = LoggerFactory.getLogger("Spark212InjectListener");
if (Config.get().isDataJobsOpenLineageEnabled()
&& AbstractDatadogSparkListener.classIsLoadable(
"io.openlineage.spark.agent.OpenLineageSparkListener")
&& AbstractDatadogSparkListener.classIsLoadable(
"io.openlineage.spark.agent.facets.builder.TagsRunFacetBuilder")) {
if (!sparkContext.conf().contains("spark.extraListeners")) {
log.debug("spark.extraListeners does not contain any listeners. Adding OpenLineage");
sparkContext
.conf()
.set("spark.extraListeners", "io.openlineage.spark.agent.OpenLineageSparkListener");
} else {
String extraListeners = sparkContext.conf().get("spark.extraListeners");
if (!extraListeners.contains("io.openlineage.spark.agent.OpenLineageSparkListener")) {
log.debug(
"spark.extraListeners does contain listeners {}. Adding OpenLineage",
extraListeners);
sparkContext
.conf()
.set(
"spark.extraListeners",
extraListeners + ",io.openlineage.spark.agent.OpenLineageSparkListener");
}
}
}

// We want to add the Datadog listener as the first listener
AbstractDatadogSparkListener.listener =
new DatadogSpark212Listener(
sparkContext.getConf(), sparkContext.applicationId(), sparkContext.version());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,11 @@

import com.google.auto.service.AutoService;
import datadog.trace.agent.tooling.InstrumenterModule;
import datadog.trace.api.Config;
import net.bytebuddy.asm.Advice;
import org.apache.spark.SparkContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@AutoService(InstrumenterModule.class)
public class Spark213Instrumentation extends AbstractSparkInstrumentation {
Expand All @@ -17,6 +20,7 @@ public String[] helperClassNames() {
packageName + ".DatabricksParentContext",
packageName + ".OpenlineageParentContext",
packageName + ".DatadogSpark213Listener",
packageName + ".PredeterminedTraceIdContext",
packageName + ".RemoveEldestHashMap",
packageName + ".SparkAggregatedTaskMetrics",
packageName + ".SparkConfAllowList",
Expand All @@ -41,6 +45,34 @@ public void methodAdvice(MethodTransformer transformer) {
public static class InjectListener {
@Advice.OnMethodEnter(suppress = Throwable.class)
public static void enter(@Advice.This SparkContext sparkContext) {
// checking whether OpenLineage integration is enabled, available and that it supports tags
Logger log = LoggerFactory.getLogger("Spark213InjectListener");
if (Config.get().isDataJobsOpenLineageEnabled()
&& AbstractDatadogSparkListener.classIsLoadable(
"io.openlineage.spark.agent.OpenLineageSparkListener")
&& AbstractDatadogSparkListener.classIsLoadable(
"io.openlineage.spark.agent.facets.builder.TagsRunFacetBuilder")) {
if (!sparkContext.conf().contains("spark.extraListeners")) {
log.debug("spark.extraListeners does not contain any listeners. Adding OpenLineage");
sparkContext
.conf()
.set("spark.extraListeners", "io.openlineage.spark.agent.OpenLineageSparkListener");
} else {
String extraListeners = sparkContext.conf().get("spark.extraListeners");
if (!extraListeners.contains("io.openlineage.spark.agent.OpenLineageSparkListener")) {
log.debug(
"spark.extraListeners does contain listeners {}. Adding OpenLineage",
extraListeners);
sparkContext
.conf()
.set(
"spark.extraListeners",
extraListeners + ",io.openlineage.spark.agent.OpenLineageSparkListener");
}
}
}

// We want to add the Datadog listener as the first listener
AbstractDatadogSparkListener.listener =
new DatadogSpark213Listener(
sparkContext.getConf(), sparkContext.applicationId(), sparkContext.version());
Expand Down
Loading