Support dead letters with input_timestamp field #15

Merged · 4 commits · Jul 1, 2024
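In short: this change bumps error-handling-avro from 1.4.2 to 1.5.0, which provides the new input_timestamp field on the DeadLetter schema, and widens DeadLetterParser#convert by a recordTimestamp parameter so that both the Connect and the Streams parser preserve the failed record's original timestamp in the dead letter.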
2 changes: 1 addition & 1 deletion build.gradle.kts
@@ -52,7 +52,7 @@ dependencies {
implementation(group = "com.bakdata.kafka", name = "brute-force-serde", version = "1.2.1")
implementation(group = "com.bakdata.kafka", name = "large-message-serde", version = "2.7.0")
implementation(group = "org.jooq", name = "jool", version = "0.9.14")
-avroApi(group = "com.bakdata.kafka", name = "error-handling-avro", version = "1.4.2")
+avroApi(group = "com.bakdata.kafka", name = "error-handling-avro", version = "1.5.0")

val junitVersion = "5.10.1"
testRuntimeOnly(group = "org.junit.jupiter", name = "junit-jupiter-engine", version = junitVersion)
5 changes: 4 additions & 1 deletion src/main/java/com/bakdata/kafka/ConnectDeadLetterParser.java
@@ -37,6 +37,7 @@
import static org.apache.kafka.connect.runtime.errors.DeadLetterQueueReporter.ERROR_HEADER_STAGE;
import static org.apache.kafka.connect.runtime.errors.DeadLetterQueueReporter.ERROR_HEADER_TASK_ID;

+import java.time.Instant;
import java.util.Optional;
import lombok.RequiredArgsConstructor;
import org.apache.kafka.common.header.Headers;
@@ -45,7 +46,7 @@
class ConnectDeadLetterParser implements DeadLetterParser {

@Override
-public DeadLetter convert(final Object value, final Headers headers) {
+public DeadLetter convert(final Object value, final Headers headers, final long recordTimestamp) {
final Optional<Integer> partition = getHeader(headers, ERROR_HEADER_ORIG_PARTITION)
.map(HeaderHelper::intValue);
final Optional<String> topic = getHeader(headers, ERROR_HEADER_ORIG_TOPIC)
@@ -83,6 +84,8 @@ public DeadLetter convert(final Object value, final Headers headers) {
.setDescription(
String.format("Error in stage %s (%s) in %s[%d]", stage, clazz, connectorName, taskId))
.setCause(description)
+// Kafka Connect propagates the timestamp of the original message
+.setInputTimestamp(Instant.ofEpochMilli(recordTimestamp))
.build();
}

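For context, the new recordTimestamp argument is supplied by whichever component reads the dead-letter record. A minimal sketch of such a caller, assuming a plain consumer over a Kafka Connect DLQ topic (the topic name, consumer config, and wiring are hypothetical, not part of this PR):

import java.time.Duration;
import java.util.List;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;

static void readDlq() {
    final Properties props = new Properties();
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // hypothetical
    props.put(ConsumerConfig.GROUP_ID_CONFIG, "dlq-reader"); // hypothetical
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());
    try (final KafkaConsumer<byte[], byte[]> consumer = new KafkaConsumer<>(props)) {
        consumer.subscribe(List.of("my-connector-dlq")); // hypothetical DLQ topic
        for (final ConsumerRecord<byte[], byte[]> dlqRecord : consumer.poll(Duration.ofSeconds(1))) {
            // Connect copies the original message's timestamp onto the DLQ record,
            // so dlqRecord.timestamp() is what ends up in the inputTimestamp field.
            final DeadLetter deadLetter = new ConnectDeadLetterParser()
                    .convert(dlqRecord.value(), dlqRecord.headers(), dlqRecord.timestamp());
        }
    }
}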
4 changes: 3 additions & 1 deletion src/main/java/com/bakdata/kafka/DeadLetterParser.java
@@ -25,6 +25,7 @@
package com.bakdata.kafka;

import org.apache.kafka.common.header.Headers;
+import org.apache.kafka.streams.processor.api.FixedKeyRecord;

@FunctionalInterface
interface DeadLetterParser {
@@ -36,5 +37,6 @@ interface DeadLetterParser {
* @param headers headers to retrieve meta information such as topic, partition, and offset from.
* @return {@link DeadLetter} object representing error
*/
-DeadLetter convert(Object value, Headers headers);
+DeadLetter convert(Object value, Headers headers, long recordTimestamp);
+
}
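Since DeadLetterParser is a @FunctionalInterface, the widened signature can also be satisfied by a lambda. A toy sketch, assuming the cause type is the error-handling ErrorDescription record (the builder calls mirror the tests in this PR; all values are made up):

import java.time.Instant;

// Toy parser: ignores the headers and only captures value and timestamp.
final DeadLetterParser toyParser = (value, headers, recordTimestamp) -> DeadLetter.newBuilder()
        .setInputValue(String.valueOf(value))
        .setCause(ErrorDescription.newBuilder()
                .setStackTrace(null) // no stack trace in this toy example
                .build())
        .setDescription("toy parser")
        .setInputTimestamp(Instant.ofEpochMilli(recordTimestamp))
        .build();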
@@ -42,7 +42,8 @@ public void init(final FixedKeyProcessorContext<K, DeadLetter> context) {

@Override
public void process(final FixedKeyRecord<K, Object> inputRecord) {
-final DeadLetter deadLetter = this.converter.convert(inputRecord.value(), inputRecord.headers());
+final DeadLetter deadLetter =
+this.converter.convert(inputRecord.value(), inputRecord.headers(), inputRecord.timestamp());
this.context.forward(inputRecord.withValue(deadLetter));
}

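Note that inputRecord.withValue(deadLetter) copies the input record's key, timestamp, and headers onto the forwarded record, so the emitted dead letter keeps the failed record's timestamp on the wire as well as in its inputTimestamp field.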
5 changes: 4 additions & 1 deletion src/main/java/com/bakdata/kafka/StreamsDeadLetterParser.java
@@ -35,6 +35,7 @@
import static com.bakdata.kafka.HeaderHelper.missingRequiredHeader;
import static com.bakdata.kafka.HeaderHelper.stringValue;

+import java.time.Instant;
import java.util.Optional;
import lombok.RequiredArgsConstructor;
import org.apache.kafka.common.header.Headers;
@@ -45,7 +46,7 @@ class StreamsDeadLetterParser implements DeadLetterParser {
static final String FAULTY_OFFSET_HEADER = "HEADER_PREFIX + offset";

@Override
-public DeadLetter convert(final Object value, final Headers headers) {
+public DeadLetter convert(final Object value, final Headers headers, final long recordTimestamp) {
final int partition = getHeader(headers, PARTITION)
.map(HeaderHelper::intValue)
.orElseThrow(missingRequiredHeader(PARTITION));
@@ -82,6 +83,8 @@ public DeadLetter convert(final Object value, final Headers headers) {
.setInputValue(Optional.ofNullable(value).map(ErrorUtil::toString).orElse(null))
.setDescription(description)
.setCause(errorDescription)
+// The Header processor propagates the timestamp of the original message
+.setInputTimestamp(Instant.ofEpochMilli(recordTimestamp))
.build();
}
}
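Both parsers turn the epoch-millis long into a java.time.Instant via Instant.ofEpochMilli; a record timestamp of 200, for example, becomes 1970-01-01T00:00:00.200Z, which is what the updated tests assert.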
13 changes: 8 additions & 5 deletions src/test/java/com/bakdata/kafka/ConnectDeadLetterParserTest.java
@@ -36,6 +36,7 @@
import static org.apache.kafka.connect.runtime.errors.DeadLetterQueueReporter.ERROR_HEADER_TASK_ID;

import java.nio.charset.StandardCharsets;
+import java.time.Instant;
import java.util.stream.Stream;
import org.apache.kafka.common.header.Headers;
import org.apache.kafka.common.header.internals.RecordHeaders;
@@ -140,7 +141,8 @@ void shouldConvert() {
.add(ERROR_HEADER_CONNECTOR_NAME, toBytes("my-connector"))
.add(ERROR_HEADER_EXCEPTION_MESSAGE, toBytes("my message"))
.add(ERROR_HEADER_EXCEPTION_STACK_TRACE, toBytes(StackTraceClassifierTest.STACK_TRACE));
-this.softly.assertThat(new ConnectDeadLetterParser().convert("foo", headers))
+
+this.softly.assertThat(new ConnectDeadLetterParser().convert("foo", headers, 200))
.satisfies(deadLetter -> {
this.softly.assertThat(deadLetter.getInputValue()).hasValue("foo");
this.softly.assertThat(deadLetter.getPartition()).hasValue(1);
@@ -154,13 +156,14 @@
this.softly.assertThat(deadLetter.getDescription())
.isEqualTo("Error in stage VALUE_CONVERTER (org.apache.kafka.connect.json.JsonConverter) "
+ "in my-connector[2]");
+this.softly.assertThat(deadLetter.getInputTimestamp()).hasValue(Instant.ofEpochMilli(200));
});
}

@Test
void shouldConvertWithMissingHeaders() {
final Headers headers = generateDefaultHeaders();
-this.softly.assertThat(new ConnectDeadLetterParser().convert("foo", headers))
+this.softly.assertThat(new ConnectDeadLetterParser().convert("foo", headers, 0))
.satisfies(deadLetter -> {
this.softly.assertThat(deadLetter.getPartition()).isNotPresent();
this.softly.assertThat(deadLetter.getTopic()).isNotPresent();
@@ -176,22 +179,22 @@
void shouldConvertWithNullHeaders() {
final Headers headers = generateDefaultHeaders()
.add(ERROR_HEADER_EXCEPTION_MESSAGE, null);
-this.softly.assertThat(new ConnectDeadLetterParser().convert("foo", headers))
+this.softly.assertThat(new ConnectDeadLetterParser().convert("foo", headers, 0))
.satisfies(deadLetter -> this.softly.assertThat(deadLetter.getCause().getMessage()).isNotPresent());
}

@ParameterizedTest
@MethodSource("generateMissingRequiredHeaders")
void shouldThrowWithMissingRequiredHeaders(final Headers headers, final String message) {
-this.softly.assertThatThrownBy(() -> new ConnectDeadLetterParser().convert("foo", headers))
+this.softly.assertThatThrownBy(() -> new ConnectDeadLetterParser().convert("foo", headers, 0))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage(message);
}

@ParameterizedTest
@MethodSource("generateNonNullableHeaders")
void shouldThrowWithNonNullableHeaders(final Headers headers, final String message) {
-this.softly.assertThatThrownBy(() -> new ConnectDeadLetterParser().convert("foo", headers))
+this.softly.assertThatThrownBy(() -> new ConnectDeadLetterParser().convert("foo", headers, 0))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage(message);
}
@@ -118,6 +118,7 @@ void shouldProcessDeadLetter() {
.setStackTrace(StackTraceClassifierTest.STACK_TRACE)
.build())
.setDescription("description")
+.setInputTimestamp(Instant.ofEpochMilli(200L))
.build();
final long timestamp = 0L;
input.add("key", deadLetter, timestamp);
@@ -180,6 +181,7 @@ void shouldAggregateStatistics() {
.setStackTrace(StackTraceClassifierTest.STACK_TRACE)
.build())
.setDescription("description")
+.setInputTimestamp(Instant.ofEpochMilli(200L))
.build();
final long firstTimestamp = 0L;
input.add("key", firstDeadLetter, firstTimestamp);
@@ -239,6 +241,7 @@ void shouldOnlyForwardFirstExample() {
.setStackTrace(StackTraceClassifierTest.STACK_TRACE)
.build())
.setDescription("description")
+.setInputTimestamp(Instant.ofEpochMilli(200L))
.build();
final long firstTimestamp = 0L;
input.add("key", firstDeadLetter, firstTimestamp);
@@ -300,6 +303,7 @@ void shouldProduceDeadLetterAndAnalyze() {
.setStackTrace(null)
.build())
.setDescription("description")
+.setInputTimestamp(Instant.ofEpochMilli(200L))
.build();

final TestOutput<String, FullDeadLetterWithContext> processedDeadLetters =
@@ -388,7 +392,9 @@ void shouldProcessConnectErrors() {
.setPartition(1)
.setTopic("my-topic")
.setOffset(10L)
+.setInputTimestamp(Instant.ofEpochMilli(firstTimestamp))
.build();
+
this.softly.assertThat(seq(processedDeadLetters).toList())
.hasSize(1)
.anySatisfy(record -> {
@@ -454,7 +460,8 @@ void shouldProcessStreamsHeaderErrors() {
.add(EXCEPTION_CLASS_NAME, toBytes("org.apache.kafka.connect.errors.DataException"))
.add(EXCEPTION_MESSAGE, toBytes("my message"))
.add(EXCEPTION_STACK_TRACE, toBytes(StackTraceClassifierTest.STACK_TRACE));
-input.add("key", "value", 0L, headers);
+
+input.add("key", "value", firstTimestamp, headers);

final DeadLetter expectedDeadLetter = DeadLetter.newBuilder()
.setInputValue("value")
@@ -467,7 +474,9 @@
.setPartition(1)
.setTopic("my-topic")
.setOffset(10L)
+.setInputTimestamp(Instant.ofEpochMilli(firstTimestamp))
.build();
+
this.softly.assertThat(seq(processedDeadLetters).toList())
.hasSize(1)
.anySatisfy(record -> {
@@ -531,6 +540,7 @@ void shouldReadAvroKey() {
.setStackTrace(StackTraceClassifierTest.STACK_TRACE)
.build())
.setDescription("description")
+.setInputTimestamp(Instant.ofEpochMilli(200L))
.build();
input.add(TestRecord.newBuilder().setId(1).build(), firstDeadLetter);
this.softly.assertThat(seq(processedDeadLetters).toList())
16 changes: 10 additions & 6 deletions src/test/java/com/bakdata/kafka/StreamsDeadLetterParserTest.java
@@ -34,6 +34,7 @@
import static com.bakdata.kafka.ErrorHeaderProcessor.TOPIC;
import static com.bakdata.kafka.StreamsDeadLetterParser.FAULTY_OFFSET_HEADER;

+import java.time.Instant;
import java.util.stream.Stream;
import org.apache.kafka.common.header.Headers;
import org.apache.kafka.common.header.internals.RecordHeaders;
@@ -152,7 +153,8 @@ void shouldConvert() {
.add(EXCEPTION_CLASS_NAME, toBytes("org.apache.kafka.connect.errors.DataException"))
.add(EXCEPTION_MESSAGE, toBytes("my message"))
.add(EXCEPTION_STACK_TRACE, toBytes(StackTraceClassifierTest.STACK_TRACE));
-this.softly.assertThat(new StreamsDeadLetterParser().convert("foo", headers))
+
+this.softly.assertThat(new StreamsDeadLetterParser().convert("foo", headers, 200L))
.satisfies(deadLetter -> {
this.softly.assertThat(deadLetter.getInputValue()).hasValue("foo");
this.softly.assertThat(deadLetter.getPartition()).hasValue(1);
Expand All @@ -164,6 +166,7 @@ void shouldConvert() {
this.softly.assertThat(cause.getMessage()).hasValue("my message");
this.softly.assertThat(cause.getStackTrace()).hasValue(StackTraceClassifierTest.STACK_TRACE);
this.softly.assertThat(deadLetter.getDescription()).isEqualTo("description");
+this.softly.assertThat(deadLetter.getInputTimestamp()).hasValue(Instant.ofEpochMilli(200L));
});
}

@@ -172,7 +175,7 @@ void shouldConvertFaultyOffsetHeader() {
final Headers headers = generateDefaultHeaders()
.remove(OFFSET)
.add(FAULTY_OFFSET_HEADER, toBytes(100L));
-this.softly.assertThat(new StreamsDeadLetterParser().convert("foo", headers))
+this.softly.assertThat(new StreamsDeadLetterParser().convert("foo", headers, 0))
.satisfies(deadLetter -> this.softly.assertThat(deadLetter.getOffset()).hasValue(100L));
}

@@ -181,30 +184,31 @@ void shouldIgnoreFaultyOffsetHeader() {
final Headers headers = generateDefaultHeaders()
.add(OFFSET, toBytes(10L))
.add(FAULTY_OFFSET_HEADER, toBytes(100L));
-this.softly.assertThat(new StreamsDeadLetterParser().convert("foo", headers))
+this.softly.assertThat(new StreamsDeadLetterParser().convert("foo", headers, 0))
.satisfies(deadLetter -> this.softly.assertThat(deadLetter.getOffset()).hasValue(10L));
}

@Test
void shouldConvertNullMessageHeader() {
final Headers headers = generateDefaultHeaders()
.add(EXCEPTION_MESSAGE, null);
-this.softly.assertThat(new StreamsDeadLetterParser().convert("foo", headers))
+this.softly.assertThat(new StreamsDeadLetterParser().convert("foo", headers, 0))
.satisfies(deadLetter -> this.softly.assertThat(deadLetter.getCause().getMessage()).isNotPresent());
}


@ParameterizedTest
@MethodSource("generateMissingRequiredHeaders")
void shouldThrowWithMissingRequiredHeaders(final Headers headers, final String message) {
-this.softly.assertThatThrownBy(() -> new StreamsDeadLetterParser().convert("foo", headers))
+this.softly.assertThatThrownBy(() -> new StreamsDeadLetterParser().convert("foo", headers, 0))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage(message);
}

@ParameterizedTest
@MethodSource("generateNonNullableHeaders")
void shouldThrowWithNonNullableHeaders(final Headers headers, final String message) {
-this.softly.assertThatThrownBy(() -> new StreamsDeadLetterParser().convert("foo", headers))
+this.softly.assertThatThrownBy(() -> new StreamsDeadLetterParser().convert("foo", headers, 0))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage(message);
}