Fix BLOB reading and writing.
We now consider ByteBuffer, Clob, and Blob as additional simple types so that BLOB and CLOB data is read and written correctly.

Closes #1408
mp911de committed Jan 10, 2023
1 parent 1ff403e · commit d6e00a8
Showing 3 changed files with 103 additions and 7 deletions.
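
For context, here is a minimal sketch of what this change enables on the application side. The Attachment entity, its table name, and the roundTrip helper are illustrative assumptions rather than part of this commit; they only show that byte[], ByteBuffer, and Blob properties can now round-trip through R2dbcEntityTemplate like any other simple type.

import io.r2dbc.spi.Blob;

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

import org.springframework.data.annotation.Id;
import org.springframework.data.r2dbc.core.R2dbcEntityTemplate;
import org.springframework.data.relational.core.mapping.Table;
import org.springframework.data.relational.core.query.Query;

import reactor.core.publisher.Mono;

@Table("attachments") // hypothetical table backed by bytea/BLOB columns
class Attachment {

	@Id Integer id;

	byte[] byteArray; // stored as-is
	ByteBuffer byteBuffer; // now registered as a simple type
	Blob byteBlob; // now read through row.get(column, Blob.class)
}

class AttachmentSketch {

	// Insert an entity carrying binary payloads and read it back.
	Mono<Attachment> roundTrip(R2dbcEntityTemplate template) {

		byte[] content = "example".getBytes(StandardCharsets.UTF_8);

		Attachment attachment = new Attachment();
		attachment.byteArray = content;
		attachment.byteBuffer = ByteBuffer.wrap(content);
		attachment.byteBlob = Blob.from(Mono.just(ByteBuffer.wrap(content)));

		return template.insert(attachment) //
				.then(template.selectOne(Query.empty(), Attachment.class));
	}
}

The hunks below add Blob, Clob, and ByteBuffer to the set of R2DBC simple types and teach the row reader to fetch Clob and Blob values through a typed get; the new integration test covers the round trip shown above.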
@@ -15,6 +15,8 @@
*/
package org.springframework.data.r2dbc.convert;

import io.r2dbc.spi.Blob;
import io.r2dbc.spi.Clob;
import io.r2dbc.spi.ColumnMetadata;
import io.r2dbc.spi.Row;
import io.r2dbc.spi.RowMetadata;
@@ -158,7 +160,14 @@ private Object readFrom(Row row, @Nullable RowMetadata metadata, RelationalPersi

Object value = null;
if (metadata == null || RowMetadataUtils.containsColumn(metadata, identifier)) {
value = row.get(identifier);

if (property.getType().equals(Clob.class)) {
value = row.get(identifier, Clob.class);
} else if (property.getType().equals(Blob.class)) {
value = row.get(identifier, Blob.class);
} else {
value = row.get(identifier);
}
}

if (value == null) {
@@ -15,10 +15,13 @@
*/
package org.springframework.data.r2dbc.mapping;

import io.r2dbc.spi.Blob;
import io.r2dbc.spi.Clob;
import io.r2dbc.spi.Row;

import java.math.BigDecimal;
import java.math.BigInteger;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
@@ -37,8 +40,9 @@ public class R2dbcSimpleTypeHolder extends SimpleTypeHolder {
/**
* Set of R2DBC simple types.
*/
public static final Set<Class<?>> R2DBC_SIMPLE_TYPES = Collections.unmodifiableSet(
new HashSet<>(Arrays.asList(OutboundRow.class, Row.class, BigInteger.class, BigDecimal.class, UUID.class)));
public static final Set<Class<?>> R2DBC_SIMPLE_TYPES = Collections
.unmodifiableSet(new HashSet<>(Arrays.asList(OutboundRow.class, Row.class, BigInteger.class, BigDecimal.class,
UUID.class, Blob.class, Clob.class, ByteBuffer.class)));

public static final SimpleTypeHolder HOLDER = new R2dbcSimpleTypeHolder();

@@ -15,9 +15,12 @@
*/
package org.springframework.data.r2dbc.core;

import static io.netty.buffer.ByteBufUtil.*;
import static org.assertj.core.api.Assertions.*;
import static org.springframework.data.relational.core.query.Criteria.*;

import io.netty.buffer.ByteBufUtil;
import io.netty.buffer.Unpooled;
import io.r2dbc.postgresql.PostgresqlConnectionConfiguration;
import io.r2dbc.postgresql.PostgresqlConnectionFactory;
import io.r2dbc.postgresql.codec.Box;
@@ -30,21 +33,26 @@
import io.r2dbc.postgresql.codec.Point;
import io.r2dbc.postgresql.codec.Polygon;
import io.r2dbc.postgresql.extension.CodecRegistrar;
import io.r2dbc.spi.Blob;
import io.r2dbc.spi.ConnectionFactory;
import lombok.AllArgsConstructor;
import lombok.Data;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.test.StepVerifier;

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CompletableFuture;

import javax.sql.DataSource;

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;

import org.springframework.dao.DataAccessException;
import org.springframework.data.annotation.Id;
import org.springframework.data.r2dbc.convert.EnumWriteSupport;
@@ -81,6 +89,13 @@ void before() {
+ "primitive_array INT[]," //
+ "multidimensional_array INT[]," //
+ "collection_array INT[][])");

template.execute("DROP TABLE IF EXISTS with_blobs");
template.execute("CREATE TABLE with_blobs (" //
+ "id serial PRIMARY KEY," //
+ "byte_array bytea," //
+ "byte_buffer bytea," //
+ "byte_blob bytea)");
}

@Test // gh-411
@@ -198,9 +213,9 @@ void shouldReadAndWriteInterval() {

template.execute("DROP TABLE IF EXISTS with_interval");
template.execute("CREATE TABLE with_interval (" //
+ "id serial PRIMARY KEY," //
+ "interval INTERVAL" //
+ ")");
+ "id serial PRIMARY KEY," //
+ "interval INTERVAL" //
+ ")");

R2dbcEntityTemplate template = new R2dbcEntityTemplate(client,
new DefaultReactiveDataAccessStrategy(PostgresDialect.INSTANCE));
@@ -213,6 +228,62 @@ void shouldReadAndWriteInterval() {
}).verifyComplete();
}

@Test // gh-1408
void shouldReadAndWriteBlobs() {

R2dbcEntityTemplate template = new R2dbcEntityTemplate(client,
new DefaultReactiveDataAccessStrategy(PostgresDialect.INSTANCE));

WithBlobs withBlobs = new WithBlobs();
byte[] content = "123ä".getBytes(StandardCharsets.UTF_8);

withBlobs.byteArray = content;
withBlobs.byteBuffer = ByteBuffer.wrap(content);
withBlobs.byteBlob = Blob.from(Mono.just(ByteBuffer.wrap(content)));

template.insert(withBlobs) //
.as(StepVerifier::create) //
.expectNextCount(1) //
.verifyComplete();

template.selectOne(Query.empty(), WithBlobs.class) //
.flatMap(it -> {
return Flux.from(it.byteBlob.stream()).last().map(blob -> {
it.byteBlob = Blob.from(Mono.just(blob));
return it;
});
}).as(StepVerifier::create) //
.consumeNextWith(actual -> {

CompletableFuture<byte[]> cf = Mono.from(actual.byteBlob.stream()).map(Unpooled::wrappedBuffer)
.map(ByteBufUtil::getBytes).toFuture();
assertThat(actual.getByteArray()).isEqualTo(content);
assertThat(getBytes(Unpooled.wrappedBuffer(actual.getByteBuffer()))).isEqualTo(content);
assertThat(cf.join()).isEqualTo(content);
}).verifyComplete();

template.selectOne(Query.empty(), WithBlobs.class)
.doOnNext(it -> it.byteArray = "foo".getBytes(StandardCharsets.UTF_8)).flatMap(template::update) //
.as(StepVerifier::create) //
.expectNextCount(1).verifyComplete();

template.selectOne(Query.empty(), WithBlobs.class) //
.flatMap(it -> {
return Flux.from(it.byteBlob.stream()).last().map(blob -> {
it.byteBlob = Blob.from(Mono.just(blob));
return it;
});
}).as(StepVerifier::create) //
.consumeNextWith(actual -> {

CompletableFuture<byte[]> cf = Mono.from(actual.byteBlob.stream()).map(Unpooled::wrappedBuffer)
.map(ByteBufUtil::getBytes).toFuture();
assertThat(actual.getByteArray()).isEqualTo("foo".getBytes(StandardCharsets.UTF_8));
assertThat(getBytes(Unpooled.wrappedBuffer(actual.getByteBuffer()))).isEqualTo(content);
assertThat(cf.join()).isEqualTo(content);
}).verifyComplete();
}

@Data
@AllArgsConstructor
static class EntityWithEnum {
@@ -260,4 +331,16 @@ static class EntityWithInterval {

}

@Data
@Table("with_blobs")
static class WithBlobs {

@Id Integer id;

byte[] byteArray;
ByteBuffer byteBuffer;
Blob byteBlob;

}

}
