
Commit ddc8d74

For Oracle, change the data type for BLOB column to support storing up to 2GB (#3070)
1 parent 367eb2e commit ddc8d74

28 files changed, +798 -139 lines
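The engine-side change described in the commit title (mapping ScalarDB's BLOB type to an Oracle column type that can hold up to 2GB) is not among the hunks shown below, which cover the build file and the integration tests. The following is a hypothetical sketch of that kind of type mapping; the class and method names, and the assumption that a bounded RAW type was used before, are illustrative only, not the actual RdbEngineOracle code.

// Hypothetical sketch only, not the actual RdbEngineOracle implementation.
final class OracleBlobTypeMappingSketch {
  enum DataType { BLOB } // other ScalarDB data types omitted from this sketch

  // Maps a ScalarDB data type to an Oracle column type.
  static String columnTypeFor(DataType dataType) {
    if (dataType == DataType.BLOB) {
      // A bounded binary type such as RAW(2000) caps values at a few KB;
      // Oracle's BLOB type stores much larger values, covering the 2GB targeted by this commit.
      return "BLOB";
    }
    throw new IllegalArgumentException("Not covered in this sketch: " + dataType);
  }
}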

core/build.gradle

Lines changed: 1 addition & 0 deletions
@@ -251,6 +251,7 @@ task integrationTestJdbc(type: Test) {
     options {
         systemProperties(System.getProperties().findAll{it.key.toString().startsWith("scalardb")})
     }
+    maxHeapSize = "4g"
 }
 
 task integrationTestMultiStorage(type: Test) {

core/src/integration-test/java/com/scalar/db/storage/jdbc/ConsensusCommitAdminIntegrationTestWithJdbcDatabase.java

Lines changed: 1 addition & 1 deletion
@@ -497,6 +497,6 @@ public void alterColumnType_Sqlite_AlterColumnType_ShouldThrowUnsupportedOperati
 
   @Override
   protected boolean isIndexOnBlobColumnSupported() {
-    return !JdbcTestUtils.isDb2(rdbEngine);
+    return !(JdbcTestUtils.isDb2(rdbEngine) || JdbcTestUtils.isOracle(rdbEngine));
   }
 }

core/src/integration-test/java/com/scalar/db/storage/jdbc/JdbcAdminCaseSensitivityIntegrationTest.java

Lines changed: 1 addition & 1 deletion
@@ -611,6 +611,6 @@ public void alterColumnType_Sqlite_AlterColumnType_ShouldThrowUnsupportedOperati
 
   @Override
   protected boolean isIndexOnBlobColumnSupported() {
-    return !JdbcTestUtils.isDb2(rdbEngine);
+    return !(JdbcTestUtils.isDb2(rdbEngine) || JdbcTestUtils.isOracle(rdbEngine));
   }
 }

core/src/integration-test/java/com/scalar/db/storage/jdbc/JdbcAdminImportTestUtils.java

Lines changed: 1 addition & 1 deletion
@@ -514,7 +514,7 @@ private LinkedHashMap<String, String> prepareColumnsForDb2() {
     columns.put("col17", "NCLOB(512)");
     columns.put("col18", "BINARY(5)");
     columns.put("col19", "VARBINARY(512)");
-    columns.put("col20", "BLOB(1024)");
+    columns.put("col20", "BLOB(2G)");
     columns.put("col21", "CHAR(5) FOR BIT DATA");
     columns.put("col22", "VARCHAR(512) FOR BIT DATA");
     columns.put("col23", "DATE");

core/src/integration-test/java/com/scalar/db/storage/jdbc/JdbcAdminIntegrationTest.java

Lines changed: 1 addition & 1 deletion
@@ -611,6 +611,6 @@ public void alterColumnType_Sqlite_AlterColumnType_ShouldThrowUnsupportedOperati
 
   @Override
   protected boolean isIndexOnBlobColumnSupported() {
-    return !JdbcTestUtils.isDb2(rdbEngine);
+    return !(JdbcTestUtils.isDb2(rdbEngine) || JdbcTestUtils.isOracle(rdbEngine));
   }
 }

core/src/integration-test/java/com/scalar/db/storage/jdbc/JdbcDatabaseColumnValueIntegrationTest.java

Lines changed: 211 additions & 0 deletions
@@ -1,12 +1,41 @@
 package com.scalar.db.storage.jdbc;
 
+import static org.assertj.core.api.Assertions.assertThat;
+
 import com.scalar.db.api.DistributedStorageColumnValueIntegrationTestBase;
+import com.scalar.db.api.Get;
+import com.scalar.db.api.Put;
+import com.scalar.db.api.PutBuilder;
+import com.scalar.db.api.Result;
+import com.scalar.db.api.TableMetadata;
 import com.scalar.db.config.DatabaseConfig;
+import com.scalar.db.exception.storage.ExecutionException;
+import com.scalar.db.io.BigIntColumn;
+import com.scalar.db.io.BlobColumn;
+import com.scalar.db.io.BooleanColumn;
 import com.scalar.db.io.Column;
 import com.scalar.db.io.DataType;
+import com.scalar.db.io.DateColumn;
+import com.scalar.db.io.DoubleColumn;
+import com.scalar.db.io.FloatColumn;
+import com.scalar.db.io.IntColumn;
+import com.scalar.db.io.Key;
+import com.scalar.db.io.TextColumn;
+import com.scalar.db.io.TimeColumn;
+import com.scalar.db.io.TimestampColumn;
+import com.scalar.db.io.TimestampTZColumn;
 import com.scalar.db.util.TestUtils;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Optional;
 import java.util.Properties;
 import java.util.Random;
+import java.util.stream.Stream;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.condition.EnabledIf;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.MethodSource;
 
 public class JdbcDatabaseColumnValueIntegrationTest
     extends DistributedStorageColumnValueIntegrationTestBase {
@@ -68,4 +97,186 @@ protected Column<?> getColumnWithMaxValue(String columnName, DataType dataType)
     }
     return super.getColumnWithMaxValue(columnName, dataType);
   }
+  // TODO: Expand this test to cover all supported storages, not just Oracle/DB2.
+  // This test verifies that large BLOB data can be inserted and retrieved correctly.
+  // Currently, it is limited to Oracle and DB2 due to known differences in BLOB handling,
+  // potential resource constraints, or lack of support for large BLOBs in other engines.
+  // Before enabling it for other storages, investigate their BLOB size limits and behavior,
+  // and ensure the test does not cause failures or excessive resource usage.
+  @EnabledIf("isDb2OrOracle")
+  @ParameterizedTest()
+  @MethodSource("provideBlobSizes")
+  public void put_largeBlobData_ShouldWorkCorrectly(int blobSize, String humanReadableBlobSize)
+      throws ExecutionException {
+    String tableName = TABLE + "_large_single_blob";
+    try {
+      // Arrange
+      TableMetadata.Builder metadata =
+          TableMetadata.newBuilder()
+              .addColumn(COL_NAME1, DataType.INT)
+              .addColumn(COL_NAME2, DataType.BLOB)
+              .addPartitionKey(COL_NAME1);
+
+      admin.createTable(namespace, tableName, metadata.build(), true, getCreationOptions());
+      byte[] blobData = createLargeBlob(blobSize);
+      Put put =
+          Put.newBuilder()
+              .namespace(namespace)
+              .table(tableName)
+              .partitionKey(Key.ofInt(COL_NAME1, 1))
+              .blobValue(COL_NAME2, blobData)
+              .build();
+
+      // Act
+      storage.put(put);
+
+      // Assert
+      Optional<Result> optionalResult =
+          storage.get(
+              Get.newBuilder()
+                  .namespace(namespace)
+                  .table(tableName)
+                  .partitionKey(Key.ofInt(COL_NAME1, 1))
+                  .build());
+      assertThat(optionalResult).isPresent();
+      Result result = optionalResult.get();
+      assertThat(result.getColumns().get(COL_NAME2).getBlobValueAsBytes()).isEqualTo(blobData);
+    } finally {
+      admin.dropTable(namespace, tableName, true);
+    }
+  }
+
+  Stream<Arguments> provideBlobSizes() {
+    List<Arguments> args = new ArrayList<>();
+    if (isOracle()) {
+      // As explained in
+      // `com.scalar.db.storage.jdbc.RdbEngineOracle.bindBlobColumnToPreparedStatement()`,
+      // handling a BLOB size bigger than 32,766 bytes requires a workaround, so we particularly
+      // test values around it.
+      args.add(Arguments.of(32_766, "32.766 KB"));
+      args.add(Arguments.of(32_767, "32.767 KB"));
+    }
+    args.add(Arguments.of(100_000_000, "100 MB"));
+    return args.stream();
+  }
+
+  @EnabledIf("isOracle")
+  @Test
+  public void put_largeBlobData_WithMultipleBlobColumnsShouldWorkCorrectly()
+      throws ExecutionException {
+    String tableName = TABLE + "_large_multiples_blob";
+    try {
+      // Arrange
+      TableMetadata.Builder metadata =
+          TableMetadata.newBuilder()
+              .addColumn(COL_NAME1, DataType.INT)
+              .addColumn(COL_NAME2, DataType.BLOB)
+              .addColumn(COL_NAME3, DataType.BLOB)
+              .addPartitionKey(COL_NAME1);
+
+      admin.createTable(namespace, tableName, metadata.build(), true, getCreationOptions());
+      byte[] blobDataCol2 = createLargeBlob(32_766);
+      byte[] blobDataCol3 = createLargeBlob(5000);
+      Put put =
+          Put.newBuilder()
+              .namespace(namespace)
+              .table(tableName)
+              .partitionKey(Key.ofInt(COL_NAME1, 1))
+              .blobValue(COL_NAME2, blobDataCol2)
+              .blobValue(COL_NAME3, blobDataCol3)
+              .build();
+
+      // Act
+      storage.put(put);
+
+      // Assert
+      Optional<Result> optionalResult =
+          storage.get(
+              Get.newBuilder()
+                  .namespace(namespace)
+                  .table(tableName)
+                  .partitionKey(Key.ofInt(COL_NAME1, 1))
+                  .build());
+      assertThat(optionalResult).isPresent();
+      Result result = optionalResult.get();
+      assertThat(result.getColumns().get(COL_NAME2).getBlobValueAsBytes()).isEqualTo(blobDataCol2);
+      assertThat(result.getColumns().get(COL_NAME3).getBlobValueAsBytes()).isEqualTo(blobDataCol3);
+    } finally {
+      admin.dropTable(namespace, tableName, true);
+    }
+  }
+
+  @EnabledIf("isOracle")
+  @Test
+  public void put_largeBlobData_WithAllColumnsTypesShouldWorkCorrectly() throws ExecutionException {
+    // Arrange
+    IntColumn partitionKeyValue = (IntColumn) getColumnWithMaxValue(PARTITION_KEY, DataType.INT);
+    BooleanColumn col1Value = (BooleanColumn) getColumnWithMaxValue(COL_NAME1, DataType.BOOLEAN);
+    IntColumn col2Value = (IntColumn) getColumnWithMaxValue(COL_NAME2, DataType.INT);
+    BigIntColumn col3Value = (BigIntColumn) getColumnWithMaxValue(COL_NAME3, DataType.BIGINT);
+    FloatColumn col4Value = (FloatColumn) getColumnWithMaxValue(COL_NAME4, DataType.FLOAT);
+    DoubleColumn col5Value = (DoubleColumn) getColumnWithMaxValue(COL_NAME5, DataType.DOUBLE);
+    TextColumn col6Value = (TextColumn) getColumnWithMaxValue(COL_NAME6, DataType.TEXT);
+    BlobColumn col7Value = BlobColumn.of(COL_NAME7, createLargeBlob(32_766));
+    DateColumn col8Value = (DateColumn) getColumnWithMaxValue(COL_NAME8, DataType.DATE);
+    TimeColumn col9Value = (TimeColumn) getColumnWithMaxValue(COL_NAME9, DataType.TIME);
+    TimestampTZColumn col10Value =
+        (TimestampTZColumn) getColumnWithMaxValue(COL_NAME10, DataType.TIMESTAMPTZ);
+    TimestampColumn column11Value = null;
+    if (isTimestampTypeSupported()) {
+      column11Value = (TimestampColumn) getColumnWithMaxValue(COL_NAME11, DataType.TIMESTAMP);
+    }
+
+    PutBuilder.Buildable put =
+        Put.newBuilder()
+            .namespace(namespace)
+            .table(TABLE)
+            .partitionKey(Key.newBuilder().add(partitionKeyValue).build())
+            .value(col1Value)
+            .value(col2Value)
+            .value(col3Value)
+            .value(col4Value)
+            .value(col5Value)
+            .value(col6Value)
+            .value(col7Value)
+            .value(col8Value)
+            .value(col9Value)
+            .value(col10Value);
+    if (isTimestampTypeSupported()) {
+      put.value(column11Value);
+    }
+    // Act
+    storage.put(put.build());
+
+    // Assert
+    assertResult(
+        partitionKeyValue,
+        col1Value,
+        col2Value,
+        col3Value,
+        col4Value,
+        col5Value,
+        col6Value,
+        col7Value,
+        col8Value,
+        col9Value,
+        col10Value,
+        column11Value);
+  }
+
+  private byte[] createLargeBlob(int size) {
+    byte[] blob = new byte[size];
+    random.nextBytes(blob);
+    return blob;
+  }
+
+  @SuppressWarnings("unused")
+  private boolean isDb2OrOracle() {
+    return JdbcEnv.isOracle() || JdbcEnv.isDb2();
+  }
+
+  @SuppressWarnings("unused")
+  private boolean isOracle() {
+    return JdbcEnv.isOracle();
+  }
 }
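The `provideBlobSizes` comment above refers to a workaround in `RdbEngineOracle.bindBlobColumnToPreparedStatement()` for BLOB values larger than 32,766 bytes. The sketch below only illustrates what such a size-dependent binding can look like with plain JDBC (`setBytes` for small values, `setBinaryStream` for large ones); the class and method names are made up here, the threshold is taken from the test comment, and the actual ScalarDB implementation may differ.

// Minimal sketch of a size-dependent BLOB binding workaround; the real
// RdbEngineOracle.bindBlobColumnToPreparedStatement() may differ.
import java.io.ByteArrayInputStream;
import java.sql.PreparedStatement;
import java.sql.SQLException;

final class OracleBlobBindingSketch {
  // Threshold taken from the test comment above.
  private static final int MAX_DIRECT_BIND_SIZE = 32_766;

  static void bindBlob(PreparedStatement statement, int index, byte[] value) throws SQLException {
    if (value.length <= MAX_DIRECT_BIND_SIZE) {
      // Small values can be bound directly as a byte array.
      statement.setBytes(index, value);
    } else {
      // Larger values are streamed so the driver handles them as a LOB rather than a bounded bind.
      statement.setBinaryStream(index, new ByteArrayInputStream(value), value.length);
    }
  }
}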

core/src/integration-test/java/com/scalar/db/storage/jdbc/JdbcDatabaseConditionalMutationIntegrationTest.java

Lines changed: 5 additions & 0 deletions
@@ -42,4 +42,9 @@ protected Column<?> getColumnWithRandomValue(
     }
     return super.getColumnWithRandomValue(random, columnName, dataType);
   }
+
+  @Override
+  protected boolean isConditionOnBlobColumnSupported() {
+    return !JdbcTestUtils.isOracle(rdbEngine);
+  }
 }
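The new `isConditionOnBlobColumnSupported()` override turns off condition-on-BLOB test cases for Oracle. The diff itself does not state the reason; a plausible one (an assumption here, not taken from the commit) is that Oracle SQL rejects LOB columns in ordinary comparison predicates, so an equality condition on a BLOB column cannot be expressed directly. The sketch below illustrates that with plain JDBC and hypothetical table and column names.

// Hedged sketch, not ScalarDB code; table and column names are hypothetical.
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

final class OracleBlobConditionSketch {
  // "WHERE blob_col = ?" against an Oracle BLOB column is rejected (ORA-00932, inconsistent
  // datatypes), so a direct equality condition on a BLOB cannot be pushed down; Oracle's
  // DBMS_LOB.COMPARE is the usual way to compare LOB contents in SQL.
  static boolean rowMatchesBlob(Connection connection, byte[] expected) throws SQLException {
    String sql = "SELECT 1 FROM sample_table WHERE DBMS_LOB.COMPARE(blob_col, TO_BLOB(?)) = 0";
    try (PreparedStatement statement = connection.prepareStatement(sql)) {
      statement.setBytes(1, expected);
      try (ResultSet resultSet = statement.executeQuery()) {
        return resultSet.next();
      }
    }
  }
}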

core/src/integration-test/java/com/scalar/db/storage/jdbc/JdbcDatabaseCrossPartitionScanIntegrationTest.java

Lines changed: 6 additions & 1 deletion
@@ -83,6 +83,11 @@ protected Stream<Arguments> provideColumnsForCNFConditionsTest() {
 
   @Override
   protected boolean isOrderingOnBlobColumnSupported() {
-    return !JdbcTestUtils.isDb2(rdbEngine);
+    return !(JdbcTestUtils.isDb2(rdbEngine) || JdbcTestUtils.isOracle(rdbEngine));
+  }
+
+  @Override
+  protected boolean isConditionOnBlobColumnSupported() {
+    return !JdbcTestUtils.isOracle(rdbEngine);
   }
 }

core/src/integration-test/java/com/scalar/db/storage/jdbc/JdbcDatabaseMultipleClusteringKeyScanIntegrationTest.java

Lines changed: 1 addition & 1 deletion
@@ -100,7 +100,7 @@ protected List<DataType> getDataTypes() {
         rdbEngine,
         ImmutableMap.of(
             RdbEngineOracle.class,
-            ImmutableList.of(DataType.TIMESTAMPTZ),
+            ImmutableList.of(DataType.TIMESTAMPTZ, DataType.BLOB),
             RdbEngineDb2.class,
             ImmutableList.of(DataType.BLOB)));
   }

core/src/integration-test/java/com/scalar/db/storage/jdbc/JdbcDatabaseMultiplePartitionKeyIntegrationTest.java

Lines changed: 1 addition & 1 deletion
@@ -94,7 +94,7 @@ protected List<DataType> getDataTypes() {
         rdbEngine,
         ImmutableMap.of(
             RdbEngineOracle.class,
-            ImmutableList.of(DataType.TIMESTAMPTZ),
+            ImmutableList.of(DataType.TIMESTAMPTZ, DataType.BLOB),
             RdbEngineYugabyte.class,
            ImmutableList.of(DataType.FLOAT, DataType.DOUBLE),
             RdbEngineDb2.class,
