diff --git a/core/src/main/java/com/scalar/db/common/error/CoreError.java b/core/src/main/java/com/scalar/db/common/error/CoreError.java index 29d8c35c94..582e23994f 100644 --- a/core/src/main/java/com/scalar/db/common/error/CoreError.java +++ b/core/src/main/java/com/scalar/db/common/error/CoreError.java @@ -610,6 +610,16 @@ public enum CoreError implements ScalarDbError { "Invalid file extension: %s. Allowed extensions are: %s", "", ""), + DATA_LOADER_INVALID_COLUMN_KEY_PARSING_FAILED( + Category.USER_ERROR, "0136", "Invalid key: Column %s does not exist in the table.", "", ""), + DATA_LOADER_INVALID_VALUE_KEY_PARSING_FAILED( + Category.USER_ERROR, "0137", "Parsing of key value %s failed. Details:%s.", "", ""), + DATA_LOADER_INVALID_BASE64_ENCODING_FOR_COLUMN_VALUE( + Category.USER_ERROR, "0138", "Invalid base64 encoding for blob value for column %s", "", ""), + DATA_LOADER_INVALID_NUMBER_FORMAT_FOR_COLUMN_VALUE( + Category.USER_ERROR, "0139", "Invalid number specified for column %s", "", ""), + DATA_LOADER_MISSING_NAMESPACE_OR_TABLE( + Category.USER_ERROR, "0140", "Missing namespace or table: %s, %s.", "", ""), // // Errors for the concurrency error category diff --git a/data-loader/build.gradle b/data-loader/build.gradle index 1e09d80d15..dc73d45023 100644 --- a/data-loader/build.gradle +++ b/data-loader/build.gradle @@ -18,5 +18,10 @@ subprojects { // Apache Commons implementation("org.apache.commons:commons-lang3:${apacheCommonsLangVersion}") implementation("commons-io:commons-io:${apacheCommonsIoVersion}") + + // Mockito + testImplementation "org.mockito:mockito-core:${mockitoVersion}" + testImplementation "org.mockito:mockito-inline:${mockitoVersion}" + testImplementation "org.mockito:mockito-junit-jupiter:${mockitoVersion}" } } diff --git a/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/command/ColumnKeyValueConverter.java b/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/command/ColumnKeyValueConverter.java index 
d0e49f9df3..9022e4cf0c 100644 --- a/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/command/ColumnKeyValueConverter.java +++ b/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/command/ColumnKeyValueConverter.java @@ -1,5 +1,6 @@ package com.scalar.db.dataloader.cli.command; +import com.scalar.db.dataloader.core.ColumnKeyValue; import picocli.CommandLine; /** diff --git a/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/command/dataexport/ExportCommand.java b/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/command/dataexport/ExportCommand.java index 873fad3c22..1dafa4a05c 100644 --- a/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/command/dataexport/ExportCommand.java +++ b/data-loader/cli/src/main/java/com/scalar/db/dataloader/cli/command/dataexport/ExportCommand.java @@ -1,10 +1,15 @@ package com.scalar.db.dataloader.cli.command.dataexport; +import com.scalar.db.api.TableMetadata; import com.scalar.db.common.error.CoreError; import com.scalar.db.dataloader.cli.exception.DirectoryValidationException; import com.scalar.db.dataloader.cli.exception.InvalidFileExtensionException; import com.scalar.db.dataloader.cli.util.DirectoryUtils; +import com.scalar.db.dataloader.core.tablemetadata.TableMetadataService; +import com.scalar.db.dataloader.core.util.KeyUtils; +import com.scalar.db.service.StorageFactory; import java.io.File; +import java.io.IOException; import java.util.Arrays; import java.util.List; import java.util.concurrent.Callable; @@ -25,6 +30,13 @@ public class ExportCommand extends ExportCommandOptions implements Callable getTableMetadata(Collection requests) + throws TableMetadataException { + Map metadataMap = new HashMap<>(); + + for (TableMetadataRequest request : requests) { + String namespace = request.getNamespace(); + String tableName = request.getTableName(); + TableMetadata tableMetadata = getTableMetadata(namespace, tableName); + String key = TableMetadataUtils.getTableLookupKey(namespace, 
tableName); + metadataMap.put(key, tableMetadata); + } + + return metadataMap; + } +} diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/ColumnUtils.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/ColumnUtils.java new file mode 100644 index 0000000000..a27702ae84 --- /dev/null +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/ColumnUtils.java @@ -0,0 +1,74 @@ +package com.scalar.db.dataloader.core.util; + +import com.scalar.db.common.error.CoreError; +import com.scalar.db.dataloader.core.exception.Base64Exception; +import com.scalar.db.io.BigIntColumn; +import com.scalar.db.io.BlobColumn; +import com.scalar.db.io.BooleanColumn; +import com.scalar.db.io.Column; +import com.scalar.db.io.DataType; +import com.scalar.db.io.DoubleColumn; +import com.scalar.db.io.FloatColumn; +import com.scalar.db.io.IntColumn; +import com.scalar.db.io.TextColumn; +import java.util.Base64; + +/** Utility class for dealing and creating ScalarDB Columns */ +public final class ColumnUtils { + private ColumnUtils() { + // restrict instantiation + } + + /** + * Create a ScalarDB column from the given data type, column name, and value. Blob source values + * need to be base64 encoded. + * + * @param dataType Data type of the specified column + * @param columnName ScalarDB table column name + * @param value Value for the ScalarDB column + * @return ScalarDB column + * @throws Base64Exception if an error occurs while base64 decoding + */ + public static Column createColumnFromValue(DataType dataType, String columnName, String value) + throws Base64Exception { + try { + switch (dataType) { + case BOOLEAN: + return value != null + ? BooleanColumn.of(columnName, Boolean.parseBoolean(value)) + : BooleanColumn.ofNull(columnName); + case INT: + return value != null + ? IntColumn.of(columnName, Integer.parseInt(value)) + : IntColumn.ofNull(columnName); + case BIGINT: + return value != null + ? 
BigIntColumn.of(columnName, Long.parseLong(value)) + : BigIntColumn.ofNull(columnName); + case FLOAT: + return value != null + ? FloatColumn.of(columnName, Float.parseFloat(value)) + : FloatColumn.ofNull(columnName); + case DOUBLE: + return value != null + ? DoubleColumn.of(columnName, Double.parseDouble(value)) + : DoubleColumn.ofNull(columnName); + case TEXT: + return value != null ? TextColumn.of(columnName, value) : TextColumn.ofNull(columnName); + case BLOB: + // Source blob values need to be base64 encoded + return value != null + ? BlobColumn.of(columnName, Base64.getDecoder().decode(value)) + : BlobColumn.ofNull(columnName); + default: + throw new AssertionError(); + } + } catch (NumberFormatException e) { + throw new NumberFormatException( + CoreError.DATA_LOADER_INVALID_NUMBER_FORMAT_FOR_COLUMN_VALUE.buildMessage(columnName)); + } catch (IllegalArgumentException e) { + throw new Base64Exception( + CoreError.DATA_LOADER_INVALID_BASE64_ENCODING_FOR_COLUMN_VALUE.buildMessage(columnName)); + } + } +} diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/KeyUtils.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/KeyUtils.java new file mode 100644 index 0000000000..e13b24e15e --- /dev/null +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/KeyUtils.java @@ -0,0 +1,63 @@ +package com.scalar.db.dataloader.core.util; + +import com.scalar.db.api.TableMetadata; +import com.scalar.db.common.error.CoreError; +import com.scalar.db.dataloader.core.ColumnKeyValue; +import com.scalar.db.dataloader.core.exception.Base64Exception; +import com.scalar.db.dataloader.core.exception.KeyParsingException; +import com.scalar.db.io.Column; +import com.scalar.db.io.DataType; +import com.scalar.db.io.Key; +import javax.annotation.Nullable; + +/** Utility class for creating and dealing with ScalarDB keys. 
*/ +public final class KeyUtils { + + private KeyUtils() { + // restrict instantiation + } + + /** + * Convert a keyValue, in the format of =, to a ScalarDB Key instance. + * + * @param columnKeyValue A key value in the format of = + * @param tableMetadata Metadata for one ScalarDB table + * @return A new ScalarDB Key instance formatted by data type + * @throws KeyParsingException if there is an error parsing the key value + */ + public static Key parseKeyValue( + @Nullable ColumnKeyValue columnKeyValue, TableMetadata tableMetadata) + throws KeyParsingException { + if (columnKeyValue == null) { + return null; + } + String columnName = columnKeyValue.getColumnName(); + DataType columnDataType = tableMetadata.getColumnDataType(columnName); + if (columnDataType == null) { + throw new KeyParsingException( + CoreError.DATA_LOADER_INVALID_COLUMN_KEY_PARSING_FAILED.buildMessage(columnName)); + } + try { + return createKey(columnDataType, columnName, columnKeyValue.getColumnValue()); + } catch (Base64Exception e) { + throw new KeyParsingException( + CoreError.DATA_LOADER_INVALID_VALUE_KEY_PARSING_FAILED.buildMessage( + columnKeyValue.getColumnValue(), e.getMessage())); + } + } + + /** + * Create a ScalarDB key based on the provided data type, column name, and value. 
+ * + * @param dataType Data type of the specified column + * @param columnName ScalarDB table column name + * @param value Value for ScalarDB key + * @return ScalarDB Key instance + * @throws Base64Exception if there is an error creating the key value + */ + public static Key createKey(DataType dataType, String columnName, String value) + throws Base64Exception { + Column keyValue = ColumnUtils.createColumnFromValue(dataType, columnName, value); + return Key.newBuilder().add(keyValue).build(); + } +} diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/TableMetadataUtils.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/TableMetadataUtils.java new file mode 100644 index 0000000000..907104312c --- /dev/null +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/util/TableMetadataUtils.java @@ -0,0 +1,151 @@ +package com.scalar.db.dataloader.core.util; + +import com.scalar.db.api.TableMetadata; +import com.scalar.db.io.DataType; +import com.scalar.db.transaction.consensuscommit.Attribute; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +/** Utils for ScalarDB table metadata */ +public final class TableMetadataUtils { + + /** Format for the lookup key for a table in a namespace */ + public static final String TABLE_LOOKUP_KEY_FORMAT = "%s.%s"; + + /** Private constructor to prevent instantiation */ + private TableMetadataUtils() {} + + /** + * Check if the field is a ScalarDB transaction metadata column or not + * + * @param columnName Table column name + * @param metadataColumns Fixed list of metadata columns + * @param columnNames List of all column names in a table + * @return The field is metadata or not + */ + public static boolean isMetadataColumn( + String columnName, Set metadataColumns, Set 
columnNames) { + // Skip field if it can be ignored + if (metadataColumns.contains(columnName)) { + return true; + } + + // Skip if the field is a "before_" field + return columnName.startsWith(Attribute.BEFORE_PREFIX) + && !columnNames.contains(Attribute.BEFORE_PREFIX + columnName); + } + + /** + * Check if the field is a ScalarDB transaction metadata column or not + * + * @param columnName ScalarDB table column name5 + * @param tableMetadata Metadata for a single ScalarDB + * @return is the field a metadata column or not + */ + public static boolean isMetadataColumn(String columnName, TableMetadata tableMetadata) { + Set metadataColumns = getMetadataColumns(); + LinkedHashSet columnNames = tableMetadata.getColumnNames(); + + // Skip field if it can be ignored + if (metadataColumns.contains(columnName)) { + return true; + } + + // Skip if the field is a "before_" field + return columnName.startsWith(Attribute.BEFORE_PREFIX) + && !columnNames.contains(Attribute.BEFORE_PREFIX + columnName); + } + + /** + * Return a list of fixed ScalarDB transaction metadata columns + * + * @return Set of columns + */ + public static Set getMetadataColumns() { + return Stream.of( + Attribute.ID, + Attribute.STATE, + Attribute.VERSION, + Attribute.PREPARED_AT, + Attribute.COMMITTED_AT, + Attribute.BEFORE_ID, + Attribute.BEFORE_STATE, + Attribute.BEFORE_VERSION, + Attribute.BEFORE_PREPARED_AT, + Attribute.BEFORE_COMMITTED_AT) + .collect(Collectors.toCollection(HashSet::new)); + } + + /** + * Return a map with the data types for all columns in a ScalarDB table + * + * @param tableMetadata Metadata for a single ScalarDB table + * @return data types map + */ + public static Map extractColumnDataTypes(TableMetadata tableMetadata) { + Map columnDataTypes = new HashMap<>(); + tableMetadata + .getColumnNames() + .forEach( + columnName -> + columnDataTypes.computeIfAbsent(columnName, tableMetadata::getColumnDataType)); + return columnDataTypes; + } + + /** + * Return lookup key for a table in 
a namespace + * + * @param namespace Namespace + * @param tableName Table name + * @return Table metadata lookup key + */ + public static String getTableLookupKey(String namespace, String tableName) { + return String.format(TABLE_LOOKUP_KEY_FORMAT, namespace, tableName); + } + + /** + * Populate the projection columns with ScalarDB transaction metadata columns + * + * @param tableMetadata Metadata for a single ScalarDB table + * @param projections List of projection columns + * @return List of projection columns with metadata columns + */ + public static List populateProjectionsWithMetadata( + TableMetadata tableMetadata, List projections) { + List projectionMetadata = new ArrayList<>(); + + // Add projection columns along with metadata columns + projections.forEach( + projection -> { + projectionMetadata.add(projection); + if (!isKeyColumn(projection, tableMetadata)) { + // Add metadata column before the projection if it's not a key column + projectionMetadata.add(Attribute.BEFORE_PREFIX + projection); + } + }); + + // Add fixed metadata columns + projectionMetadata.addAll(getMetadataColumns()); + + return projectionMetadata; + } + + /** + * Checks if a column is a key column (partition key or clustering key) in the table. + * + * @param column The column name to check. + * @param tableMetadata The metadata of the ScalarDB table. + * @return True if the column is a key column, false otherwise. 
+ */ + private static boolean isKeyColumn(String column, TableMetadata tableMetadata) { + return tableMetadata.getPartitionKeyNames().contains(column) + || tableMetadata.getClusteringKeyNames().contains(column); + } +} diff --git a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/tablemetadata/TableMetadataServiceTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/tablemetadata/TableMetadataServiceTest.java new file mode 100644 index 0000000000..38bb09df46 --- /dev/null +++ b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/tablemetadata/TableMetadataServiceTest.java @@ -0,0 +1,157 @@ +package com.scalar.db.dataloader.core.tablemetadata; + +import static org.junit.jupiter.api.Assertions.*; +import static org.mockito.Mockito.*; + +import com.scalar.db.api.DistributedStorageAdmin; +import com.scalar.db.api.TableMetadata; +import com.scalar.db.common.error.CoreError; +import com.scalar.db.exception.storage.ExecutionException; +import com.scalar.db.service.StorageFactory; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Map; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; + +class TableMetadataServiceTest { + + @Mock private StorageFactory storageFactory; + @Mock private DistributedStorageAdmin storageAdmin; + + private TableMetadataService tableMetadataService; + + @BeforeEach + void setUp() { + MockitoAnnotations.openMocks(this); + tableMetadataService = new TableMetadataService(storageFactory); + when(storageFactory.getStorageAdmin()).thenReturn(storageAdmin); + } + + @Test + void getTableMetadata_WithValidNamespaceAndTable_ShouldReturnTableMetadata() + throws ExecutionException, TableMetadataException { + // Arrange + String namespace = "test_namespace"; + String tableName = "test_table"; + TableMetadata expectedMetadata = mock(TableMetadata.class); + 
when(storageAdmin.getTableMetadata(namespace, tableName)).thenReturn(expectedMetadata); + + // Act + TableMetadata actualMetadata = tableMetadataService.getTableMetadata(namespace, tableName); + + // Assert + assertSame(expectedMetadata, actualMetadata); + verify(storageAdmin).getTableMetadata(namespace, tableName); + } + + @Test + void getTableMetadata_WithMissingNamespaceOrTable_ShouldThrowTableMetadataException() + throws ExecutionException { + // Arrange + String namespace = "test_namespace"; + String tableName = "test_table"; + when(storageAdmin.getTableMetadata(namespace, tableName)).thenReturn(null); + + // Act & Assert + TableMetadataException exception = + assertThrows( + TableMetadataException.class, + () -> tableMetadataService.getTableMetadata(namespace, tableName)); + assertEquals( + CoreError.DATA_LOADER_MISSING_NAMESPACE_OR_TABLE.buildMessage(namespace, tableName), + exception.getMessage()); + verify(storageAdmin).getTableMetadata(namespace, tableName); + } + + @Test + void getTableMetadata_WithExecutionException_ShouldThrowTableMetadataException() + throws ExecutionException { + // Arrange + String namespace = "test_namespace"; + String tableName = "test_table"; + when(storageAdmin.getTableMetadata(namespace, tableName)) + .thenThrow(new ExecutionException("error")); + + // Act & Assert + TableMetadataException exception = + assertThrows( + TableMetadataException.class, + () -> tableMetadataService.getTableMetadata(namespace, tableName)); + assertEquals( + CoreError.DATA_LOADER_MISSING_NAMESPACE_OR_TABLE.buildMessage(namespace, tableName), + exception.getMessage()); + verify(storageAdmin).getTableMetadata(namespace, tableName); + } + + @Test + void getTableMetadata_WithValidRequests_ShouldReturnTableMetadataMap() + throws ExecutionException, TableMetadataException { + // Arrange + String namespace1 = "test_namespace1"; + String tableName1 = "test_table1"; + String namespace2 = "test_namespace2"; + String tableName2 = "test_table2"; + 
TableMetadataRequest request1 = new TableMetadataRequest(namespace1, tableName1); + TableMetadataRequest request2 = new TableMetadataRequest(namespace2, tableName2); + Collection requests = Arrays.asList(request1, request2); + TableMetadata expectedMetadata1 = mock(TableMetadata.class); + TableMetadata expectedMetadata2 = mock(TableMetadata.class); + when(storageAdmin.getTableMetadata(namespace1, tableName1)).thenReturn(expectedMetadata1); + when(storageAdmin.getTableMetadata(namespace2, tableName2)).thenReturn(expectedMetadata2); + + // Act + Map actualMetadataMap = tableMetadataService.getTableMetadata(requests); + + // Assert + assertEquals(2, actualMetadataMap.size()); + assertSame(expectedMetadata1, actualMetadataMap.get(namespace1 + "." + tableName1)); + assertSame(expectedMetadata2, actualMetadataMap.get(namespace2 + "." + tableName2)); + verify(storageAdmin).getTableMetadata(namespace1, tableName1); + verify(storageAdmin).getTableMetadata(namespace2, tableName2); + } + + @Test + void getTableMetadata_WithMissingNamespaceOrTableInRequests_ShouldThrowTableMetadataException() + throws ExecutionException { + // Arrange + String namespace = "test_namespace"; + String tableName = "test_table"; + TableMetadataRequest request = new TableMetadataRequest(namespace, tableName); + Collection requests = Collections.singletonList(request); + when(storageAdmin.getTableMetadata(namespace, tableName)).thenReturn(null); + + // Act & Assert + TableMetadataException exception = + assertThrows( + TableMetadataException.class, () -> tableMetadataService.getTableMetadata(requests)); + assertEquals( + CoreError.DATA_LOADER_MISSING_NAMESPACE_OR_TABLE.buildMessage(namespace, tableName), + exception.getMessage()); + verify(storageAdmin).getTableMetadata(namespace, tableName); + } + + @Test + void getTableMetadata_WithExecutionExceptionInRequests_ShouldThrowTableMetadataException() + throws ExecutionException { + // Arrange + String namespace = "test_namespace"; + String tableName = 
"test_table"; + TableMetadataRequest request = new TableMetadataRequest(namespace, tableName); + Collection requests = Collections.singletonList(request); + when(storageAdmin.getTableMetadata(namespace, tableName)) + .thenThrow(new ExecutionException("error")); + + // Act & Assert + TableMetadataException exception = + assertThrows( + TableMetadataException.class, () -> tableMetadataService.getTableMetadata(requests)); + assertEquals( + CoreError.DATA_LOADER_MISSING_NAMESPACE_OR_TABLE.buildMessage(namespace, tableName), + exception.getMessage()); + verify(storageAdmin).getTableMetadata(namespace, tableName); + } +} diff --git a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/util/ColumnUtilsTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/util/ColumnUtilsTest.java new file mode 100644 index 0000000000..44180d82cc --- /dev/null +++ b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/util/ColumnUtilsTest.java @@ -0,0 +1,92 @@ +package com.scalar.db.dataloader.core.util; + +import static org.junit.jupiter.api.Assertions.*; + +import com.scalar.db.common.error.CoreError; +import com.scalar.db.dataloader.core.exception.Base64Exception; +import com.scalar.db.io.*; +import java.nio.charset.StandardCharsets; +import java.util.Base64; +import java.util.stream.Stream; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +class ColumnUtilsTest { + + private static final float FLOAT_VALUE = 2.78f; + + private static Stream provideColumnsForCreateColumnFromValue() { + return Stream.of( + Arguments.of(DataType.BOOLEAN, "boolColumn", "true", BooleanColumn.of("boolColumn", true)), + Arguments.of(DataType.BOOLEAN, "boolColumn", null, BooleanColumn.ofNull("boolColumn")), + Arguments.of(DataType.INT, "intColumn", "42", IntColumn.of("intColumn", 42)), + Arguments.of(DataType.INT, "intColumn", 
null, IntColumn.ofNull("intColumn")), + Arguments.of( + DataType.BIGINT, + "bigintColumn", + "123456789012", + BigIntColumn.of("bigintColumn", 123456789012L)), + Arguments.of(DataType.BIGINT, "bigintColumn", null, BigIntColumn.ofNull("bigintColumn")), + Arguments.of( + DataType.FLOAT, + "floatColumn", + Float.toString(FLOAT_VALUE), + FloatColumn.of("floatColumn", FLOAT_VALUE)), + Arguments.of(DataType.FLOAT, "floatColumn", null, FloatColumn.ofNull("floatColumn")), + Arguments.of( + DataType.DOUBLE, + "doubleColumn", + Double.toString(Math.E), + DoubleColumn.of("doubleColumn", Math.E)), + Arguments.of(DataType.DOUBLE, "doubleColumn", null, DoubleColumn.ofNull("doubleColumn")), + Arguments.of( + DataType.TEXT, + "textColumn", + "Hello, world!", + TextColumn.of("textColumn", "Hello, world!")), + Arguments.of(DataType.TEXT, "textColumn", null, TextColumn.ofNull("textColumn")), + Arguments.of( + DataType.BLOB, + "blobColumn", + Base64.getEncoder().encodeToString("binary".getBytes(StandardCharsets.UTF_8)), + BlobColumn.of("blobColumn", "binary".getBytes(StandardCharsets.UTF_8))), + Arguments.of(DataType.BLOB, "blobColumn", null, BlobColumn.ofNull("blobColumn"))); + } + + @ParameterizedTest + @MethodSource("provideColumnsForCreateColumnFromValue") + void createColumnFromValue_validInput_returnsColumn( + DataType dataType, String columnName, String value, Column expectedColumn) + throws Base64Exception { + Column actualColumn = ColumnUtils.createColumnFromValue(dataType, columnName, value); + assertEquals(expectedColumn, actualColumn); + } + + @Test + void createColumnFromValue_invalidNumberFormat_throwsNumberFormatException() { + String columnName = "intColumn"; + String value = "not_a_number"; + NumberFormatException exception = + assertThrows( + NumberFormatException.class, + () -> ColumnUtils.createColumnFromValue(DataType.INT, columnName, value)); + assertEquals( + CoreError.DATA_LOADER_INVALID_NUMBER_FORMAT_FOR_COLUMN_VALUE.buildMessage(columnName), + 
exception.getMessage()); + } + + @Test + void createColumnFromValue_invalidBase64_throwsBase64Exception() { + String columnName = "blobColumn"; + String value = "invalid_base64"; + Base64Exception exception = + assertThrows( + Base64Exception.class, + () -> ColumnUtils.createColumnFromValue(DataType.BLOB, columnName, value)); + assertEquals( + CoreError.DATA_LOADER_INVALID_BASE64_ENCODING_FOR_COLUMN_VALUE.buildMessage(columnName), + exception.getMessage()); + } +} diff --git a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/util/KeyUtilsTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/util/KeyUtilsTest.java new file mode 100644 index 0000000000..36e76aa472 --- /dev/null +++ b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/util/KeyUtilsTest.java @@ -0,0 +1,138 @@ +package com.scalar.db.dataloader.core.util; + +import static org.junit.jupiter.api.Assertions.*; +import static org.mockito.Mockito.*; + +import com.scalar.db.api.TableMetadata; +import com.scalar.db.common.error.CoreError; +import com.scalar.db.dataloader.core.ColumnKeyValue; +import com.scalar.db.dataloader.core.exception.Base64Exception; +import com.scalar.db.dataloader.core.exception.KeyParsingException; +import com.scalar.db.io.BigIntColumn; +import com.scalar.db.io.BlobColumn; +import com.scalar.db.io.BooleanColumn; +import com.scalar.db.io.DataType; +import com.scalar.db.io.DoubleColumn; +import com.scalar.db.io.FloatColumn; +import com.scalar.db.io.IntColumn; +import com.scalar.db.io.Key; +import com.scalar.db.io.TextColumn; +import java.nio.charset.StandardCharsets; +import java.util.Base64; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +class KeyUtilsTest { + + @Mock private TableMetadata tableMetadata; + + @Test + void parseKeyValue_nullKeyValue_returnsNull() throws KeyParsingException { + 
assertNull(KeyUtils.parseKeyValue(null, tableMetadata)); + } + + @Test + void parseKeyValue_invalidColumnName_throwsKeyParsingException() { + String columnName = "invalidColumn"; + ColumnKeyValue keyValue = new ColumnKeyValue(columnName, "value"); + when(tableMetadata.getColumnDataType(columnName)).thenReturn(null); + + KeyParsingException exception = + assertThrows( + KeyParsingException.class, () -> KeyUtils.parseKeyValue(keyValue, tableMetadata)); + assertEquals( + CoreError.DATA_LOADER_INVALID_COLUMN_KEY_PARSING_FAILED.buildMessage(columnName), + exception.getMessage()); + } + + @Test + void parseKeyValue_validKeyValue_returnsKey() throws KeyParsingException, Base64Exception { + String columnName = "columnName"; + String value = "value"; + ColumnKeyValue keyValue = new ColumnKeyValue(columnName, value); + DataType dataType = DataType.TEXT; + when(tableMetadata.getColumnDataType(columnName)).thenReturn(dataType); + + Key expected = Key.newBuilder().add(TextColumn.of(columnName, value)).build(); + Key actual = KeyUtils.parseKeyValue(keyValue, tableMetadata); + + assertEquals(expected, actual); + } + + @Test + void createKey_boolean_returnsKey() throws Base64Exception { + String columnName = "booleanColumn"; + String value = "true"; + Key expected = Key.newBuilder().add(BooleanColumn.of(columnName, true)).build(); + Key actual = KeyUtils.createKey(DataType.BOOLEAN, columnName, value); + assertEquals(expected, actual); + } + + @Test + void createKey_int_returnsKey() throws Base64Exception { + String columnName = "intColumn"; + String value = "42"; + Key expected = Key.newBuilder().add(IntColumn.of(columnName, 42)).build(); + Key actual = KeyUtils.createKey(DataType.INT, columnName, value); + assertEquals(expected, actual); + } + + @Test + void createKey_bigint_returnsKey() throws Base64Exception { + String columnName = "bigintColumn"; + String value = "123456789012345"; + Key expected = Key.newBuilder().add(BigIntColumn.of(columnName, 123456789012345L)).build(); + 
Key actual = KeyUtils.createKey(DataType.BIGINT, columnName, value); + assertEquals(expected, actual); + } + + @Test + void createKey_float_returnsKey() throws Base64Exception { + String columnName = "floatColumn"; + String value = "1.23"; + Key expected = Key.newBuilder().add(FloatColumn.of(columnName, 1.23f)).build(); + Key actual = KeyUtils.createKey(DataType.FLOAT, columnName, value); + assertEquals(expected, actual); + } + + @Test + void createKey_double_returnsKey() throws Base64Exception { + String columnName = "doubleColumn"; + String value = "1.23"; + Key expected = Key.newBuilder().add(DoubleColumn.of(columnName, 1.23)).build(); + Key actual = KeyUtils.createKey(DataType.DOUBLE, columnName, value); + assertEquals(expected, actual); + } + + @Test + void createKey_text_returnsKey() throws Base64Exception { + String columnName = "textColumn"; + String value = "Hello, world!"; + Key expected = Key.newBuilder().add(TextColumn.of(columnName, value)).build(); + Key actual = KeyUtils.createKey(DataType.TEXT, columnName, value); + assertEquals(expected, actual); + } + + @Test + void createKey_blob_returnsKey() throws Base64Exception { + String columnName = "blobColumn"; + String value = + Base64.getEncoder().encodeToString("Hello, world!".getBytes(StandardCharsets.UTF_8)); + Key expected = + Key.newBuilder() + .add(BlobColumn.of(columnName, "Hello, world!".getBytes(StandardCharsets.UTF_8))) + .build(); + Key actual = KeyUtils.createKey(DataType.BLOB, columnName, value); + assertEquals(expected, actual); + } + + @Test + void createKey_invalidBase64_throwsBase64Exception() { + String columnName = "blobColumn"; + String value = "invalidBase64"; + assertThrows(Base64Exception.class, () -> KeyUtils.createKey(DataType.BLOB, columnName, value)); + } +} diff --git a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/util/TableMetadataUtilsTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/util/TableMetadataUtilsTest.java new file mode 
package com.scalar.db.dataloader.core.util;

import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import com.scalar.db.api.TableMetadata;
import com.scalar.db.io.DataType;
import com.scalar.db.transaction.consensuscommit.Attribute;
import java.util.Arrays;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.junit.jupiter.api.Test;

/** Unit tests for {@link TableMetadataUtils}. */
class TableMetadataUtilsTest {

  private static final String NAMESPACE = "ns";
  private static final String TABLE_NAME = "table";

  /** A known ScalarDB metadata column name (Attribute.ID) is recognized as metadata. */
  @Test
  void isMetadataColumn_IsMetaDataColumn_ShouldReturnTrue() {
    boolean isMetadataColumn =
        TableMetadataUtils.isMetadataColumn(
            Attribute.ID, TableMetadataUtils.getMetadataColumns(), new HashSet<>());
    assertThat(isMetadataColumn).isTrue();
  }

  /** A regular user column name is not treated as metadata. */
  @Test
  void isMetadataColumn_IsNotMetadataColumn_ShouldReturnFalse() {
    boolean isMetadataColumn =
        TableMetadataUtils.isMetadataColumn(
            "columnName", TableMetadataUtils.getMetadataColumns(), new HashSet<>());
    assertThat(isMetadataColumn).isFalse();
  }

  /** A column carrying the "before_" prefix is treated as metadata when unknown to the table. */
  @Test
  void isMetadataColumn_IsBeforePrefixColumn_ShouldReturnTrue() {
    boolean isMetadataColumn =
        TableMetadataUtils.isMetadataColumn(
            Attribute.BEFORE_PREFIX + "columnName",
            TableMetadataUtils.getMetadataColumns(),
            new HashSet<>());
    assertThat(isMetadataColumn).isTrue();
  }

  /**
   * A "before_"-prefixed name is NOT metadata when the table itself declares a column whose name
   * is the prefixed form (i.e. the prefix belongs to the user column, not to ScalarDB).
   */
  @Test
  void isMetadataColumn_IsNotBeforePrefixColumn_ShouldReturnFalse() {
    Set<String> columnNames = new HashSet<>();
    columnNames.add("before_before_testing");
    boolean isMetadataColumn =
        TableMetadataUtils.isMetadataColumn(
            "before_testing", TableMetadataUtils.getMetadataColumns(), columnNames);
    assertThat(isMetadataColumn).isFalse();
  }

  /** getMetadataColumns returns exactly the ten consensus-commit metadata column names. */
  @Test
  void getMetadataColumns_NoArgs_ShouldReturnSet() {
    Set<String> metadataColumns = TableMetadataUtils.getMetadataColumns();
    // containsExactlyInAnyOrder takes the expected elements as varargs; no intermediate
    // collection is needed.
    assertThat(metadataColumns)
        .containsExactlyInAnyOrder(
            Attribute.ID,
            Attribute.STATE,
            Attribute.VERSION,
            Attribute.PREPARED_AT,
            Attribute.COMMITTED_AT,
            Attribute.BEFORE_ID,
            Attribute.BEFORE_STATE,
            Attribute.BEFORE_VERSION,
            Attribute.BEFORE_PREPARED_AT,
            Attribute.BEFORE_COMMITTED_AT);
  }

  /** The lookup key follows TABLE_LOOKUP_KEY_FORMAT applied to namespace and table name. */
  @Test
  void getTableLookupKey_ValidStringArgs_ShouldReturnLookupKey() {
    String actual = TableMetadataUtils.getTableLookupKey(NAMESPACE, TABLE_NAME);
    String expected =
        String.format(TableMetadataUtils.TABLE_LOOKUP_KEY_FORMAT, NAMESPACE, TABLE_NAME);
    assertThat(actual).isEqualTo(expected);
  }

  // Tests for the isMetadataColumn(String columnName, TableMetadata tableMetadata) overload.

  /** A known metadata column name is recognized without consulting the table's columns. */
  @Test
  void isMetadataColumn_WithTableMetadata_IsMetaDataColumn_ShouldReturnTrue() {
    TableMetadata tableMetadata = mock(TableMetadata.class);
    boolean isMetadataColumn = TableMetadataUtils.isMetadataColumn(Attribute.ID, tableMetadata);
    assertThat(isMetadataColumn).isTrue();
  }

  /** A regular column name is not metadata when the table declares no columns. */
  @Test
  void isMetadataColumn_WithTableMetadata_IsNotMetadataColumn_ShouldReturnFalse() {
    TableMetadata tableMetadata = mock(TableMetadata.class);
    when(tableMetadata.getColumnNames()).thenReturn(new LinkedHashSet<>());
    boolean isMetadataColumn = TableMetadataUtils.isMetadataColumn("columnName", tableMetadata);
    assertThat(isMetadataColumn).isFalse();
  }

  /** A "before_"-prefixed name unknown to the table is treated as metadata. */
  @Test
  void isMetadataColumn_WithTableMetadata_IsBeforePrefixColumn_ShouldReturnTrue() {
    TableMetadata tableMetadata = mock(TableMetadata.class);
    when(tableMetadata.getColumnNames()).thenReturn(new LinkedHashSet<>());
    boolean isMetadataColumn =
        TableMetadataUtils.isMetadataColumn(Attribute.BEFORE_PREFIX + "columnName", tableMetadata);
    assertThat(isMetadataColumn).isTrue();
  }

  /** A "before_"-prefixed name is NOT metadata when the table owns the doubly-prefixed column. */
  @Test
  void isMetadataColumn_WithTableMetadata_IsNotBeforePrefixColumn_ShouldReturnFalse() {
    TableMetadata tableMetadata = mock(TableMetadata.class);
    LinkedHashSet<String> columnNames = new LinkedHashSet<>();
    columnNames.add("before_before_testing");
    when(tableMetadata.getColumnNames()).thenReturn(columnNames);
    boolean isMetadataColumn = TableMetadataUtils.isMetadataColumn("before_testing", tableMetadata);
    assertThat(isMetadataColumn).isFalse();
  }

  /** extractColumnDataTypes maps each declared column name to its declared data type. */
  @Test
  void extractColumnDataTypes_ValidTableMetadata_ShouldReturnColumnDataTypesMap() {
    TableMetadata tableMetadata = mock(TableMetadata.class);
    LinkedHashSet<String> columnNames = new LinkedHashSet<>();
    columnNames.add("column1");
    columnNames.add("column2");
    when(tableMetadata.getColumnNames()).thenReturn(columnNames);
    when(tableMetadata.getColumnDataType("column1")).thenReturn(DataType.TEXT);
    when(tableMetadata.getColumnDataType("column2")).thenReturn(DataType.INT);
    Map<String, DataType> columnDataTypes =
        TableMetadataUtils.extractColumnDataTypes(tableMetadata);
    assertThat(columnDataTypes).containsEntry("column1", DataType.TEXT);
    assertThat(columnDataTypes).containsEntry("column2", DataType.INT);
  }

  /**
   * populateProjectionsWithMetadata expands each requested projection with its "before_" twin and
   * appends the full set of transaction metadata columns.
   */
  @Test
  void
      populateProjectionsWithMetadata_ValidTableMetadataAndProjections_ShouldReturnProjectionsWithMetadata() {
    TableMetadata tableMetadata = mock(TableMetadata.class);
    LinkedHashSet<String> partitionKeyNames = new LinkedHashSet<>();
    partitionKeyNames.add("partitionKey");
    LinkedHashSet<String> clusteringKeyNames = new LinkedHashSet<>();
    clusteringKeyNames.add("clusteringKey");
    when(tableMetadata.getPartitionKeyNames()).thenReturn(partitionKeyNames);
    when(tableMetadata.getClusteringKeyNames()).thenReturn(clusteringKeyNames);

    List<String> projections = Arrays.asList("column1", "column2");
    List<String> projectionMetadata =
        TableMetadataUtils.populateProjectionsWithMetadata(tableMetadata, projections);
    assertThat(projectionMetadata)
        .containsExactlyInAnyOrder(
            "column1",
            Attribute.BEFORE_PREFIX + "column1",
            "column2",
            Attribute.BEFORE_PREFIX + "column2",
            Attribute.ID,
            Attribute.STATE,
            Attribute.VERSION,
            Attribute.PREPARED_AT,
            Attribute.COMMITTED_AT,
            Attribute.BEFORE_ID,
            Attribute.BEFORE_STATE,
            Attribute.BEFORE_VERSION,
            Attribute.BEFORE_PREPARED_AT,
            Attribute.BEFORE_COMMITTED_AT);
  }
}