@@ -55,6 +55,7 @@ public class ExportCommand extends ExportCommandOptions implements Callable<Integer>
public Integer call() throws Exception {
validateDeprecatedOptions();
applyDeprecatedOptions();
warnAboutIgnoredDeprecatedOptions();
String scalarDbPropertiesFilePath = getScalarDbPropertiesFilePath();

try {
@@ -142,6 +143,28 @@ private void validateDeprecatedOptions() {
MAX_THREADS_OPTION_SHORT);
}

/** Warns about deprecated options that are no longer used and are now silently ignored. */
private void warnAboutIgnoredDeprecatedOptions() {
CommandLine.ParseResult parseResult = spec.commandLine().getParseResult();
boolean hasIncludeMetadata =
parseResult.hasMatchedOption(DEPRECATED_INCLUDE_METADATA_OPTION)
|| parseResult.hasMatchedOption(DEPRECATED_INCLUDE_METADATA_OPTION_SHORT);

if (hasIncludeMetadata) {
// Use picocli's ANSI support for colored warning output
CommandLine.Help.Ansi ansi = CommandLine.Help.Ansi.AUTO;
String warning =
ansi.string(
"@|bold,yellow The "
+ DEPRECATED_INCLUDE_METADATA_OPTION
+ " option is deprecated and no longer has any effect. "
+ "Use the 'scalar.db.consensus_commit.include_metadata.enabled' configuration property "
+ "in your ScalarDB properties file to control whether transaction metadata is included in scan operations.|@");

logger.warn(warning);
}
}
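
For reference, a minimal standalone sketch of the same detect-and-warn pattern, runnable on its own. The class name, message text, and use of System.err instead of the command's logger are illustrative, not the project's actual code.

```java
// Standalone sketch of the detect-and-warn pattern above (hypothetical class
// name and message text; the real command logs through its logger instead of
// printing to System.err).
import picocli.CommandLine;
import picocli.CommandLine.Command;
import picocli.CommandLine.Option;

@Command(name = "export-demo")
class DeprecatedFlagDemo implements Runnable {

  // Hidden, deprecated flag: still parsed so existing scripts do not break.
  @Option(names = {"--include-metadata", "-m"}, hidden = true, defaultValue = "false")
  boolean includeMetadataDeprecated;

  @CommandLine.Spec
  CommandLine.Model.CommandSpec spec;

  @Override
  public void run() {
    CommandLine.ParseResult parseResult = spec.commandLine().getParseResult();
    if (parseResult.hasMatchedOption("--include-metadata")
        || parseResult.hasMatchedOption("-m")) {
      // Render a colored warning only when the user actually passed the flag.
      String warning =
          CommandLine.Help.Ansi.AUTO.string(
              "@|bold,yellow --include-metadata is deprecated and has no effect.|@");
      System.err.println(warning);
    }
  }

  public static void main(String[] args) {
    new CommandLine(new DeprecatedFlagDemo()).execute("--include-metadata");
  }
}
```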

private String getScalarDbPropertiesFilePath() {
if (StringUtils.isBlank(configFilePath)) {
throw new IllegalArgumentException(DataLoaderError.CONFIG_FILE_PATH_BLANK.buildMessage());
@@ -161,8 +184,7 @@ private void validateOutputDirectory() throws DirectoryValidationException {

private ExportManager createExportManager(
TransactionFactory transactionFactory, ScalarDbDao scalarDbDao, FileFormat fileFormat) {
ProducerTaskFactory taskFactory =
new ProducerTaskFactory(delimiter, includeTransactionMetadata, prettyPrintJson);
ProducerTaskFactory taskFactory = new ProducerTaskFactory(delimiter, prettyPrintJson);
DistributedTransactionManager manager = transactionFactory.getTransactionManager();
switch (fileFormat) {
case JSON:
@@ -181,7 +203,6 @@ private ExportOptions buildExportOptions(Key partitionKey, ScanRange scanRange)
ExportOptions.builder(namespace, table, partitionKey, outputFormat)
.sortOrders(sortOrders)
.excludeHeaderRow(excludeHeader)
.includeTransactionMetadata(includeTransactionMetadata)
.delimiter(delimiter)
.limit(limit)
.maxThreadCount(maxThreads)
@@ -19,6 +19,8 @@ public class ExportCommandOptions {
public static final String MAX_THREADS_OPTION = "--max-threads";
public static final String MAX_THREADS_OPTION_SHORT = "-mt";
public static final String DEPRECATED_THREADS_OPTION = "--threads";
public static final String DEPRECATED_INCLUDE_METADATA_OPTION = "--include-metadata";
public static final String DEPRECATED_INCLUDE_METADATA_OPTION_SHORT = "-m";

@CommandLine.Option(
names = {"--config", "-c"},
@@ -69,10 +71,19 @@ public class ExportCommandOptions {
defaultValue = "json")
protected FileFormat outputFormat;

/**
* @deprecated As of release 3.17.0, this option is no longer used and will be removed in release
* 4.0.0. The option is kept (but hidden) because users may already have scripts or pre-set
* commands that pass the argument, and picocli would throw an error if the option were no
* longer supported. We want to avoid that and instead just show a warning.
*/
@Deprecated
@CommandLine.Option(
names = {"--include-metadata", "-m"},
description = "Include transaction metadata in the exported data (default: false)",
defaultValue = "false")
description =
"Deprecated: This option is no longer used. Please use scalar.db.consensus_commit.include_metadata.enabled to control whether transaction metadata is included in scan operations.",
defaultValue = "false",
hidden = true)
protected boolean includeTransactionMetadata;
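
The Javadoc above explains why the flag stays defined: picocli rejects options it does not know about. A minimal sketch of that failure mode, assuming a hypothetical command that only defines --config:

```java
// Hypothetical reproduction of what happens if the flag were removed outright:
// picocli treats it as an unknown option and parsing fails before the command
// ever runs. Class and option names are illustrative.
import picocli.CommandLine;

class RemovedOptionDemo {

  @CommandLine.Option(names = "--config")
  String config;

  public static void main(String[] args) {
    CommandLine cmd = new CommandLine(new RemovedOptionDemo());
    try {
      cmd.parseArgs("--config", "scalardb.properties", "--include-metadata");
    } catch (CommandLine.UnmatchedArgumentException e) {
      // Thrown because --include-metadata is not a known option of this command.
      System.err.println(e.getMessage());
    }
  }
}
```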

@CommandLine.Option(
@@ -152,14 +152,14 @@ void call_withOnlyDeprecatedStartExclusive_shouldApplyInvertedValue() {
cmd.parseArgs(args);

// Verify the deprecated value was parsed
assertEquals(true, command.startExclusiveDeprecated);
assertTrue(command.startExclusiveDeprecated);

// Apply deprecated options (this is what the command does after validation)
command.applyDeprecatedOptions();

// Verify the value was applied with inverted logic
// start-exclusive=true should become start-inclusive=false
assertEquals(false, command.scanStartInclusive);
assertFalse(command.scanStartInclusive);
}

@Test
@@ -181,14 +181,14 @@ void call_withOnlyDeprecatedEndExclusive_shouldApplyInvertedValue() {
cmd.parseArgs(args);

// Verify the deprecated value was parsed
assertEquals(false, command.endExclusiveDeprecated);
assertFalse(command.endExclusiveDeprecated);

// Apply deprecated options (this is what the command does after validation)
command.applyDeprecatedOptions();

// Verify the value was applied with inverted logic
// end-exclusive=false should become end-inclusive=true
assertEquals(true, command.scanEndInclusive);
assertTrue(command.scanEndInclusive);
}

@Test
@@ -273,4 +273,83 @@ void call_withoutMaxThreads_shouldDefaultToAvailableProcessors() {
// Verify it was set to available processors
assertEquals(Runtime.getRuntime().availableProcessors(), command.maxThreads);
}

@Test
void call_withDeprecatedIncludeMetadataOption_shouldParseAndDetectOption() {
// Test that the deprecated option can be parsed without crashing
// and is detected for warning purposes (both long and short forms)
String[] argsWithLongForm = {
"--config",
"scalardb.properties",
"--namespace",
"scalar",
"--table",
"asset",
"--format",
"JSON",
"--include-metadata"
};

String[] argsWithShortForm = {
"--config",
"scalardb.properties",
"--namespace",
"scalar",
"--table",
"asset",
"--format",
"JSON",
"-m"
};

// Test long form
ExportCommand commandLong = new ExportCommand();
CommandLine cmdLong = new CommandLine(commandLong);
cmdLong.parseArgs(argsWithLongForm);
commandLong.spec = cmdLong.getCommandSpec();

// Verify the option is detected (so warning will trigger)
assertTrue(
cmdLong
.getParseResult()
.hasMatchedOption(ExportCommandOptions.DEPRECATED_INCLUDE_METADATA_OPTION));

// Test short form
ExportCommand commandShort = new ExportCommand();
CommandLine cmdShort = new CommandLine(commandShort);
cmdShort.parseArgs(argsWithShortForm);
commandShort.spec = cmdShort.getCommandSpec();

// Verify the short option is detected (so warning will trigger)
assertTrue(
cmdShort
.getParseResult()
.hasMatchedOption(ExportCommandOptions.DEPRECATED_INCLUDE_METADATA_OPTION_SHORT));
}

@Test
void call_withoutDeprecatedIncludeMetadataOption_shouldNotDetectOption() {
// Verify that when the deprecated option is NOT used, it's not detected
// (so warning won't trigger incorrectly)
String[] args = {
"--config",
"scalardb.properties",
"--namespace",
"scalar",
"--table",
"asset",
"--format",
"JSON"
};

ExportCommand command = new ExportCommand();
CommandLine cmd = new CommandLine(command);
cmd.parseArgs(args);
command.spec = cmd.getCommandSpec();

// Verify the option is NOT detected (so warning won't trigger)
assertFalse(
cmd.getParseResult()
.hasMatchedOption(ExportCommandOptions.DEPRECATED_INCLUDE_METADATA_OPTION));
}
}
@@ -5,7 +5,6 @@
import com.scalar.db.dataloader.core.dataexport.producer.ProducerTaskFactory;
import com.scalar.db.dataloader.core.dataimport.dao.ScalarDbDao;
import com.scalar.db.dataloader.core.util.CsvUtil;
import com.scalar.db.transaction.consensuscommit.ConsensusCommitUtils;
import java.io.IOException;
import java.io.Writer;
import java.util.Iterator;
@@ -77,8 +76,7 @@ private String createCsvHeaderRow(ExportOptions exportOptions, TableMetadata tableMetadata
Iterator<String> iterator = tableMetadata.getColumnNames().iterator();
while (iterator.hasNext()) {
String columnName = iterator.next();
if (shouldIgnoreColumn(
exportOptions.isIncludeTransactionMetadata(), columnName, tableMetadata, projections)) {
if (!projections.isEmpty() && !projections.contains(columnName)) {
continue;
}
headerRow.append(columnName);
@@ -90,24 +88,4 @@
headerRow.append("\n");
return headerRow.toString();
}

/**
* To ignore a column or not based on conditions such as if it is a metadata column or if it is
* not include in selected projections
*
* @param isIncludeTransactionMetadata to include transaction metadata or not
* @param columnName column name
* @param tableMetadata table metadata
* @param projections selected columns for projection
* @return ignore the column or not
*/
private boolean shouldIgnoreColumn(
boolean isIncludeTransactionMetadata,
String columnName,
TableMetadata tableMetadata,
List<String> projections) {
return (!isIncludeTransactionMetadata
&& ConsensusCommitUtils.isTransactionMetaColumn(columnName, tableMetadata))
|| (!projections.isEmpty() && !projections.contains(columnName));
}
}
Expand Up @@ -11,7 +11,6 @@
import com.scalar.db.dataloader.core.dataexport.validation.ExportOptionsValidator;
import com.scalar.db.dataloader.core.dataimport.dao.ScalarDbDao;
import com.scalar.db.dataloader.core.dataimport.dao.ScalarDbDaoException;
import com.scalar.db.dataloader.core.util.TableMetadataUtil;
import com.scalar.db.exception.transaction.CrudException;
import com.scalar.db.exception.transaction.UnknownTransactionStatusException;
import com.scalar.db.io.DataType;
@@ -76,7 +75,6 @@ public ExportReport startExport(
try {
validateExportOptions(exportOptions, tableMetadata);
Map<String, DataType> dataTypeByColumnName = tableMetadata.getColumnDataTypes();
handleTransactionMetadata(exportOptions, tableMetadata);
processHeader(exportOptions, tableMetadata, writer);

ExecutorService executorService =
@@ -198,22 +196,6 @@ private void validateExportOptions(ExportOptions exportOptions, TableMetadata tableMetadata
ExportOptionsValidator.validate(exportOptions, tableMetadata);
}

/**
* To update projection columns of export options if include metadata options is enabled
*
* @param exportOptions export options
* @param tableMetadata metadata of the table
*/
private void handleTransactionMetadata(ExportOptions exportOptions, TableMetadata tableMetadata) {
if (exportOptions.isIncludeTransactionMetadata()
&& !exportOptions.getProjectionColumns().isEmpty()) {
List<String> projectionMetadata =
TableMetadataUtil.populateProjectionsWithMetadata(
tableMetadata, exportOptions.getProjectionColumns());
exportOptions.setProjectionColumns(projectionMetadata);
}
}
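
With handleTransactionMetadata gone, whether transaction metadata columns appear in scan results is governed by the ScalarDB configuration rather than the export options. A sketch of the property users would set instead; the property key is taken from the deprecation warning above, and the surrounding code is illustrative:

```java
// Sketch only: the property key comes from the deprecation warning; the
// surrounding code is illustrative and not part of the data loader.
import java.util.Properties;

class IncludeMetadataConfigExample {
  public static void main(String[] args) {
    // Equivalent to adding this line to the ScalarDB properties file passed
    // to the export command (e.g. scalardb.properties):
    //
    //   scalar.db.consensus_commit.include_metadata.enabled=true
    //
    Properties props = new Properties();
    props.setProperty("scalar.db.consensus_commit.include_metadata.enabled", "true");
    System.out.println(props);
  }
}
```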

/**
* To create a scanner object
*
@@ -27,7 +27,6 @@ public class ExportOptions {
@Builder.Default private final int maxThreadCount = Runtime.getRuntime().availableProcessors();
@Builder.Default private final String delimiter = ";";
@Builder.Default private final boolean excludeHeaderRow = false;
@Builder.Default private final boolean includeTransactionMetadata = false;
@Builder.Default private List<String> projectionColumns = Collections.emptyList();
private List<Scan.Ordering> sortOrders;

@@ -6,7 +6,6 @@
import com.scalar.db.dataloader.core.util.CsvUtil;
import com.scalar.db.dataloader.core.util.DecimalUtil;
import com.scalar.db.io.DataType;
import com.scalar.db.transaction.consensuscommit.ConsensusCommitUtils;
import java.nio.charset.Charset;
import java.time.Instant;
import java.time.LocalDate;
@@ -33,19 +32,17 @@ public class CsvProducerTask extends ProducerTask {
/**
* Class constructor
*
* @param includeMetadata Include metadata in the exported data
* @param projectColumns list of columns that is required in export data
* @param tableMetadata Metadata for a single ScalarDB table
* @param columnDataTypes Map of data types for the all columns in a ScalarDB table
* @param delimiter Delimiter used in csv content
*/
public CsvProducerTask(
boolean includeMetadata,
List<String> projectColumns,
TableMetadata tableMetadata,
Map<String, DataType> columnDataTypes,
String delimiter) {
super(includeMetadata, projectColumns, tableMetadata, columnDataTypes);
super(projectColumns, tableMetadata, columnDataTypes);
this.delimiter = delimiter;
}

@@ -82,12 +79,10 @@ private String convertResultToCsv(Result result) {
while (iterator.hasNext()) {
String columnName = iterator.next();

// Skip the field if it can be ignored based on check
// Skip the field if it's not in the projection list (when projections are specified)
boolean columnNotProjected =
!projectedColumnsSet.isEmpty() && !projectedColumnsSet.contains(columnName);
boolean isMetadataColumn =
ConsensusCommitUtils.isTransactionMetaColumn(columnName, tableMetadata);
if (columnNotProjected || (!includeMetadata && isMetadataColumn)) {
if (columnNotProjected) {
continue;
}

@@ -6,7 +6,6 @@
import com.scalar.db.api.TableMetadata;
import com.scalar.db.dataloader.core.DataLoaderObjectMapper;
import com.scalar.db.io.DataType;
import com.scalar.db.transaction.consensuscommit.ConsensusCommitUtils;
import java.nio.charset.Charset;
import java.time.Instant;
import java.time.LocalDate;
@@ -28,17 +27,15 @@ public class JsonLineProducerTask extends ProducerTask {
/**
* Class constructor
*
* @param includeMetadata Include metadata in the exported data
* @param projectionColumns list of columns that is required in export data
* @param tableMetadata Metadata for a single ScalarDB table
* @param columnDataTypes Map of data types for the all columns in a ScalarDB table
*/
public JsonLineProducerTask(
boolean includeMetadata,
List<String> projectionColumns,
TableMetadata tableMetadata,
Map<String, DataType> columnDataTypes) {
super(includeMetadata, projectionColumns, tableMetadata, columnDataTypes);
super(projectionColumns, tableMetadata, columnDataTypes);
}

/**
@@ -72,12 +69,10 @@ private ObjectNode generateJsonForResult(Result result) {

// Loop through all the columns and to the json object
for (String columnName : tableColumns) {
// Skip the field if it can be ignored based on check
// Skip the field if it's not in the projection list (when projections are specified)
boolean columnNotProjected =
!projectedColumnsSet.isEmpty() && !projectedColumnsSet.contains(columnName);
boolean isMetadataColumn =
ConsensusCommitUtils.isTransactionMetaColumn(columnName, tableMetadata);
if (columnNotProjected || (!includeMetadata && isMetadataColumn)) {
if (columnNotProjected) {
continue;
}
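
Both producer tasks now apply the same projection-only check. A small illustrative sketch of that predicate, with made-up class and column names:

```java
// Illustrative sketch of the shared check: skip a column only when projections
// were specified and the column is not among them. Column names are made up.
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

class ProjectionFilterDemo {

  static boolean isSkipped(Set<String> projectedColumns, String columnName) {
    return !projectedColumns.isEmpty() && !projectedColumns.contains(columnName);
  }

  public static void main(String[] args) {
    Set<String> projected = new LinkedHashSet<>(Arrays.asList("id", "balance"));
    List<String> tableColumns = Arrays.asList("id", "balance", "tx_id", "tx_state");
    for (String column : tableColumns) {
      // With a non-empty projection list, non-projected columns (including any
      // transaction metadata columns) are skipped; with an empty list, nothing is.
      System.out.printf("%s -> %s%n", column, isSkipped(projected, column) ? "skip" : "keep");
    }
  }
}
```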
