diff --git a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java
index cdb535bc7aaa..bfbb8fdd8412 100644
--- a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java
+++ b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java
@@ -1501,7 +1501,7 @@ private Scan createScanForBackupHistory() {
byte[] startRow = Bytes.toBytes(BACKUP_INFO_PREFIX);
byte[] stopRow = Arrays.copyOf(startRow, startRow.length);
stopRow[stopRow.length - 1] = (byte) (stopRow[stopRow.length - 1] + 1);
- scan.setStartRow(startRow);
+ scan.withStartRow(startRow);
scan.setStopRow(stopRow);
scan.addFamily(BackupSystemTable.SESSIONS_FAMILY);
scan.setMaxVersions(1);
@@ -1541,7 +1541,7 @@ private Scan createScanForReadLogTimestampMap(String backupRoot) {
byte[] startRow = rowkey(TABLE_RS_LOG_MAP_PREFIX, backupRoot);
byte[] stopRow = Arrays.copyOf(startRow, startRow.length);
stopRow[stopRow.length - 1] = (byte) (stopRow[stopRow.length - 1] + 1);
- scan.setStartRow(startRow);
+ scan.withStartRow(startRow);
scan.setStopRow(stopRow);
scan.addFamily(BackupSystemTable.META_FAMILY);
return scan;
@@ -1582,7 +1582,7 @@ private Scan createScanForReadRegionServerLastLogRollResult(String backupRoot) {
byte[] startRow = rowkey(RS_LOG_TS_PREFIX, backupRoot);
byte[] stopRow = Arrays.copyOf(startRow, startRow.length);
stopRow[stopRow.length - 1] = (byte) (stopRow[stopRow.length - 1] + 1);
- scan.setStartRow(startRow);
+ scan.withStartRow(startRow);
scan.setStopRow(stopRow);
scan.addFamily(BackupSystemTable.META_FAMILY);
scan.setMaxVersions(1);
@@ -1891,7 +1891,7 @@ static Scan createScanForBulkLoadedFiles(String backupId) {
: rowkey(BULK_LOAD_PREFIX, backupId + BLK_LD_DELIM);
byte[] stopRow = Arrays.copyOf(startRow, startRow.length);
stopRow[stopRow.length - 1] = (byte) (stopRow[stopRow.length - 1] + 1);
- scan.setStartRow(startRow);
+ scan.withStartRow(startRow);
scan.setStopRow(stopRow);
scan.addFamily(BackupSystemTable.META_FAMILY);
scan.setMaxVersions(1);
@@ -1939,7 +1939,7 @@ private Scan createScanForGetWALs(String backupRoot) {
byte[] startRow = Bytes.toBytes(WALS_PREFIX);
byte[] stopRow = Arrays.copyOf(startRow, startRow.length);
stopRow[stopRow.length - 1] = (byte) (stopRow[stopRow.length - 1] + 1);
- scan.setStartRow(startRow);
+ scan.withStartRow(startRow);
scan.setStopRow(stopRow);
scan.addFamily(BackupSystemTable.META_FAMILY);
return scan;
@@ -1966,7 +1966,7 @@ private Scan createScanForBackupSetList() {
byte[] startRow = Bytes.toBytes(SET_KEY_PREFIX);
byte[] stopRow = Arrays.copyOf(startRow, startRow.length);
stopRow[stopRow.length - 1] = (byte) (stopRow[stopRow.length - 1] + 1);
- scan.setStartRow(startRow);
+ scan.withStartRow(startRow);
scan.setStopRow(stopRow);
scan.addFamily(BackupSystemTable.META_FAMILY);
return scan;
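Note: every hunk in this file uses the same prefix-scan idiom: copy the start row, increment its final byte, and use the copy as the exclusive stop row, so the scan covers exactly the rows sharing the prefix. A minimal sketch of that idiom with a hypothetical helper name (it assumes the prefix does not end in 0xFF, where the increment would overflow):

    import java.util.Arrays;
    import org.apache.hadoop.hbase.client.Scan;

    // Hypothetical helper mirroring the hunks above: scan all rows whose key
    // starts with the given prefix, using an exclusive stop row one past it.
    static Scan createPrefixScan(byte[] prefix) {
      byte[] stopRow = Arrays.copyOf(prefix, prefix.length);
      stopRow[stopRow.length - 1] = (byte) (stopRow[stopRow.length - 1] + 1);
      return new Scan().withStartRow(prefix).withStopRow(stopRow);
    }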
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
index bf71eeef9bb8..2e12baa56762 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
@@ -575,7 +575,7 @@ public static Scan getScanForTableName(Connection connection, TableName tableNam
byte[] stopKey = getTableStopRowForMeta(tableName, QueryType.REGION);
Scan scan = getMetaScan(connection, -1);
- scan.setStartRow(startKey);
+ scan.withStartRow(startKey);
scan.setStopRow(stopKey);
return scan;
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
index 71ee2935755c..2d02e255fce4 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
@@ -192,7 +192,7 @@ public Scan() {}
*/
@Deprecated
public Scan(byte[] startRow) {
- setStartRow(startRow);
+ withStartRow(startRow);
}
/**
@@ -205,7 +205,7 @@ public Scan(byte[] startRow) {
*/
@Deprecated
public Scan(byte[] startRow, byte[] stopRow) {
- setStartRow(startRow);
+ withStartRow(startRow);
setStopRow(stopRow);
}
@@ -394,31 +394,6 @@ public Scan setTimestamp(long timestamp) {
return (Scan) super.setColumnFamilyTimeRange(cf, minStamp, maxStamp);
}
- /**
- * Set the start row of the scan.
- *
- * If the specified row does not exist, the Scanner will start from the next closest row after the
- * specified row.
- * @param startRow row to start scanner at or after
- * @return this
- * @throws IllegalArgumentException if startRow does not meet criteria for a row key (when length
- * exceeds {@link HConstants#MAX_ROW_LENGTH})
- * @deprecated since 2.0.0 and will be removed in 3.0.0. Use {@link #withStartRow(byte[])}
- * instead. This method may change the inclusive of the stop row to keep compatible with the old
- * behavior.
- * @see #withStartRow(byte[])
- * @see <a href="https://issues.apache.org/jira/browse/HBASE-17320">HBASE-17320</a>
- */
- @Deprecated
- public Scan setStartRow(byte[] startRow) {
- withStartRow(startRow);
- if (ClientUtil.areScanStartRowAndStopRowEqual(this.startRow, this.stopRow)) {
- // for keeping the old behavior that a scan with the same start and stop row is a get scan.
- this.includeStopRow = true;
- }
- return this;
- }
-
/**
* Set the start row of the scan.
*
@@ -526,17 +501,17 @@ public Scan withStopRow(byte[] stopRow, boolean inclusive) {
* <p>This is a utility method that converts the desired rowPrefix into the appropriate values
* for the startRow and stopRow to achieve the desired result.</p>
* <p>This can safely be used in combination with setFilter.</p>
- * <p><b>NOTE: Doing a {@link #setStartRow(byte[])} and/or {@link #setStopRow(byte[])}
+ * <p><b>NOTE: Doing a {@link #withStartRow(byte[])} and/or {@link #setStopRow(byte[])}
* after this method will yield undefined results.</b></p>
* @param rowPrefix the prefix all rows must start with. (Set null to remove the filter.)
* @return this
*/
public Scan setRowPrefixFilter(byte[] rowPrefix) {
if (rowPrefix == null) {
- setStartRow(HConstants.EMPTY_START_ROW);
+ withStartRow(HConstants.EMPTY_START_ROW);
setStopRow(HConstants.EMPTY_END_ROW);
} else {
- this.setStartRow(rowPrefix);
+ this.withStartRow(rowPrefix);
this.setStopRow(ClientUtil.calculateTheClosestNextRowKeyForPrefix(rowPrefix));
}
return this;
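Note: the deleted setStartRow above was not a pure alias for withStartRow. As its removed body shows, it flipped includeStopRow back to true whenever the start and stop rows became equal, preserving the legacy rule that such a scan behaves like a single-row get. Callers migrating to withStartRow must ask for that inclusiveness explicitly; a minimal sketch of the difference (row value hypothetical):

    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.util.Bytes;

    class ScanMigrationSketch {
      static void example() {
        byte[] row = Bytes.toBytes("r1");
        // withStopRow is exclusive by default, so equal start and stop rows
        // now describe an empty range...
        Scan empty = new Scan().withStartRow(row).withStopRow(row);
        // ...and the legacy single-row "get scan" needs the inclusive variant.
        Scan singleRow = new Scan().withStartRow(row).withStopRow(row, true);
      }
    }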
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestScan.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestScan.java
index 5fb886d1c700..cba11ae415d5 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestScan.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestScan.java
@@ -194,11 +194,11 @@ public void testSetAuthorizations() {
@Test
public void testSetStartRowAndSetStopRow() {
Scan scan = new Scan();
- scan.setStartRow(null);
- scan.setStartRow(new byte[1]);
- scan.setStartRow(new byte[HConstants.MAX_ROW_LENGTH]);
+ scan.withStartRow(null);
+ scan.withStartRow(new byte[1]);
+ scan.withStartRow(new byte[HConstants.MAX_ROW_LENGTH]);
try {
- scan.setStartRow(new byte[HConstants.MAX_ROW_LENGTH+1]);
+ scan.withStartRow(new byte[HConstants.MAX_ROW_LENGTH+1]);
fail("should've thrown exception");
} catch (IllegalArgumentException iae) {
} catch (Exception e) {
diff --git a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java
index 8101654ab83a..2ca73b340040 100644
--- a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java
+++ b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java
@@ -799,7 +799,7 @@ R median(final Table table, ColumnInterpreter<R, S, P, Q, T> ci, Scan scan) thro
Scan scan2 = new Scan(scan);
// inherit stop row from method parameter
if (startRow != null) {
- scan2.setStartRow(startRow);
+ scan2.withStartRow(startRow);
}
ResultScanner scanner = null;
try {
diff --git a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java
index b21d5c3f29a0..c51039095b0a 100644
--- a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java
+++ b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java
@@ -361,13 +361,13 @@ private Scan validateKey(final RegionInfo region, final ExportProtos.ExportReque
byte[] originStartKey = scan.getStartRow();
if (originStartKey == null
|| Bytes.compareTo(originStartKey, regionStartKey) < 0) {
- scan.setStartRow(regionStartKey);
+ scan.withStartRow(regionStartKey);
}
byte[] regionEndKey = region.getEndKey();
byte[] originEndKey = scan.getStopRow();
if (originEndKey == null
|| Bytes.compareTo(originEndKey, regionEndKey) > 0) {
- scan.setStartRow(regionEndKey);
+ scan.withStopRow(regionEndKey);
}
return scan;
}
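Note: validateKey clamps the request's scan to the region it runs against, raising the start row to the region start key and lowering the stop row to the region end key. A condensed sketch of that clamping, assuming only the RegionInfo and Scan accessors visible in the hunk:

    import org.apache.hadoop.hbase.client.RegionInfo;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.util.Bytes;

    // Sketch: keep the scan inside [region start key, region end key).
    static Scan clampToRegion(Scan scan, RegionInfo region) {
      byte[] startKey = region.getStartKey();
      if (scan.getStartRow() == null || Bytes.compareTo(scan.getStartRow(), startKey) < 0) {
        scan.withStartRow(startKey);
      }
      byte[] endKey = region.getEndKey();
      if (scan.getStopRow() == null || Bytes.compareTo(scan.getStopRow(), endKey) > 0) {
        scan.withStopRow(endKey);
      }
      return scan;
    }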
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
index 784e5b423e00..3cda64056bcd 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
@@ -1653,7 +1653,7 @@ public int run(String[] args) throws Exception {
scan.setBatch(10000);
if (cmd.hasOption("s"))
- scan.setStartRow(Bytes.toBytesBinary(cmd.getOptionValue("s")));
+ scan.withStartRow(Bytes.toBytesBinary(cmd.getOptionValue("s")));
if (cmd.hasOption("e"))
scan.setStopRow(Bytes.toBytesBinary(cmd.getOptionValue("e")));
@@ -1711,7 +1711,7 @@ public int run(String[] args) throws Exception {
abstract static class WalkerBase extends Configured{
protected static CINode findStartNode(Table table, byte[] startKey) throws IOException {
Scan scan = new Scan();
- scan.setStartRow(startKey);
+ scan.withStartRow(startKey);
scan.setBatch(1);
scan.addColumn(FAMILY_NAME, COLUMN_PREV);
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java
index ea219db9b028..342ec60f2cc8 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java
@@ -125,7 +125,7 @@ public void run() {
try (TraceScope scope = TraceUtil.createTrace("Scan")){
Table ht = util.getConnection().getTable(tableName);
Scan s = new Scan();
- s.setStartRow(Bytes.toBytes(rowKeyQueue.take()));
+ s.withStartRow(Bytes.toBytes(rowKeyQueue.take()));
s.setBatch(7);
rs = ht.getScanner(s);
// Something to keep the jvm from removing the loop.
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ExportUtils.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ExportUtils.java
index dfa3d312d421..5ecbaa749af9 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ExportUtils.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ExportUtils.java
@@ -121,7 +121,7 @@ static Scan getScanFromCommandLine(Configuration conf, String[] args) throws IOE
s.setCacheBlocks(false);
// set Start and Stop row
if (conf.get(TableInputFormat.SCAN_ROW_START) != null) {
- s.setStartRow(Bytes.toBytesBinary(conf.get(TableInputFormat.SCAN_ROW_START)));
+ s.withStartRow(Bytes.toBytesBinary(conf.get(TableInputFormat.SCAN_ROW_START)));
}
if (conf.get(TableInputFormat.SCAN_ROW_STOP) != null) {
s.setStopRow(Bytes.toBytesBinary(conf.get(TableInputFormat.SCAN_ROW_STOP)));
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HashTable.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HashTable.java
index 2d742fc6d2b8..3fd93e39191f 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HashTable.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HashTable.java
@@ -195,7 +195,7 @@ Scan initScan() throws IOException {
scan.setMaxVersions(versions);
}
if (!isTableStartRow(startRow)) {
- scan.setStartRow(startRow);
+ scan.withStartRow(startRow);
}
if (!isTableEndRow(stopRow)) {
scan.setStopRow(stopRow);
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormat.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormat.java
index d0cffb325316..456ea628382f 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormat.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormat.java
@@ -38,13 +38,13 @@
* List<Scan> scans = new ArrayList<Scan>();
*
* Scan scan1 = new Scan();
- * scan1.setStartRow(firstRow1);
+ * scan1.withStartRow(firstRow1);
* scan1.setStopRow(lastRow1);
* scan1.setAttribute(Scan.SCAN_ATTRIBUTES_TABLE_NAME, table1);
* scans.add(scan1);
*
* Scan scan2 = new Scan();
- * scan2.setStartRow(firstRow2);
+ * scan2.withStartRow(firstRow2);
* scan2.setStopRow(lastRow2);
* scan2.setAttribute(Scan.SCAN_ATTRIBUTES_TABLE_NAME, table2);
* scans.add(scan2);
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
index d18bfe7213a3..6146097a584c 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
@@ -98,7 +98,7 @@ public RecordReader<ImmutableBytesWritable, Result> createRecordReader(
try {
Scan sc = tSplit.getScan();
- sc.setStartRow(tSplit.getStartRow());
+ sc.withStartRow(tSplit.getStartRow());
sc.setStopRow(tSplit.getEndRow());
trr.setScan(sc);
trr.setTable(table);
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java
index 0b879c598c61..3a014b8af4e2 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java
@@ -171,7 +171,7 @@ private static void setScanFilter(Scan scan, List<MultiRowRangeFilter.RowRange>
}
if (size == 1) {
MultiRowRangeFilter.RowRange range = rowRangeList.get(0);
- scan.setStartRow(range.getStartRow()); //inclusive
+ scan.withStartRow(range.getStartRow()); //inclusive
scan.setStopRow(range.getStopRow()); //exclusive
} else if (size > 1) {
scan.setFilter(new MultiRowRangeFilter(rowRangeList));
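Note: a single row range maps directly onto the scan bounds (start inclusive, stop exclusive); only two or more ranges need a MultiRowRangeFilter. A sketch of that branch as a hypothetical standalone helper:

    import java.util.List;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.filter.MultiRowRangeFilter;

    // Hypothetical helper mirroring setScanFilter above.
    static void applyRanges(Scan scan, List<MultiRowRangeFilter.RowRange> ranges) {
      if (ranges.size() == 1) {
        MultiRowRangeFilter.RowRange range = ranges.get(0);
        scan.withStartRow(range.getStartRow()); // inclusive
        scan.withStopRow(range.getStopRow()); // exclusive
      } else if (ranges.size() > 1) {
        scan.setFilter(new MultiRowRangeFilter(ranges));
      }
    }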
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java
index ecedf9580dc9..664b8cd5ae33 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java
@@ -340,7 +340,7 @@ private static String toHex(ImmutableBytesWritable bytes) {
private void syncRange(Context context, ImmutableBytesWritable startRow,
ImmutableBytesWritable stopRow) throws IOException, InterruptedException {
Scan scan = sourceTableHash.initScan();
- scan.setStartRow(startRow.copyBytes());
+ scan.withStartRow(startRow.copyBytes());
scan.setStopRow(stopRow.copyBytes());
ResultScanner sourceScanner = sourceTable.getScanner(scan);
@@ -681,7 +681,7 @@ private void finishRemainingHashRanges(Context context) throws IOException,
// the open hash range continues past the end of this region
// add a scan to complete the current hash range
Scan scan = sourceTableHash.initScan();
- scan.setStartRow(splitEndRow);
+ scan.withStartRow(splitEndRow);
if (nextSourceKey == null) {
scan.setStopRow(sourceTableHash.stopRow);
} else {
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java
index 3eb7d699bd0c..24b977a941b4 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java
@@ -148,7 +148,7 @@ public static Scan createScanFromConfiguration(Configuration conf) throws IOExce
Scan scan = new Scan();
if (conf.get(SCAN_ROW_START) != null) {
- scan.setStartRow(Bytes.toBytesBinary(conf.get(SCAN_ROW_START)));
+ scan.withStartRow(Bytes.toBytesBinary(conf.get(SCAN_ROW_START)));
}
if (conf.get(SCAN_ROW_STOP) != null) {
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
index ea50f3960697..dce2d2394f1f 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
@@ -181,7 +181,7 @@ public RecordReader<ImmutableBytesWritable, Result> createRecordReader(
final TableRecordReader trr =
this.tableRecordReader != null ? this.tableRecordReader : new TableRecordReader();
Scan sc = new Scan(this.scan);
- sc.setStartRow(tSplit.getStartRow());
+ sc.withStartRow(tSplit.getStartRow());
sc.setStopRow(tSplit.getEndRow());
trr.setScan(sc);
trr.setTable(getTable());
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java
index 5aed30d0ca5a..14be2dd977bb 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java
@@ -198,7 +198,7 @@ public void map(ImmutableBytesWritable row, final Result value,
TableName peerTableName = TableName.valueOf(peerName);
replicatedConnection = ConnectionFactory.createConnection(peerConf);
replicatedTable = replicatedConnection.getTable(peerTableName);
- scan.setStartRow(value.getRow());
+ scan.withStartRow(value.getRow());
byte[] endRow = null;
if (tableSplit instanceof TableSnapshotInputFormat.TableSnapshotRegionSplit) {
@@ -511,7 +511,7 @@ private static void setRowPrefixFilter(Scan scan, String rowPrefixes) {
}
private static void setStartAndStopRows(Scan scan, byte[] startPrefixRow, byte[] lastPrefixRow) {
- scan.setStartRow(startPrefixRow);
+ scan.withStartRow(startPrefixRow);
byte[] stopRow = Bytes.add(Bytes.head(lastPrefixRow, lastPrefixRow.length - 1),
new byte[]{(byte) (lastPrefixRow[lastPrefixRow.length - 1] + 1)});
scan.setStopRow(stopRow);
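Note: setStartAndStopRows derives its stop row by incrementing the last byte of the last prefix, so [startPrefixRow, stopRow) covers every key under the configured prefixes. A worked example with a hypothetical prefix (the usual 0xFF-overflow caveat applies):

    import org.apache.hadoop.hbase.util.Bytes;

    class PrefixStopRowSketch {
      // For the prefix "user9" this returns "user:" (0x39 + 1 == 0x3A), so a
      // scan from "user9" to the returned stop row covers all "user9..." keys.
      static byte[] stopRowFor(byte[] lastPrefixRow) {
        return Bytes.add(Bytes.head(lastPrefixRow, lastPrefixRow.length - 1),
          new byte[] { (byte) (lastPrefixRow[lastPrefixRow.length - 1] + 1) });
      }
    }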
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java
index 8df7a6c74700..ad22d0b6403b 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java
@@ -238,7 +238,7 @@ private void testScan(String start, String stop, String last)
scan.setAttribute(Scan.SCAN_ATTRIBUTES_TABLE_NAME, Bytes.toBytes(tableName));
if (start != null) {
- scan.setStartRow(Bytes.toBytes(start));
+ scan.withStartRow(Bytes.toBytes(start));
}
if (stop != null) {
scan.setStopRow(Bytes.toBytes(stop));
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java
index a0483d53af5c..f3c14aa3e5d4 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java
@@ -169,7 +169,7 @@ static void runTestMapreduce(Table table) throws IOException,
org.apache.hadoop.hbase.mapreduce.TableRecordReaderImpl trr =
new org.apache.hadoop.hbase.mapreduce.TableRecordReaderImpl();
Scan s = new Scan();
- s.setStartRow(Bytes.toBytes("aaa"));
+ s.withStartRow(Bytes.toBytes("aaa"));
s.setStopRow(Bytes.toBytes("zzz"));
s.addFamily(FAMILY);
trr.setScan(s);
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
index f2bc784409b7..268f388d3586 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
@@ -140,7 +140,7 @@ public TableScanResource getScanResource(
byte[] prefixBytes = Bytes.toBytes(prefix);
prefixFilter = new PrefixFilter(Bytes.toBytes(prefix));
if (startRow.isEmpty()) {
- tableScan.setStartRow(prefixBytes);
+ tableScan.withStartRow(prefixBytes);
}
}
if (LOG.isTraceEnabled()) {
@@ -154,7 +154,7 @@ public TableScanResource getScanResource(
tableScan.setMaxVersions(maxVersions);
tableScan.setTimeRange(startTime, endTime);
if (!startRow.isEmpty()) {
- tableScan.setStartRow(Bytes.toBytes(startRow));
+ tableScan.withStartRow(Bytes.toBytes(startRow));
}
tableScan.setStopRow(Bytes.toBytes(endRow));
for (String col : column) {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide5.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide5.java
index 4ff10c4a7349..3f9edc3de2fe 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide5.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide5.java
@@ -1317,7 +1317,7 @@ public void testJira6912() throws Exception {
TEST_UTIL.flush();
Scan scan = new Scan();
- scan.setStartRow(Bytes.toBytes(1));
+ scan.withStartRow(Bytes.toBytes(1));
scan.setStopRow(Bytes.toBytes(3));
scan.addColumn(FAMILY, FAMILY);
scan.setFilter(new RowFilter(CompareOperator.NOT_EQUAL,
@@ -2114,7 +2114,7 @@ private void reverseScanTest(Table table, boolean small) throws IOException {
scan = new Scan();
scan.setSmall(small);
scan.setReversed(true);
- scan.setStartRow(Bytes.toBytes("002"));
+ scan.withStartRow(Bytes.toBytes("002"));
try (ResultScanner scanner = table.getScanner(scan)) {
int count = 0;
byte[] lastRow = null;
@@ -2135,7 +2135,7 @@ private void reverseScanTest(Table table, boolean small) throws IOException {
scan = new Scan();
scan.setSmall(small);
scan.setReversed(true);
- scan.setStartRow(Bytes.toBytes("002"));
+ scan.withStartRow(Bytes.toBytes("002"));
scan.setStopRow(Bytes.toBytes("000"));
try (ResultScanner scanner = table.getScanner(scan)) {
int count = 0;
@@ -2157,7 +2157,7 @@ private void reverseScanTest(Table table, boolean small) throws IOException {
scan = new Scan();
scan.setSmall(small);
scan.setReversed(true);
- scan.setStartRow(Bytes.toBytes("001"));
+ scan.withStartRow(Bytes.toBytes("001"));
try (ResultScanner scanner = table.getScanner(scan)) {
int count = 0;
byte[] lastRow = null;
@@ -2178,7 +2178,7 @@ private void reverseScanTest(Table table, boolean small) throws IOException {
scan = new Scan();
scan.setSmall(small);
scan.setReversed(true);
- scan.setStartRow(Bytes.toBytes("000"));
+ scan.withStartRow(Bytes.toBytes("000"));
try (ResultScanner scanner = table.getScanner(scan)) {
int count = 0;
byte[] lastRow = null;
@@ -2199,7 +2199,7 @@ private void reverseScanTest(Table table, boolean small) throws IOException {
scan = new Scan();
scan.setSmall(small);
scan.setReversed(true);
- scan.setStartRow(Bytes.toBytes("006"));
+ scan.withStartRow(Bytes.toBytes("006"));
scan.setStopRow(Bytes.toBytes("002"));
try (ResultScanner scanner = table.getScanner(scan)) {
int count = 0;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
index 3200b1ac8e04..680add5b20d9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
@@ -289,7 +289,7 @@ public void testRegionScannerReseek() throws Exception {
Scan s = new Scan();
// set a start row
- s.setStartRow(ROWS_FOUR[1]);
+ s.withStartRow(ROWS_FOUR[1]);
RegionScanner scanner = region.getScanner(s);
// reseek to row three.
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOrOperatorWithBlkCnt.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOrOperatorWithBlkCnt.java
index 64263da29c1c..f252e7738880 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOrOperatorWithBlkCnt.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOrOperatorWithBlkCnt.java
@@ -156,7 +156,7 @@ private List<Cell> getScanResult(byte[] startRow, byte[] stopRow, Table ht) thro
Scan scan = new Scan();
scan.setMaxVersions();
if(!Bytes.toString(startRow).isEmpty()) {
- scan.setStartRow(startRow);
+ scan.withStartRow(startRow);
}
if(!Bytes.toString(stopRow).isEmpty()) {
scan.setStopRow(stopRow);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java
index e95e679eb760..33fd0f533dd8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java
@@ -215,7 +215,7 @@ private void testScan(final int[] columnArr, final boolean lazySeekEnabled,
qualSet.add(qualStr);
}
scan.setMaxVersions(maxVersions);
- scan.setStartRow(rowBytes(startRow));
+ scan.withStartRow(rowBytes(startRow));
// Adjust for the fact that for multi-row queries the end row is exclusive.
{
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java
index 29560fd630ab..3c548f10a736 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java
@@ -452,7 +452,7 @@ public void testTagsWithAppendAndIncrement() throws Exception {
increment.setAttribute("visibility", Bytes.toBytes("tag2"));
table.increment(increment);
TestCoprocessorForTags.checkTagPresence = true;
- scanner = table.getScanner(new Scan().setStartRow(row2));
+ scanner = table.getScanner(new Scan().withStartRow(row2));
result = scanner.next();
kv = KeyValueUtil.ensureKeyValue(result.getColumnLatestCell(f, q));
tags = TestCoprocessorForTags.tags;
@@ -472,7 +472,7 @@ public void testTagsWithAppendAndIncrement() throws Exception {
append.addColumn(f, q, Bytes.toBytes("b"));
table.append(append);
TestCoprocessorForTags.checkTagPresence = true;
- scanner = table.getScanner(new Scan().setStartRow(row3));
+ scanner = table.getScanner(new Scan().withStartRow(row3));
result = scanner.next();
kv = KeyValueUtil.ensureKeyValue(result.getColumnLatestCell(f, q));
tags = TestCoprocessorForTags.tags;
@@ -486,7 +486,7 @@ public void testTagsWithAppendAndIncrement() throws Exception {
append.setAttribute("visibility", Bytes.toBytes("tag2"));
table.append(append);
TestCoprocessorForTags.checkTagPresence = true;
- scanner = table.getScanner(new Scan().setStartRow(row3));
+ scanner = table.getScanner(new Scan().withStartRow(row3));
result = scanner.next();
kv = KeyValueUtil.ensureKeyValue(result.getColumnLatestCell(f, q));
tags = TestCoprocessorForTags.tags;
@@ -510,7 +510,7 @@ public void testTagsWithAppendAndIncrement() throws Exception {
append.setAttribute("visibility", Bytes.toBytes("tag2"));
table.append(append);
TestCoprocessorForTags.checkTagPresence = true;
- scanner = table.getScanner(new Scan().setStartRow(row4));
+ scanner = table.getScanner(new Scan().withStartRow(row4));
result = scanner.next();
kv = KeyValueUtil.ensureKeyValue(result.getColumnLatestCell(f, q));
tags = TestCoprocessorForTags.tags;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java
index f15aa3d287b9..d0445c5f1e12 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java
@@ -241,7 +241,7 @@ public Object run() throws Exception {
@Override
public List<Cell> run() throws Exception {
Scan scan = new Scan();
- scan.setStartRow(TEST_ROW);
+ scan.withStartRow(TEST_ROW);
scan.setStopRow(Bytes.add(TEST_ROW, new byte[]{ 0 } ));
scan.addFamily(TEST_FAMILY);
Connection connection = ConnectionFactory.createConnection(conf);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestWithDisabledAuthorization.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestWithDisabledAuthorization.java
index 62f186d0459c..74fa3e8e8fe5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestWithDisabledAuthorization.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestWithDisabledAuthorization.java
@@ -973,7 +973,7 @@ public Object run() throws Exception {
@Override
public List<Cell> run() throws Exception {
Scan scan = new Scan();
- scan.setStartRow(TEST_ROW);
+ scan.withStartRow(TEST_ROW);
scan.setStopRow(Bytes.add(TEST_ROW, new byte[]{ 0 } ));
scan.addFamily(TEST_FAMILY);
Connection connection = ConnectionFactory.createConnection(conf);
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.java
index d7a6945ab554..25045cc01752 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.java
@@ -883,7 +883,7 @@ public int scannerOpenWithScan(ByteBuffer tableName, TScan tScan,
Scan scan = new Scan();
addAttributes(scan, attributes);
if (tScan.isSetStartRow()) {
- scan.setStartRow(tScan.getStartRow());
+ scan.withStartRow(tScan.getStartRow());
}
if (tScan.isSetStopRow()) {
scan.setStopRow(tScan.getStopRow());
@@ -1161,7 +1161,7 @@ private Result getReverseScanResult(byte[] tableName, byte[] row, byte[] family)
Scan scan = new Scan(row);
scan.setReversed(true);
scan.addFamily(family);
- scan.setStartRow(row);
+ scan.withStartRow(row);
try (Table table = getTable(tableName);
ResultScanner scanner = table.getScanner(scan)) {
return scanner.next();
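Note: getReverseScanResult pairs withStartRow with setReversed(true); the start row is then the highest key the scanner can return and iteration walks backwards, so the first result is the closest existing row at or before the probe key. A minimal self-contained sketch under those assumptions (method name hypothetical):

    import java.io.IOException;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.ResultScanner;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.client.Table;

    // Hypothetical probe: the closest row at or before 'row', or null if none.
    static Result probeAtOrBefore(Connection conn, TableName tableName, byte[] row,
        byte[] family) throws IOException {
      Scan scan = new Scan().withStartRow(row);
      scan.setReversed(true);
      scan.addFamily(family);
      try (Table table = conn.getTable(tableName);
          ResultScanner scanner = table.getScanner(scan)) {
        return scanner.next();
      }
    }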
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java
index 8371d9cebd00..7a49b512c91a 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java
@@ -513,7 +513,7 @@ public static Scan scanFromThrift(TScan in) throws IOException {
Scan out = new Scan();
if (in.isSetStartRow()) {
- out.setStartRow(in.getStartRow());
+ out.withStartRow(in.getStartRow());
}
if (in.isSetStopRow()) {
out.setStopRow(in.getStopRow());