Commit e2e3cd8

change from splits to split in batch
Signed-off-by: Peng Huo <[email protected]>
penghuo committed Nov 8, 2022
1 parent 315c39a commit e2e3cd8
Showing 3 changed files with 4 additions and 8 deletions.
@@ -5,7 +5,6 @@

 package org.opensearch.sql.executor.streaming;

-import java.util.List;
 import lombok.Data;
 import org.opensearch.sql.storage.split.Split;

@@ -14,5 +13,5 @@
  */
 @Data
 public class Batch {
-  private final List<Split> splits;
+  private final Split split;
 }
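For reference, the whole Batch class after this change is a one-field Lombok value type. Because @Data bundles @RequiredArgsConstructor and @Getter, the new Batch(split) constructor and the getSplit() accessor used in the other two files are generated automatically. A minimal sketch of the resulting class, with the javadoc wording assumed (only its closing line is visible in the hunk above):

    package org.opensearch.sql.executor.streaming;

    import lombok.Data;
    import org.opensearch.sql.storage.split.Split;

    /** A batch of data in streaming execution (wording assumed). */
    @Data
    public class Batch {
      // Exactly one Split per Batch after this commit (previously List<Split> splits).
      private final Split split;
    }

The remaining two diffs update the producer and the test to match.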
@@ -8,7 +8,6 @@
 import com.google.common.collect.Sets;
 import java.io.IOException;
 import java.util.Arrays;
-import java.util.Collections;
 import java.util.HashSet;
 import java.util.Optional;
 import java.util.Set;
@@ -101,6 +100,6 @@ public Batch getBatch(Optional<Offset> start, Offset end) {
         .collect(Collectors.toSet());

     log.debug("fetch files {} with id from: {} to: {}.", paths, start, end);
-    return new Batch(Collections.singletonList(new FileSystemSplit(paths)));
+    return new Batch(new FileSystemSplit(paths));
   }
 }
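With the singleton list gone, consumers of getBatch() read the split directly instead of unwrapping a Collections.singletonList, as the test diff below verifies. A hedged consumer sketch; the method name process and the printing are illustrative assumptions, while Batch, getSplit(), FileSystemSplit, and getPaths() appear in this diff:

    // Illustrative only: shows the new one-split-per-Batch contract.
    void process(Batch batch) {
      // Previously: (FileSystemSplit) batch.getSplits().get(0)
      FileSystemSplit split = (FileSystemSplit) batch.getSplit();
      split.getPaths().forEach(path -> System.out.println("would scan " + path));
    }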
@@ -18,7 +18,6 @@
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.Arrays;
-import java.util.List;
 import java.util.Optional;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -154,10 +153,9 @@ StreamSource batchInBetween(Long start, Long end, String... uris) {

   private StreamSource internalBatchInBetween(
       Optional<Offset> start, Offset end, String... uris) {
-    List<Split> splits = streamSource.getBatch(start, end).getSplits();
-    assertEquals(1, splits.size());
+    Split split = streamSource.getBatch(start, end).getSplit();
     assertThat(
-        ((FileSystemSplit) splits.get(0)).getPaths(),
+        ((FileSystemSplit) split).getPaths(),
         containsInAnyOrder(
             Arrays.stream(uris)
                 .map(name -> new org.apache.hadoop.fs.Path(perTestTempDir.resolve(name).toUri()))
