Optimize GridFS throughput by removing redundant byte array cloning. #1402

Merged: 5 commits, Jun 5, 2024
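
This PR retypes the GridFS chunks collection in the sync driver from MongoCollection<Document> to MongoCollection<BsonDocument>, so chunk payloads travel as org.bson.BsonBinary instead of org.bson.types.Binary. The point of the switch, which the diff itself does not spell out, is that Binary.getData() hands back a defensive copy of the payload, whereas BsonBinary.getData() exposes the wrapped array, so each chunk (255 KB by default) no longer picks up an extra copy on its way in or out of the bucket. The sketch below is not part of the PR; it only illustrates the assumed copying behaviour of the two wrapper types, which is worth verifying against the bson version you build against.

import org.bson.BsonBinary;
import org.bson.types.Binary;

public final class BinaryCopyDemo {
    public static void main(final String[] args) {
        byte[] chunk = new byte[255 * 1024];              // one default-sized GridFS chunk

        Binary legacy = new Binary(chunk);
        byte[] viaLegacy = legacy.getData();              // assumed: returns a fresh copy of the payload

        BsonBinary direct = new BsonBinary(chunk);
        byte[] viaDirect = direct.getData();              // assumed: returns the wrapped array itself

        System.out.println("legacy copies: " + (viaLegacy != chunk));
        System.out.println("direct shares: " + (viaDirect == chunk));
    }
}
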
@@ -57,7 +57,7 @@ final class GridFSBucketImpl implements GridFSBucket {
     private final String bucketName;
     private final int chunkSizeBytes;
     private final MongoCollection<GridFSFile> filesCollection;
-    private final MongoCollection<Document> chunksCollection;
+    private final MongoCollection<BsonDocument> chunksCollection;
     private volatile boolean checkedIndexes;

     GridFSBucketImpl(final MongoDatabase database) {
@@ -71,7 +71,7 @@ final class GridFSBucketImpl implements GridFSBucket {
     }

     GridFSBucketImpl(final String bucketName, final int chunkSizeBytes, final MongoCollection<GridFSFile> filesCollection,
-                     final MongoCollection<Document> chunksCollection) {
+                     final MongoCollection<BsonDocument> chunksCollection) {
         this.bucketName = notNull("bucketName", bucketName);
         this.chunkSizeBytes = chunkSizeBytes;
         this.filesCollection = notNull("filesCollection", filesCollection);
@@ -459,8 +459,8 @@ private static MongoCollection<GridFSFile> getFilesCollection(final MongoDatabas
         );
     }

-    private static MongoCollection<Document> getChunksCollection(final MongoDatabase database, final String bucketName) {
-        return database.getCollection(bucketName + ".chunks").withCodecRegistry(MongoClientSettings.getDefaultCodecRegistry());
+    private static MongoCollection<BsonDocument> getChunksCollection(final MongoDatabase database, final String bucketName) {
+        return database.getCollection(bucketName + ".chunks", BsonDocument.class).withCodecRegistry(MongoClientSettings.getDefaultCodecRegistry());
     }

     private void checkCreateIndex(@Nullable final ClientSession clientSession) {
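
The bucket-level change in GridFSBucketImpl above is the enabler: getChunksCollection now asks the driver for a BsonDocument-typed collection, so reads and writes on <bucket>.chunks go through the BsonDocument codec rather than being materialized as Document. The same pattern outside GridFS looks like this (standalone sketch; the connection string and the database and collection names are placeholders, not from the PR):

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoCollection;
import org.bson.BsonDocument;

public final class TypedChunksCollectionDemo {
    public static void main(final String[] args) {
        try (MongoClient client = MongoClients.create("mongodb://localhost:27017")) {
            // Same idea as getChunksCollection in the diff: request BsonDocument instead of Document.
            MongoCollection<BsonDocument> chunks = client.getDatabase("demo")
                    .getCollection("fs.chunks", BsonDocument.class);
            System.out.println(chunks.countDocuments());
        }
    }
}
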
@@ -23,9 +23,10 @@
 import com.mongodb.client.MongoCursor;
 import com.mongodb.client.gridfs.model.GridFSFile;
 import com.mongodb.lang.Nullable;
+import org.bson.BsonBinary;
+import org.bson.BsonDocument;
+import org.bson.BsonInt32;
 import org.bson.BsonValue;
-import org.bson.Document;
-import org.bson.types.Binary;

 import java.util.concurrent.locks.ReentrantLock;

@@ -37,12 +38,12 @@
 class GridFSDownloadStreamImpl extends GridFSDownloadStream {
     private final ClientSession clientSession;
     private final GridFSFile fileInfo;
-    private final MongoCollection<Document> chunksCollection;
+    private final MongoCollection<BsonDocument> chunksCollection;
     private final BsonValue fileId;
     private final long length;
     private final int chunkSizeInBytes;
     private final int numberOfChunks;
-    private MongoCursor<Document> cursor;
+    private MongoCursor<BsonDocument> cursor;
     private int batchSize;
     private int chunkIndex;
     private int bufferOffset;
@@ -55,10 +56,10 @@ class GridFSDownloadStreamImpl extends GridFSDownloadStream {
     private boolean closed = false;

     GridFSDownloadStreamImpl(@Nullable final ClientSession clientSession, final GridFSFile fileInfo,
-                             final MongoCollection<Document> chunksCollection) {
+                             final MongoCollection<BsonDocument> chunksCollection) {
         this.clientSession = clientSession;
         this.fileInfo = notNull("file information", fileInfo);
-        this.chunksCollection = notNull("chunks collection", chunksCollection);
+        this.chunksCollection = notNull("chunks collection", chunksCollection);

         fileId = fileInfo.getId();
         length = fileInfo.getLength();
@@ -213,17 +214,17 @@ private void discardCursor() {
     }

     @Nullable
-    private Document getChunk(final int startChunkIndex) {
+    private BsonDocument getChunk(final int startChunkIndex) {
         if (cursor == null) {
             cursor = getCursor(startChunkIndex);
         }
-        Document chunk = null;
+        BsonDocument chunk = null;
         if (cursor.hasNext()) {
             chunk = cursor.next();
             if (batchSize == 1) {
                 discardCursor();
             }
-            if (chunk.getInteger("n") != startChunkIndex) {
+            if (chunk.getInt32("n").getValue() != startChunkIndex) {
                 throw new MongoGridFSException(format("Could not find file chunk for file_id: %s at chunk index %s.",
                         fileId, startChunkIndex));
             }
@@ -232,28 +233,28 @@ private Document getChunk(final int startChunkIndex) {
         return chunk;
     }

-    private MongoCursor<Document> getCursor(final int startChunkIndex) {
-        FindIterable<Document> findIterable;
-        Document filter = new Document("files_id", fileId).append("n", new Document("$gte", startChunkIndex));
+    private MongoCursor<BsonDocument> getCursor(final int startChunkIndex) {
+        FindIterable<BsonDocument> findIterable;
+        BsonDocument filter = new BsonDocument("files_id", fileId).append("n", new BsonDocument("$gte", new BsonInt32(startChunkIndex)));
         if (clientSession != null) {
             findIterable = chunksCollection.find(clientSession, filter);
         } else {
             findIterable = chunksCollection.find(filter);
         }
-        return findIterable.batchSize(batchSize).sort(new Document("n", 1)).iterator();
+        return findIterable.batchSize(batchSize).sort(new BsonDocument("n", new BsonInt32(1))).iterator();
     }

-    private byte[] getBufferFromChunk(@Nullable final Document chunk, final int expectedChunkIndex) {
+    private byte[] getBufferFromChunk(@Nullable final BsonDocument chunk, final int expectedChunkIndex) {

-        if (chunk == null || chunk.getInteger("n") != expectedChunkIndex) {
+        if (chunk == null || chunk.getInt32("n").getValue() != expectedChunkIndex) {
             throw new MongoGridFSException(format("Could not find file chunk for file_id: %s at chunk index %s.",
                     fileId, expectedChunkIndex));
         }

-        if (!(chunk.get("data") instanceof Binary)) {
+        if (!(chunk.get("data") instanceof BsonBinary)) {
             throw new MongoGridFSException("Unexpected data format for the chunk");
         }
-        byte[] data = chunk.get("data", Binary.class).getData();
+        byte[] data = chunk.getBinary("data").getData();

         long expectedDataLength = 0;
         boolean extraChunk = false;
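
On the download path (GridFSDownloadStreamImpl above) the chunk is now consumed through BsonDocument accessors: the chunk index comes from getInt32("n").getValue(), a primitive int, so the comparison no longer relies on Integer unboxing, and the payload comes from getBinary("data").getData(), which avoids the copy that Binary.getData() would make for every chunk read. A compact before/after sketch of just the accessor pattern, separate from the PR's code and resting on the same copying assumption as above:

import org.bson.BsonBinary;
import org.bson.BsonDocument;
import org.bson.BsonInt32;
import org.bson.Document;
import org.bson.types.Binary;

public final class ChunkAccessDemo {
    public static void main(final String[] args) {
        byte[] payload = {1, 2, 3};

        // Before: Document-backed chunk, payload wrapped in org.bson.types.Binary.
        Document oldChunk = new Document("n", 0).append("data", new Binary(payload));
        int oldIndex = oldChunk.getInteger("n");                       // Integer, auto-unboxed
        byte[] oldData = oldChunk.get("data", Binary.class).getData(); // assumed: copies the payload

        // After: BsonDocument-backed chunk, payload wrapped in BsonBinary.
        BsonDocument newChunk = new BsonDocument("n", new BsonInt32(0))
                .append("data", new BsonBinary(payload));
        int newIndex = newChunk.getInt32("n").getValue();              // primitive int
        byte[] newData = newChunk.getBinary("data").getData();         // assumed: no extra copy

        System.out.println(oldIndex + " " + newIndex + " " + oldData.length + " " + newData.length);
    }
}
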
@@ -21,9 +21,11 @@
 import com.mongodb.client.MongoCollection;
 import com.mongodb.client.gridfs.model.GridFSFile;
 import com.mongodb.lang.Nullable;
+import org.bson.BsonBinary;
+import org.bson.BsonDocument;
+import org.bson.BsonInt32;
 import org.bson.BsonValue;
 import org.bson.Document;
-import org.bson.types.Binary;
 import org.bson.types.ObjectId;

 import java.util.Date;
@@ -35,7 +37,7 @@
 final class GridFSUploadStreamImpl extends GridFSUploadStream {
     private final ClientSession clientSession;
     private final MongoCollection<GridFSFile> filesCollection;
-    private final MongoCollection<Document> chunksCollection;
+    private final MongoCollection<BsonDocument> chunksCollection;
     private final BsonValue fileId;
     private final String filename;
     private final int chunkSizeBytes;
@@ -49,7 +51,7 @@ final class GridFSUploadStreamImpl extends GridFSUploadStream {
     private boolean closed = false;

     GridFSUploadStreamImpl(@Nullable final ClientSession clientSession, final MongoCollection<GridFSFile> filesCollection,
-                           final MongoCollection<Document> chunksCollection, final BsonValue fileId, final String filename,
+                           final MongoCollection<BsonDocument> chunksCollection, final BsonValue fileId, final String filename,
                            final int chunkSizeBytes, @Nullable final Document metadata) {
         this.clientSession = clientSession;
         this.filesCollection = notNull("files collection", filesCollection);
@@ -160,23 +162,23 @@ public void close() {
     private void writeChunk() {
         if (bufferOffset > 0) {
             if (clientSession != null) {
-                chunksCollection.insertOne(clientSession, new Document("files_id", fileId).append("n", chunkIndex)
+                chunksCollection.insertOne(clientSession, new BsonDocument("files_id", fileId).append("n", new BsonInt32(chunkIndex))
                         .append("data", getData()));
             } else {
-                chunksCollection.insertOne(new Document("files_id", fileId).append("n", chunkIndex).append("data", getData()));
+                chunksCollection.insertOne(new BsonDocument("files_id", fileId).append("n", new BsonInt32(chunkIndex)).append("data", getData()));
             }
             chunkIndex++;
             bufferOffset = 0;
         }
     }

-    private Binary getData() {
+    private BsonBinary getData() {
         if (bufferOffset < chunkSizeBytes) {
             byte[] sizedBuffer = new byte[bufferOffset];
             System.arraycopy(buffer, 0, sizedBuffer, 0, bufferOffset);
             buffer = sizedBuffer;
         }
-        return new Binary(buffer);
+        return new BsonBinary(buffer);
     }

     private void checkClosed() {
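
The upload path (GridFSUploadStreamImpl above) mirrors this: writeChunk now assembles a BsonDocument and getData() wraps the in-memory buffer in a BsonBinary, so the chunk bytes are not cloned again just to build the insert. The only copy left is the right-sizing of a final, partial chunk, which the PR keeps as-is. A standalone sketch of that shape (the files_id value here is a placeholder, and the no-copy behaviour of BsonBinary is the same assumption as above):

import org.bson.BsonBinary;
import org.bson.BsonDocument;
import org.bson.BsonInt32;
import org.bson.BsonString;

import java.util.Arrays;

public final class UploadChunkShapeDemo {
    public static void main(final String[] args) {
        byte[] buffer = new byte[255 * 1024];
        int bufferOffset = 100;                                       // pretend only 100 bytes of the last chunk are filled

        // The final partial chunk is still right-sized with one copy, exactly as before the PR.
        byte[] data = bufferOffset < buffer.length ? Arrays.copyOf(buffer, bufferOffset) : buffer;

        BsonDocument chunk = new BsonDocument("files_id", new BsonString("placeholder-id"))
                .append("n", new BsonInt32(0))
                .append("data", new BsonBinary(data));                // assumed: wraps the array, no clone

        System.out.println(chunk.getBinary("data").getData().length); // 100
    }
}
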
@@ -35,12 +35,13 @@ import com.mongodb.client.result.DeleteResult
 import com.mongodb.client.result.UpdateResult
 import com.mongodb.internal.operation.BatchCursor
 import com.mongodb.internal.operation.FindOperation
+import org.bson.BsonBinary
 import org.bson.BsonDocument
+import org.bson.BsonInt32
 import org.bson.BsonObjectId
 import org.bson.BsonString
 import org.bson.Document
 import org.bson.codecs.DocumentCodecProvider
-import org.bson.types.Binary
 import org.bson.types.ObjectId
 import spock.lang.Specification
 import spock.lang.Unroll
@@ -327,7 +328,9 @@ class GridFSBucketSpecification extends Specification {
         def findIterable = Mock(FindIterable)
         def filesCollection = Mock(MongoCollection)
         def tenBytes = new byte[10]
-        def chunkDocument = new Document('files_id', fileInfo.getId()).append('n', 0).append('data', new Binary(tenBytes))
+        def chunkDocument = new BsonDocument('files_id', fileInfo.getId())
+                .append('n', new BsonInt32(0))
+                .append('data', new BsonBinary(tenBytes))
         def chunksCollection = Mock(MongoCollection)
         def gridFSBucket = new GridFSBucketImpl('fs', 255, filesCollection, chunksCollection)
         def outputStream = new ByteArrayOutputStream(10)
@@ -346,7 +349,7 @@
         } else {
             1 * filesCollection.find() >> findIterable
         }
-        1 * findIterable.filter(new Document('_id', bsonFileId)) >> findIterable
+        1 * findIterable.filter(new BsonDocument('_id', bsonFileId)) >> findIterable
         1 * findIterable.first() >> fileInfo

         then:
@@ -376,7 +379,9 @@
         def findIterable = Mock(FindIterable)
         def filesCollection = Mock(MongoCollection)
         def tenBytes = new byte[10]
-        def chunkDocument = new Document('files_id', fileInfo.getId()).append('n', 0).append('data', new Binary(tenBytes))
+        def chunkDocument = new BsonDocument('files_id', fileInfo.getId())
+                .append('n', new BsonInt32(0))
+                .append('data', new BsonBinary(tenBytes))
         def chunksCollection = Mock(MongoCollection)
         def gridFSBucket = new GridFSBucketImpl('fs', 255, filesCollection, chunksCollection)
         def outputStream = new ByteArrayOutputStream(10)
@@ -395,7 +400,7 @@
         } else {
             1 * filesCollection.find() >> findIterable
         }
-        1 * findIterable.filter(new Document('_id', bsonFileId)) >> findIterable
+        1 * findIterable.filter(new BsonDocument('_id', bsonFileId)) >> findIterable
         1 * findIterable.first() >> fileInfo

         then:
@@ -424,11 +429,13 @@
         def bsonFileId = new BsonObjectId(fileId)
         def fileInfo = new GridFSFile(bsonFileId, filename, 10, 255, new Date(), new Document())
         def mongoCursor = Mock(MongoCursor)
-        def findIterable = Mock(FindIterable)
+        def gridFsFileFindIterable = Mock(FindIterable)
         def findChunkIterable = Mock(FindIterable)
         def filesCollection = Mock(MongoCollection)
         def tenBytes = new byte[10]
-        def chunkDocument = new Document('files_id', fileInfo.getId()).append('n', 0).append('data', new Binary(tenBytes))
+        def chunkDocument = new BsonDocument('files_id', fileInfo.getId())
+                .append('n', new BsonInt32(0))
+                .append('data', new BsonBinary(tenBytes))
         def chunksCollection = Mock(MongoCollection)
         def gridFSBucket = new GridFSBucketImpl('fs', 255, filesCollection, chunksCollection)
         def outputStream = new ByteArrayOutputStream(10)
@@ -443,14 +450,14 @@

         then:
         if (clientSession != null) {
-            1 * filesCollection.find(clientSession) >> findIterable
+            1 * filesCollection.find(clientSession) >> gridFsFileFindIterable
         } else {
-            1 * filesCollection.find() >> findIterable
+            1 * filesCollection.find() >> gridFsFileFindIterable
         }
-        1 * findIterable.filter(new Document('filename', filename)) >> findIterable
-        1 * findIterable.skip(_) >> findIterable
-        1 * findIterable.sort(_) >> findIterable
-        1 * findIterable.first() >> fileInfo
+        1 * gridFsFileFindIterable.filter(new Document('filename', filename)) >> gridFsFileFindIterable
+        1 * gridFsFileFindIterable.skip(_) >> gridFsFileFindIterable
+        1 * gridFsFileFindIterable.sort(_) >> gridFsFileFindIterable
+        1 * gridFsFileFindIterable.first() >> fileInfo

         if (clientSession != null) {
             1 * chunksCollection.find(clientSession, _) >> findChunkIterable
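
The GridFSBucketSpecification changes above are mechanical: the mocked chunk documents become BsonDocument/BsonBinary to match the new collection type, and the findIterable mock that stubs the files collection is renamed to gridFsFileFindIterable to distinguish it from findChunkIterable. The public GridFSBucket API is untouched, so applications keep the same code and simply incur fewer intermediate copies per chunk. A routine round-trip for orientation (standalone sketch; the URI and database name are placeholders, not from the PR):

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.gridfs.GridFSBucket;
import com.mongodb.client.gridfs.GridFSBuckets;
import org.bson.types.ObjectId;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;

public final class GridFSRoundTripDemo {
    public static void main(final String[] args) {
        try (MongoClient client = MongoClients.create("mongodb://localhost:27017")) {
            GridFSBucket bucket = GridFSBuckets.create(client.getDatabase("demo"));

            byte[] payload = new byte[1024 * 1024];
            ObjectId id = bucket.uploadFromStream("example.bin", new ByteArrayInputStream(payload));

            ByteArrayOutputStream out = new ByteArrayOutputStream();
            bucket.downloadToStream(id, out);
            System.out.println(out.size());                           // 1048576
        }
    }
}
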