Skip to content

Commit

Permalink
Merge pull request #1522 from phac-nml/dependabot/gradle/com.azure-azure-storage-blob-12.28.1
Browse files Browse the repository at this point in the history

chore(deps): bump com.azure:azure-storage-blob from 12.18.0 to 12.28.1
  • Loading branch information
ericenns authored Nov 13, 2024
2 parents ceee29a + ce1d1e7 commit f3cf4f1
Show file tree
Hide file tree
Showing 3 changed files with 29 additions and 12 deletions.
2 changes: 1 addition & 1 deletion build.gradle.kts
Original file line number Diff line number Diff line change
Expand Up @@ -178,7 +178,7 @@ dependencies {
}

// Microsoft Azure
implementation("com.azure:azure-storage-blob:12.18.0") {
implementation("com.azure:azure-storage-blob:12.28.1") {
exclude(group = "jakarta.xml.bind", module = "jakarta.xml.bind-api")
exclude(group = "jakarta.activation", module = "jakarta.activation-api")
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
import java.io.IOException;
import java.io.InputStream;
import java.nio.channels.FileChannel;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
Expand All @@ -15,6 +16,7 @@
import java.util.stream.Collectors;
import java.util.zip.GZIPInputStream;

import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
Expand Down Expand Up @@ -252,8 +254,12 @@ public boolean fileExists(Path file) {
@Override
public InputStream getFileInputStream(Path file) {
try {
S3Object s3Object = s3.getObject(bucketName, getAwsFileAbsolutePath(file));
return s3Object.getObjectContent();
if (getFileSizeBytes(file) > 0L) {
S3Object s3Object = s3.getObject(bucketName, getAwsFileAbsolutePath(file));
return s3Object.getObjectContent();
} else {
return IOUtils.toInputStream("", Charset.defaultCharset());
}
} catch (AmazonServiceException e) {
logger.error("Couldn't read file from s3 bucket [" + e + "]");
throw new StorageException("Unable to locate file in s3 bucket", e);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
import java.io.IOException;
import java.io.InputStream;
import java.nio.channels.FileChannel;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
Expand All @@ -13,6 +14,7 @@
import java.util.Optional;
import java.util.zip.GZIPInputStream;

import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
Expand Down Expand Up @@ -52,7 +54,8 @@ public IridaFileStorageAzureUtilityImpl(boolean deleteFromFilesystem, String con
}

/*
This instantiation method should only be used for testing. DO NOT USE IN PRODUCTION. USE THE SAS TOKEN METHOD ABOVE FOR PRODUCTION.
* This instantiation method should only be used for testing. DO NOT USE IN
* PRODUCTION. USE THE SAS TOKEN METHOD ABOVE FOR PRODUCTION.
*/
public IridaFileStorageAzureUtilityImpl(boolean deleteFromFilesystem, String url,
StorageSharedKeyCredential storageSharedKeyCredential, String containerName) {
Expand Down Expand Up @@ -152,8 +155,9 @@ public void writeFile(Path source, Path target, Path sequenceFileDir, Path seque
try {
logger.trace("Uploading file to azure: [" + target.getFileName() + "]");

// Upload the file in blocks rather than all at once to prevent a timeout if the file is large.
int blockSize = 2 * 1024 * 1024; //2MB
// Upload the file in blocks rather than all at once to prevent a timeout if the
// file is large.
int blockSize = 2 * 1024 * 1024; // 2MB
ParallelTransferOptions parallelTransferOptions = new ParallelTransferOptions(blockSize, 8, null);
blobClient.uploadFromFile(source.toString(), parallelTransferOptions, new BlobHttpHeaders(), null,
AccessTier.HOT, new BlobRequestConditions(), Duration.ofMinutes(10));
Expand Down Expand Up @@ -247,7 +251,11 @@ public InputStream getFileInputStream(Path file) {
logger.trace("Opening input stream to file on azure [" + file.toString() + "]");
BlobClient blobClient = containerClient.getBlobClient(getAzureFileAbsolutePath(file));
try {
return blobClient.openInputStream();
if (getFileSizeBytes(file) > 0L) {
return blobClient.openInputStream();
} else {
return IOUtils.toInputStream("", Charset.defaultCharset());
}
} catch (BlobStorageException e) {
logger.error("Couldn't get file input stream from azure [" + e + "]");
throw new StorageException("Couldn't get file input stream from azure", e);
Expand All @@ -262,8 +270,8 @@ public boolean isGzipped(Path file) throws IOException {
try (InputStream is = getFileInputStream(file)) {
byte[] bytes = new byte[2];
is.read(bytes);
return ((bytes[0] == (byte) (GZIPInputStream.GZIP_MAGIC)) && (bytes[1] == (byte) (GZIPInputStream.GZIP_MAGIC
>> 8)));
return ((bytes[0] == (byte) (GZIPInputStream.GZIP_MAGIC))
&& (bytes[1] == (byte) (GZIPInputStream.GZIP_MAGIC >> 8)));
}
}

Expand Down Expand Up @@ -347,7 +355,8 @@ public byte[] readAllBytes(Path file) {
* Removes the leading "/" from the absolute path returns the rest of the path.
*
* @param file The path to the file
* @return the absolute file path with the preceding slash stripped off it path includes it
* @return the absolute file path with the preceding slash stripped off it path
* includes it
*/
private String getAzureFileAbsolutePath(Path file) {
String absolutePath = file.toAbsolutePath().toString();
Expand Down Expand Up @@ -382,8 +391,10 @@ public Long getFileSizeBytes(Path file) {
public FileChunkResponse readChunk(Path file, Long seek, Long chunk) {
BlobClient blobClient = containerClient.getBlobClient(getAzureFileAbsolutePath(file));
/*
The range of bytes to read. Start at seek and get `chunk` amount of bytes from seek point.
However a smaller amount of bytes may be read, so we set the file pointer accordingly
* The range of bytes to read. Start at seek and get `chunk` amount of bytes
* from seek point.
* However a smaller amount of bytes may be read, so we set the file pointer
* accordingly
*/
BlobRange blobRange = new BlobRange(seek, chunk);
try (BlobInputStream blobInputStream = blobClient.openInputStream(blobRange, null)) {
Expand Down

0 comments on commit f3cf4f1

Please sign in to comment.