diff --git a/.travis.yml b/.travis.yml index ab421366db6a..c023116917a3 100644 --- a/.travis.yml +++ b/.travis.yml @@ -10,9 +10,6 @@ before_install: install: mvn install -DskipTests=true -Dgpg.skip=true script: - utilities/verify.sh -branches: - only: - - master after_success: - utilities/after_success.sh env: diff --git a/README.md b/README.md index 5ed44e91adfd..f6104e64a9d3 100644 --- a/README.md +++ b/README.md @@ -25,16 +25,16 @@ If you are using Maven, add this to your pom.xml file com.google.gcloud gcloud-java - 0.0.11 + 0.0.12 ``` If you are using Gradle, add this to your dependencies ```Groovy -compile 'com.google.gcloud:gcloud-java:jar:0.0.11' +compile 'com.google.gcloud:gcloud-java:jar:0.0.12' ``` If you are using SBT, add this to your dependencies ```Scala -libraryDependencies += "com.google.gcloud" % "gcloud-java" % "0.0.11" +libraryDependencies += "com.google.gcloud" % "gcloud-java" % "0.0.12" ``` Example Applications @@ -69,13 +69,15 @@ Most `gcloud-java` libraries require a project ID. There are multiple ways to s 1. Project ID supplied when building the service options 2. Project ID specified by the environment variable `GCLOUD_PROJECT` 3. App Engine project ID -4. Compute Engine project ID -5. Google Cloud SDK project ID +4. Google Cloud SDK project ID +5. Compute Engine project ID Authentication -------------- -There are multiple ways to authenticate to use Google Cloud services. +First, ensure that the necessary Google Cloud APIs are enabled for your project. To do this, follow the instructions on the [authentication document](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/authentication/readme.md#authentication) shared by all the gcloud language libraries. + +Next, choose a method for authenticating API requests from within your project: 1. When using `gcloud-java` libraries from within Compute/App Engine, no additional authentication steps are necessary. 2. When using `gcloud-java` libraries elsewhere, there are two options: @@ -164,8 +166,7 @@ import com.google.gcloud.storage.StorageOptions; import java.nio.ByteBuffer; import java.nio.channels.WritableByteChannel; -StorageOptions options = StorageOptions.builder().projectId("project").build(); -Storage storage = options.service(); +Storage storage = StorageOptions.defaultInstance().service(); BlobId blobId = BlobId.of("bucket", "blob_name"); Blob blob = Blob.load(storage, blobId); if (blob == null) { @@ -181,6 +182,11 @@ if (blob == null) { } ``` +Troubleshooting +--------------- + +To get help, follow the `gcloud-java` links in the `gcloud-*` [shared Troubleshooting document](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/troubleshooting/readme.md#troubleshooting). + Java Versions ------------- @@ -207,7 +213,7 @@ Contributing Contributions to this library are always welcome and highly encouraged. -See [CONTRIBUTING] for more information on how to get started. +See `gcloud-java`'s [CONTRIBUTING] documentation and the `gcloud-*` [shared documentation](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/contributing/readme.md#how-to-contribute-to-gcloud) for more information on how to get started. Please note that this project is released with a Contributor Code of Conduct. By participating in this project you agree to abide by its terms. See [Code of Conduct][code-of-conduct] for more information. @@ -218,7 +224,7 @@ Apache 2.0 - See [LICENSE] for more information. 
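As a concrete sketch of the first project ID option combined with a JSON service-account key (see the Authentication section above), the snippet below builds the service options explicitly. This assumes the options builder exposes an `authCredentials(...)` setter; the project ID and key path are placeholders.

```java
import com.google.gcloud.AuthCredentials;
import com.google.gcloud.storage.Storage;
import com.google.gcloud.storage.StorageOptions;

import java.io.FileInputStream;

// Explicit configuration instead of the environment-based defaults:
// supply the project ID and a service-account JSON key when building the options.
Storage storage = StorageOptions.builder()
    .projectId("my-project-id") // placeholder project ID
    .authCredentials(AuthCredentials.createForJson(
        new FileInputStream("/path/to/key.json"))) // assumed builder method, placeholder path
    .build()
    .service();
```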
[CONTRIBUTING]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CONTRIBUTING.md -[code-of-conduct]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CODE_OF_CONDUCT.md +[code-of-conduct]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CODE_OF_CONDUCT.md#contributor-code-of-conduct [LICENSE]: https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/LICENSE [TESTING]: https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/TESTING.md [cloud-platform]: https://cloud.google.com/ diff --git a/RELEASING.md b/RELEASING.md index 419f723fe328..59b6559b9f95 100644 --- a/RELEASING.md +++ b/RELEASING.md @@ -10,13 +10,15 @@ This script takes an optional argument denoting the new version. By default, if 2. Create a PR to update the pom.xml version. The PR should look something like [#225](https://github.com/GoogleCloudPlatform/gcloud-java/pull/225). After this PR is merged into GoogleCloudPlatform/gcloud-java, Travis CI will push a new website to GoogleCloudPlatform/gh-pages, push a new artifact to the Maven Central Repository, and update versions in the README files. -3. Create a release on Github manually. +3. Before moving on, verify that the artifacts have successfully been pushed to the Maven Central Repository. Open Travis CI, click the ["Build History" tab](https://travis-ci.org/GoogleCloudPlatform/gcloud-java/builds), and open the second build's logs for Step 2's PR. Be sure that you are not opening the "Pull Request" build logs. When the build finishes, scroll to the end of the log and verify that the artifacts were successfully staged and deployed. You can also search for `gcloud-java` on the [Sonatype website](https://oss.sonatype.org/#nexus-search;quick~gcloud-java) and check the latest version number. If the deployment didn't succeed because of a flaky test, rerun the build. + +4. Create a release on Github manually. Go to the [releases page](https://github.com/GoogleCloudPlatform/gcloud-java/releases) and click "Draft a new release." Use `vX.Y.Z` as the "Tag Version" and `X.Y.Z` as the "Release Title", where `X.Y.Z` is the release version as listed in the `pom.xml` files. -4. Run `utilities/update_pom_version.sh` again (to include "-SNAPSHOT" in the project version). +5. Run `utilities/update_pom_version.sh` again (to include "-SNAPSHOT" in the project version). As mentioned before, there is an optional version argument. By default, the script will update the version from "X.Y.Z" to "X.Y.Z+1-SNAPSHOT". Suppose a different version is desired, for example X+1.0.0-SNAPSHOT. Then the appropriate command to run would be `utilities/update_pom_version.sh X+1.0.0-SNAPSHOT`. -5. Create and merge in another PR to reflect the updated project version. For an example of what this PR should look like, see [#227](https://github.com/GoogleCloudPlatform/gcloud-java/pull/227). +6. Create and merge in another PR to reflect the updated project version. For an example of what this PR should look like, see [#227](https://github.com/GoogleCloudPlatform/gcloud-java/pull/227). 
### To push a snapshot version diff --git a/gcloud-java-core/README.md b/gcloud-java-core/README.md index f84fb33993e7..96b7a4e82021 100644 --- a/gcloud-java-core/README.md +++ b/gcloud-java-core/README.md @@ -17,18 +17,23 @@ If you are using Maven, add this to your pom.xml file com.google.gcloud gcloud-java-core - 0.0.11 + 0.0.12 ``` If you are using Gradle, add this to your dependencies ```Groovy -compile 'com.google.gcloud:gcloud-java-core:jar:0.0.11' +compile 'com.google.gcloud:gcloud-java-core:jar:0.0.12' ``` If you are using SBT, add this to your dependencies ```Scala -libraryDependencies += "com.google.gcloud" % "gcloud-java-core" % "0.0.11" +libraryDependencies += "com.google.gcloud" % "gcloud-java-core" % "0.0.12" ``` +Troubleshooting +--------------- + +To get help, follow the `gcloud-java` links in the `gcloud-*` [shared Troubleshooting document](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/troubleshooting/readme.md#troubleshooting). + Java Versions ------------- @@ -39,7 +44,9 @@ Contributing Contributions to this library are always welcome and highly encouraged. -See [CONTRIBUTING] for more information on how to get started. +See `gcloud-java`'s [CONTRIBUTING] documentation and the `gcloud-*` [shared documentation](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/contributing/readme.md#how-to-contribute-to-gcloud) for more information on how to get started. + +Please note that this project is released with a Contributor Code of Conduct. By participating in this project you agree to abide by its terms. See [Code of Conduct][code-of-conduct] for more information. Versioning ---------- @@ -57,5 +64,6 @@ Apache 2.0 - See [LICENSE] for more information. [CONTRIBUTING]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CONTRIBUTING.md +[code-of-conduct]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CODE_OF_CONDUCT.md#contributor-code-of-conduct [LICENSE]: https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/LICENSE [cloud-platform]: https://cloud.google.com/ diff --git a/gcloud-java-core/pom.xml b/gcloud-java-core/pom.xml index e13933bd2beb..c0248004d335 100644 --- a/gcloud-java-core/pom.xml +++ b/gcloud-java-core/pom.xml @@ -11,7 +11,7 @@ com.google.gcloud gcloud-java-pom - 0.0.12-SNAPSHOT + 0.0.13-SNAPSHOT gcloud-java-core @@ -20,12 +20,18 @@ com.google.auth google-auth-library-credentials - 0.1.0 + 0.3.1 com.google.auth google-auth-library-oauth2-http - 0.1.0 + 0.3.1 + + + com.google.guava + guava-jdk5 + + com.google.http-client diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/AuthCredentials.java b/gcloud-java-core/src/main/java/com/google/gcloud/AuthCredentials.java index 800fcf340689..3303e4f8a652 100644 --- a/gcloud-java-core/src/main/java/com/google/gcloud/AuthCredentials.java +++ b/gcloud-java-core/src/main/java/com/google/gcloud/AuthCredentials.java @@ -19,20 +19,17 @@ import static com.google.common.base.Preconditions.checkNotNull; import com.google.api.client.googleapis.auth.oauth2.GoogleCredential; -import com.google.api.client.googleapis.compute.ComputeCredential; import com.google.api.client.googleapis.extensions.appengine.auth.oauth2.AppIdentityCredential; -import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport; import com.google.api.client.http.HttpRequestInitializer; import com.google.api.client.http.HttpTransport; -import com.google.api.client.http.javanet.NetHttpTransport; import 
com.google.api.client.json.jackson.JacksonFactory; import com.google.auth.http.HttpCredentialsAdapter; import com.google.auth.oauth2.GoogleCredentials; +import com.google.auth.oauth2.ServiceAccountCredentials; import java.io.IOException; import java.io.InputStream; import java.io.Serializable; -import java.security.GeneralSecurityException; import java.security.PrivateKey; import java.util.Objects; import java.util.Set; @@ -163,56 +160,7 @@ public RestorableState capture() { } } - private static class ComputeEngineAuthCredentials extends AuthCredentials { - - private ComputeCredential computeCredential; - - private static final ComputeEngineAuthCredentialsState STATE = - new ComputeEngineAuthCredentialsState(); - - private static class ComputeEngineAuthCredentialsState - implements RestorableState, Serializable { - - private static final long serialVersionUID = -6168594072854417404L; - - @Override - public AuthCredentials restore() { - try { - return new ComputeEngineAuthCredentials(); - } catch (IOException | GeneralSecurityException e) { - throw new IllegalStateException( - "Could not restore " + ComputeEngineAuthCredentials.class.getSimpleName(), e); - } - } - - @Override - public int hashCode() { - return getClass().getName().hashCode(); - } - - @Override - public boolean equals(Object obj) { - return obj instanceof ComputeEngineAuthCredentialsState; - } - } - - ComputeEngineAuthCredentials() throws IOException, GeneralSecurityException { - computeCredential = getComputeCredential(); - } - - @Override - protected HttpRequestInitializer httpRequestInitializer(HttpTransport transport, - Set scopes) { - return computeCredential; - } - - @Override - public RestorableState capture() { - return STATE; - } - } - - private static class ApplicationDefaultAuthCredentials extends AuthCredentials { + public static class ApplicationDefaultAuthCredentials extends AuthCredentials { private GoogleCredentials googleCredentials; @@ -255,6 +203,15 @@ protected HttpRequestInitializer httpRequestInitializer(HttpTransport transport, return new HttpCredentialsAdapter(googleCredentials.createScoped(scopes)); } + public ServiceAccountAuthCredentials toServiceAccountCredentials() { + if (googleCredentials instanceof ServiceAccountCredentials) { + ServiceAccountCredentials credentials = (ServiceAccountCredentials) googleCredentials; + return new ServiceAccountAuthCredentials(credentials.getClientEmail(), + credentials.getPrivateKey()); + } + return null; + } + @Override public RestorableState capture() { return STATE; @@ -268,11 +225,6 @@ public static AuthCredentials createForAppEngine() { return AppEngineAuthCredentials.INSTANCE; } - public static AuthCredentials createForComputeEngine() - throws IOException, GeneralSecurityException { - return new ComputeEngineAuthCredentials(); - } - /** * Returns the Application Default Credentials. * @@ -327,13 +279,4 @@ public static ServiceAccountAuthCredentials createForJson(InputStream jsonCreden public static AuthCredentials noCredentials() { return ServiceAccountAuthCredentials.NO_CREDENTIALS; } - - static ComputeCredential getComputeCredential() throws IOException, GeneralSecurityException { - NetHttpTransport transport = GoogleNetHttpTransport.newTrustedTransport(); - // Try to connect using Google Compute Engine service account credentials. - ComputeCredential credential = new ComputeCredential(transport, new JacksonFactory()); - // Force token refresh to detect if we are running on Google Compute Engine. 
- credential.refreshToken(); - return credential; - } } diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/ServiceOptions.java b/gcloud-java-core/src/main/java/com/google/gcloud/ServiceOptions.java index 0793470ade83..25fda29c363d 100644 --- a/gcloud-java-core/src/main/java/com/google/gcloud/ServiceOptions.java +++ b/gcloud-java-core/src/main/java/com/google/gcloud/ServiceOptions.java @@ -109,12 +109,6 @@ public HttpTransport create() { // Maybe not on App Engine } } - // Consider Compute - try { - return AuthCredentials.getComputeCredential().getTransport(); - } catch (Exception e) { - // Maybe not on GCE - } return new NetHttpTransport(); } } @@ -342,7 +336,7 @@ protected boolean projectIdRequired() { } private static AuthCredentials defaultAuthCredentials() { - // Consider App Engine. This will not be needed once issue #21 is fixed. + // Consider App Engine. if (appEngineAppId() != null) { try { return AuthCredentials.createForAppEngine(); @@ -354,16 +348,8 @@ private static AuthCredentials defaultAuthCredentials() { try { return AuthCredentials.createApplicationDefaults(); } catch (Exception ex) { - // fallback to old-style + return AuthCredentials.noCredentials(); } - - // Consider old-style Compute. This will not be needed once issue #21 is fixed. - try { - return AuthCredentials.createForComputeEngine(); - } catch (Exception ignore) { - // Maybe not on GCE - } - return AuthCredentials.noCredentials(); } protected static String appEngineAppId() { @@ -383,19 +369,6 @@ protected String defaultProject() { } protected static String googleCloudProjectId() { - try { - URL url = new URL("http://metadata/computeMetadata/v1/project/project-id"); - HttpURLConnection connection = (HttpURLConnection) url.openConnection(); - connection.setRequestProperty("X-Google-Metadata-Request", "True"); - InputStream input = connection.getInputStream(); - if (connection.getResponseCode() == 200) { - try (BufferedReader reader = new BufferedReader(new InputStreamReader(input, UTF_8))) { - return reader.readLine(); - } - } - } catch (IOException ignore) { - // ignore - } File configDir; if (System.getenv().containsKey("CLOUDSDK_CONFIG")) { configDir = new File(System.getenv("CLOUDSDK_CONFIG")); @@ -404,38 +377,52 @@ protected static String googleCloudProjectId() { } else { configDir = new File(System.getProperty("user.home"), ".config/gcloud"); } - FileReader fileReader; + FileReader fileReader = null; try { fileReader = new FileReader(new File(configDir, "configurations/config_default")); } catch (FileNotFoundException newConfigFileNotFoundEx) { try { fileReader = new FileReader(new File(configDir, "properties")); } catch (FileNotFoundException oldConfigFileNotFoundEx) { - // return null if we can't find config file - return null; + // ignore } } - try (BufferedReader reader = new BufferedReader(fileReader)) { - String line; - String section = null; - Pattern projectPattern = Pattern.compile("^project\\s*=\\s*(.*)$"); - Pattern sectionPattern = Pattern.compile("^\\[(.*)\\]$"); - while ((line = reader.readLine()) != null) { - if (line.isEmpty() || line.startsWith(";")) { - continue; - } - line = line.trim(); - Matcher matcher = sectionPattern.matcher(line); - if (matcher.matches()) { - section = matcher.group(1); - } else if (section == null || section.equals("core")) { - matcher = projectPattern.matcher(line); + if (fileReader != null) { + try (BufferedReader reader = new BufferedReader(fileReader)) { + String line; + String section = null; + Pattern projectPattern = 
Pattern.compile("^project\\s*=\\s*(.*)$"); + Pattern sectionPattern = Pattern.compile("^\\[(.*)\\]$"); + while ((line = reader.readLine()) != null) { + if (line.isEmpty() || line.startsWith(";")) { + continue; + } + line = line.trim(); + Matcher matcher = sectionPattern.matcher(line); if (matcher.matches()) { - return matcher.group(1); + section = matcher.group(1); + } else if (section == null || section.equals("core")) { + matcher = projectPattern.matcher(line); + if (matcher.matches()) { + return matcher.group(1); + } } } + } catch (IOException ex) { + // ignore + } + } + try { + URL url = new URL("http://metadata/computeMetadata/v1/project/project-id"); + HttpURLConnection connection = (HttpURLConnection) url.openConnection(); + connection.setRequestProperty("X-Google-Metadata-Request", "True"); + InputStream input = connection.getInputStream(); + if (connection.getResponseCode() == 200) { + try (BufferedReader reader = new BufferedReader(new InputStreamReader(input, UTF_8))) { + return reader.readLine(); + } } - } catch (IOException ex) { + } catch (IOException ignore) { // ignore } // return null if can't determine @@ -450,9 +437,11 @@ protected static String getAppEngineProjectId() { try { Class factoryClass = Class.forName("com.google.appengine.api.appidentity.AppIdentityServiceFactory"); + Class serviceClass = + Class.forName("com.google.appengine.api.appidentity.AppIdentityService"); Method method = factoryClass.getMethod("getAppIdentityService"); Object appIdentityService = method.invoke(null); - method = appIdentityService.getClass().getMethod("getServiceAccountName"); + method = serviceClass.getMethod("getServiceAccountName"); String serviceAccountName = (String) method.invoke(appIdentityService); int indexOfAtSign = serviceAccountName.indexOf('@'); return serviceAccountName.substring(0, indexOfAtSign); diff --git a/gcloud-java-datastore/README.md b/gcloud-java-datastore/README.md index 6d9fc0e8c4d6..aeaae2b621c6 100644 --- a/gcloud-java-datastore/README.md +++ b/gcloud-java-datastore/README.md @@ -20,16 +20,16 @@ If you are using Maven, add this to your pom.xml file com.google.gcloud gcloud-java-datastore - 0.0.11 + 0.0.12 ``` If you are using Gradle, add this to your dependencies ```Groovy -compile 'com.google.gcloud:gcloud-java-datastore:jar:0.0.11' +compile 'com.google.gcloud:gcloud-java-datastore:jar:0.0.12' ``` If you are using SBT, add this to your dependencies ```Scala -libraryDependencies += "com.google.gcloud" % "gcloud-java-datastore" % "0.0.11" +libraryDependencies += "com.google.gcloud" % "gcloud-java-datastore" % "0.0.12" ``` Example Application @@ -56,36 +56,146 @@ Cloud Datastore for your project. See the ``gcloud-java`` API [datastore documentation][datastore-api] to learn how to interact with the Cloud Datastore using this Client Library. -Here is a code snippet showing a simple usage example from within Compute/App Engine. Note that you must [supply credentials](https://github.com/GoogleCloudPlatform/gcloud-java#authentication) and a project ID if running this snippet elsewhere. +Getting Started +--------------- +#### Prerequisites +For this tutorial, you will need a [Google Developers Console](https://console.developers.google.com/) project with the Datastore API enabled. [Follow these instructions](https://cloud.google.com/docs/authentication#preparation) to get your project set up. 
You will also need to set up the local development environment by [installing the Google Cloud SDK](https://cloud.google.com/sdk/) and running the following commands in command line: `gcloud auth login` and `gcloud config set project [YOUR PROJECT ID]`. + +#### Installation and setup +You'll need to obtain the `gcloud-java-datastore` library. See the [Quickstart](#quickstart) section to add `gcloud-java-datastore` as a dependency in your code. + +#### Creating an authorized service object +To make authenticated requests to Google Cloud Datastore, you must create a service object with credentials. You can then make API calls by calling methods on the Datastore service object. The simplest way to authenticate is to use [Application Default Credentials](https://developers.google.com/identity/protocols/application-default-credentials). These credentials are automatically inferred from your environment, so you only need the following code to create your service object: ```java import com.google.gcloud.datastore.Datastore; import com.google.gcloud.datastore.DatastoreOptions; -import com.google.gcloud.datastore.DateTime; + +Datastore datastore = DatastoreOptions.defaultInstance().service(); +``` + +For other authentication options, see the [Authentication](https://github.com/GoogleCloudPlatform/gcloud-java#authentication) page. + +#### Storing data +Objects in Datastore are known as entities. Entities are grouped by "kind" and have keys for easy access. In this code snippet, we will create a new entity representing a person and store that data by the person's name. First, add the following imports at the top of your file: + +```java import com.google.gcloud.datastore.Entity; import com.google.gcloud.datastore.Key; import com.google.gcloud.datastore.KeyFactory; +``` -Datastore datastore = DatastoreOptions.defaultInstance().service(); -KeyFactory keyFactory = datastore.newKeyFactory().kind(KIND); -Key key = keyFactory.newKey(keyName); -Entity entity = datastore.get(key); -if (entity == null) { - entity = Entity.builder(key) - .set("name", "John Do") - .set("age", 30) - .set("access_time", DateTime.now()) - .build(); - datastore.put(entity); -} else { - System.out.println("Updating access_time for " + entity.getString("name")); - entity = Entity.builder(entity) - .set("access_time", DateTime.now()) - .build(); - datastore.update(entity); +Then add the following code to put an entity in Datastore. + +```java +KeyFactory keyFactory = datastore.newKeyFactory().kind("Person"); +Key key = keyFactory.newKey("John Doe"); +Entity entity = Entity.builder(key) + .set("age", 51) + .set("favorite_food", "pizza") + .build(); +datastore.put(entity); +``` + +Later, if you want to get this entity back, add the following to your code: + +```java +Entity johnEntity = datastore.get(key); +``` + +#### Running a query +In addition to retrieving entities by their keys, you can perform queries to retrieve entities by the values of their properties. A typical query includes an entity kind, filters to select entities with matching values, and sort orders to sequence the results. `gcloud-java-datastore` supports two types of queries: `StructuredQuery` (that allows you to construct query elements) and `GqlQuery` (which operates using [GQL syntax](https://cloud.google.com/datastore/docs/apis/gql/gql_reference)) in string format. In this tutorial, we will use a simple `StructuredQuery`. + +Suppose that you've added more people to Datastore, and now you want to find all people whose favorite food is pizza. 
Import the following: + +```java +import com.google.gcloud.datastore.Query; +import com.google.gcloud.datastore.QueryResults; +import com.google.gcloud.datastore.StructuredQuery; +import com.google.gcloud.datastore.StructuredQuery.PropertyFilter; +``` + +Then add the following code to your program: + +```java +Query query = Query.entityQueryBuilder() + .kind("Person") + .filter(PropertyFilter.eq("favorite_food", "pizza")) + .build(); +QueryResults results = datastore.run(query); +while (results.hasNext()) { + Entity currentEntity = results.next(); +} +``` + +Cloud Datastore relies on indexing to run queries. Indexing is turned on by default for most types of properties. To read more about indexing, see the [Cloud Datastore Index Configuration documentation](https://cloud.google.com/datastore/docs/tools/indexconfig). + +#### Complete source code + +Here we put together all the code shown above into one program. This program assumes that you are running on Compute Engine or from your own desktop. To run this example on App Engine, simply move the code from the main method to your application's servlet class. + +```java +import com.google.gcloud.datastore.Datastore; +import com.google.gcloud.datastore.DatastoreOptions; +import com.google.gcloud.datastore.Entity; +import com.google.gcloud.datastore.Key; +import com.google.gcloud.datastore.KeyFactory; +import com.google.gcloud.datastore.Query; +import com.google.gcloud.datastore.QueryResults; +import com.google.gcloud.datastore.StructuredQuery; +import com.google.gcloud.datastore.StructuredQuery.PropertyFilter; + +public class GcloudJavaDatastoreExample { + + public static void main(String[] args) { + // Create datastore service object. + // By default, credentials are inferred from the runtime environment. + Datastore datastore = DatastoreOptions.defaultInstance().service(); + + // Add an entity to Datastore + KeyFactory keyFactory = datastore.newKeyFactory().kind("Person"); + Key key = keyFactory.newKey("John Doe"); + Entity entity = Entity.builder(key) + .set("age", 51) + .set("favorite_food", "pizza") + .build(); + datastore.put(entity); + + // Get an entity from Datastore + Entity johnEntity = datastore.get(key); + + // Add a couple more entities to make the query results more interesting + Key janeKey = keyFactory.newKey("Jane Doe"); + Entity janeEntity = Entity.builder(janeKey) + .set("age", 44) + .set("favorite_food", "pizza") + .build(); + Key joeKey = keyFactory.newKey("Joe Shmoe"); + Entity joeEntity = Entity.builder(joeKey) + .set("age", 27) + .set("favorite_food", "sushi") + .build(); + datastore.put(janeEntity, joeEntity); + + // Run a query + Query query = Query.entityQueryBuilder() + .kind("Person") + .filter(PropertyFilter.eq("favorite_food", "pizza")) + .build(); + QueryResults results = datastore.run(query); + while (results.hasNext()) { + Entity currentEntity = results.next(); + // Do something using the entity. (e.g. send an invite a pizza party) + } + } } ``` +Troubleshooting +--------------- + +To get help, follow the `gcloud-java` links in the `gcloud-*` [shared Troubleshooting document](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/troubleshooting/readme.md#troubleshooting). + Java Versions ------------- @@ -112,7 +222,9 @@ Contributing Contributions to this library are always welcome and highly encouraged. -See [CONTRIBUTING] for more information on how to get started. 
+See `gcloud-java`'s [CONTRIBUTING] documentation and the `gcloud-*` [shared documentation](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/contributing/readme.md#how-to-contribute-to-gcloud) for more information on how to get started. + +Please note that this project is released with a Contributor Code of Conduct. By participating in this project you agree to abide by its terms. See [Code of Conduct][code-of-conduct] for more information. License ------- @@ -121,6 +233,7 @@ Apache 2.0 - See [LICENSE] for more information. [CONTRIBUTING]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CONTRIBUTING.md +[code-of-conduct]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CODE_OF_CONDUCT.md#contributor-code-of-conduct [LICENSE]: https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/LICENSE [TESTING]: https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/TESTING.md#testing-code-that-uses-datastore [cloud-platform]: https://cloud.google.com/ diff --git a/gcloud-java-datastore/pom.xml b/gcloud-java-datastore/pom.xml index b58e9e0ffc74..4bf5caab2ff8 100644 --- a/gcloud-java-datastore/pom.xml +++ b/gcloud-java-datastore/pom.xml @@ -11,7 +11,7 @@ com.google.gcloud gcloud-java-pom - 0.0.12-SNAPSHOT + 0.0.13-SNAPSHOT gcloud-java-datastore @@ -27,6 +27,12 @@ google-api-services-datastore-protobuf v1beta2-rev1-2.1.2 compile + + + com.google.api-client + google-api-client + + junit diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/FullEntity.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/FullEntity.java index bb08fca12e3c..55c573b9a636 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/FullEntity.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/FullEntity.java @@ -19,7 +19,8 @@ import com.google.api.services.datastore.DatastoreV1; /** - * A full entity is a {@link BaseEntity} that with a complete set of properties. + * A full entity is a {@link BaseEntity} that holds all the properties associated with a + * Datastore entity (as opposed to {@link ProjectionEntity}). */ public class FullEntity extends BaseEntity { diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Query.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Query.java index 343535d94628..5791d37e9426 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Query.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Query.java @@ -35,6 +35,10 @@ * A Google Cloud Datastore query. * For usage examples see {@link GqlQuery} and {@link StructuredQuery}. * + * Note that queries require proper indexing. See + * + * Cloud Datastore Index Configuration for help configuring indexes. + * * @param the type of the values returned by this query. * @see Datastore Queries */ diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/package-info.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/package-info.java index 2135267d9ac4..1404b2817802 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/package-info.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/package-info.java @@ -17,10 +17,9 @@ /** * A client to the Google Cloud Datastore. * - *

A simple usage example:
+ * Here's a simple usage example for using gcloud-java from App/Compute Engine:
  * <pre> {@code
- * DatastoreOptions options = DatastoreOptions.builder().projectId(PROJECT_ID).build();
- * Datastore datastore = options.service();
+ * Datastore datastore = DatastoreOptions.defaultInstance().service();
  * KeyFactory keyFactory = datastore.newKeyFactory().kind(kind);
  * Key key = keyFactory.newKey(keyName);
  * Entity entity = datastore.get(key);
@@ -47,6 +46,11 @@
  * }
  * } </pre>
  *
 + *

When using gcloud-java from outside of App/Compute Engine, you have to specify a + * project ID and + * provide + * credentials. * @see Google Cloud Datastore */ package com.google.gcloud.datastore; diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DefaultDatastoreRpc.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DefaultDatastoreRpc.java index ccb89267a29e..028027f4bc33 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DefaultDatastoreRpc.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DefaultDatastoreRpc.java @@ -42,9 +42,8 @@ import org.json.JSONTokener; import java.net.InetAddress; -import java.net.MalformedURLException; +import java.net.SocketTimeoutException; import java.net.URL; -import java.net.UnknownHostException; import java.util.HashMap; import java.util.Map; @@ -95,7 +94,7 @@ private static boolean isLocalHost(String host) { } InetAddress hostAddr = InetAddress.getByName(new URL(normalizedHost).getHost()); return hostAddr.isAnyLocalAddress() || hostAddr.isLoopbackAddress(); - } catch (UnknownHostException | MalformedURLException e) { + } catch (Exception e) { // ignore } } @@ -123,9 +122,17 @@ private static DatastoreRpcException translate(DatastoreException exception) { if (reason == null) { reason = HTTP_STATUS_TO_REASON.get(exception.getCode()); } - return reason != null - ? new DatastoreRpcException(reason) - : new DatastoreRpcException("Unknown", exception.getCode(), false, message); + if (reason != null) { + return new DatastoreRpcException(reason); + } else { + boolean retryable = false; + reasonStr = "Unknown"; + if (exception.getCause() instanceof SocketTimeoutException) { + retryable = true; + reasonStr = "Request timeout"; + } + return new DatastoreRpcException(reasonStr, exception.getCode(), retryable, message); + } } @Override diff --git a/gcloud-java-examples/README.md b/gcloud-java-examples/README.md index bc738de41b51..2ccec3e81571 100644 --- a/gcloud-java-examples/README.md +++ b/gcloud-java-examples/README.md @@ -17,16 +17,16 @@ If you are using Maven, add this to your pom.xml file com.google.gcloud gcloud-java-examples - 0.0.11 + 0.0.12 ``` If you are using Gradle, add this to your dependencies ```Groovy -compile 'com.google.gcloud:gcloud-java-examples:jar:0.0.11' +compile 'com.google.gcloud:gcloud-java-examples:jar:0.0.12' ``` If you are using SBT, add this to your dependencies ```Scala -libraryDependencies += "com.google.gcloud" % "gcloud-java-examples" % "0.0.11" +libraryDependencies += "com.google.gcloud" % "gcloud-java-examples" % "0.0.12" ``` To run examples from your command line: @@ -58,6 +58,11 @@ To run examples from your command line: $mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.StorageExample" -Dexec.args="delete test.txt" ``` +Troubleshooting +--------------- + +To get help, follow the `gcloud-java` links in the `gcloud-*` [shared Troubleshooting document](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/troubleshooting/readme.md#troubleshooting). + Java Versions ------------- @@ -77,7 +82,9 @@ Contributing Contributions to this library are always welcome and highly encouraged. -See [CONTRIBUTING] for more information on how to get started. +See `gcloud-java`'s [CONTRIBUTING] documentation and the `gcloud-*` [shared documentation](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/contributing/readme.md#how-to-contribute-to-gcloud) for more information on how to get started. 
+ +Please note that this project is released with a Contributor Code of Conduct. By participating in this project you agree to abide by its terms. See [Code of Conduct][code-of-conduct] for more information. License ------- @@ -86,6 +93,7 @@ Apache 2.0 - See [LICENSE] for more information. [CONTRIBUTING]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CONTRIBUTING.md +[code-of-conduct]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CODE_OF_CONDUCT.md#contributor-code-of-conduct [LICENSE]: https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/LICENSE [cloud-platform]: https://cloud.google.com/ [developers-console]:https://console.developers.google.com/ diff --git a/gcloud-java-examples/pom.xml b/gcloud-java-examples/pom.xml index c461846acab2..2b02be47cc40 100644 --- a/gcloud-java-examples/pom.xml +++ b/gcloud-java-examples/pom.xml @@ -11,7 +11,7 @@ com.google.gcloud gcloud-java-pom - 0.0.12-SNAPSHOT + 0.0.13-SNAPSHOT gcloud-java-examples diff --git a/gcloud-java-resourcemanager/pom.xml b/gcloud-java-resourcemanager/pom.xml index 4a4899231177..515a5c50e04f 100644 --- a/gcloud-java-resourcemanager/pom.xml +++ b/gcloud-java-resourcemanager/pom.xml @@ -11,7 +11,7 @@ com.google.gcloud gcloud-java-pom - 0.0.11-SNAPSHOT + 0.0.13-SNAPSHOT gcloud-java-resourcemanager diff --git a/gcloud-java-storage/README.md b/gcloud-java-storage/README.md index f2b99388ff0f..d679d3da9bc1 100644 --- a/gcloud-java-storage/README.md +++ b/gcloud-java-storage/README.md @@ -20,16 +20,16 @@ If you are using Maven, add this to your pom.xml file com.google.gcloud gcloud-java-storage - 0.0.11 + 0.0.12 ``` If you are using Gradle, add this to your dependencies ```Groovy -compile 'com.google.gcloud:gcloud-java-storage:jar:0.0.11' +compile 'com.google.gcloud:gcloud-java-storage:jar:0.0.12' ``` If you are using SBT, add this to your dependencies ```Scala -libraryDependencies += "com.google.gcloud" % "gcloud-java-storage" % "0.0.11" +libraryDependencies += "com.google.gcloud" % "gcloud-java-storage" % "0.0.12" ``` Example Application @@ -56,32 +56,146 @@ Cloud Storage for your project. See the ``gcloud-java`` API [storage documentation][storage-api] to learn how to interact with the Cloud Storage using this Client Library. -Here is a code snippet showing a simple usage example from within Compute/App Engine. Note that you must [supply credentials](https://github.com/GoogleCloudPlatform/gcloud-java#authentication) and a project ID if running this snippet elsewhere. +Getting Started +--------------- +#### Prerequisites +For this tutorial, you will need a [Google Developers Console](https://console.developers.google.com/) project with the Storage JSON API enabled. You will need to [enable billing](https://support.google.com/cloud/answer/6158867?hl=en) to use Google Cloud Storage. [Follow these instructions](https://cloud.google.com/docs/authentication#preparation) to get your project set up. You will also need to set up the local development environment by [installing the Google Cloud SDK](https://cloud.google.com/sdk/) and running the following commands in command line: `gcloud auth login` and `gcloud config set project [YOUR PROJECT ID]`. + +#### Installation and setup +You'll need to obtain the `gcloud-java-storage` library. See the [Quickstart](#quickstart) section to add `gcloud-java-storage` as a dependency in your code. 
+ +#### Creating an authorized service object +To make authenticated requests to Google Cloud Storage, you must create a service object with credentials. You can then make API calls by calling methods on the Storage service object. The simplest way to authenticate is to use [Application Default Credentials](https://developers.google.com/identity/protocols/application-default-credentials). These credentials are automatically inferred from your environment, so you only need the following code to create your service object: + +```java +import com.google.gcloud.storage.Storage; +import com.google.gcloud.storage.StorageOptions; + +Storage storage = StorageOptions.defaultInstance().service(); +``` + +For other authentication options, see the [Authentication](https://github.com/GoogleCloudPlatform/gcloud-java#authentication) page. + +#### Storing data +Stored objects are called "blobs" in `gcloud-java` and are organized into containers called "buckets". In this code snippet, we will create a new bucket and upload a blob to that bucket. + +Add the following imports at the top of your file: + +```java +import static java.nio.charset.StandardCharsets.UTF_8; + +import com.google.gcloud.storage.BlobId; +import com.google.gcloud.storage.BlobInfo; +import com.google.gcloud.storage.BucketInfo; +``` + +Then add the following code to create a bucket and upload a simple blob. + +*Important: Bucket names have to be globally unique. If you choose a bucket name that already exists, you'll get a helpful error message telling you to choose another name. In the code below, replace "my_unique_bucket" with a unique bucket name. See more about naming rules [here](https://cloud.google.com/storage/docs/bucket-naming?hl=en#requirements).* + +```java +// Create a bucket +String bucketName = "my_unique_bucket"; // Change this to something unique +BucketInfo bucketInfo = storage.create(BucketInfo.of(bucketName)); + +// Upload a blob to the newly created bucket +BlobId blobId = BlobId.of(bucketName, "my_blob_name"); +BlobInfo blobInfo = storage.create( + BlobInfo.builder(blobId).contentType("text/plain").build(), + "a simple blob".getBytes(UTF_8)); +``` + +At this point, you will be able to see your newly created bucket and blob on the Google Developers Console. + +#### Retrieving data +Now that we have content uploaded to the server, we can see how to read data from the server. Add the following line to your program to get back the blob we uploaded. + +```java +String blobContent = new String(storage.readAllBytes(blobId), UTF_8); +``` + +#### Listing buckets and contents of buckets +Suppose that you've added more buckets and blobs, and now you want to see the names of your buckets and the contents of each one. Add the following imports: + +```java +import java.util.Iterator; +``` + +Then add the following code to list all your buckets and all the blobs inside your newly created bucket. + +```java +// List all your buckets +Iterator bucketInfoIterator = storage.list().iterateAll(); +System.out.println("My buckets:"); +while (bucketInfoIterator.hasNext()) { + System.out.println(bucketInfoIterator.next()); +} + +// List the blobs in a particular bucket +Iterator blobIterator = storage.list(bucketName).iterateAll(); +System.out.println("My blobs:"); +while (blobIterator.hasNext()) { + System.out.println(blobIterator.next()); +} +``` + +#### Complete source code + +Here we put together all the code shown above into one program. This program assumes that you are running on Compute Engine or from your own desktop. 
To run this example on App Engine, simply move the code from the main method to your application's servlet class and change the print statements to display on your webpage. ```java import static java.nio.charset.StandardCharsets.UTF_8; -import com.google.gcloud.storage.Blob; +import com.google.gcloud.storage.BlobId; +import com.google.gcloud.storage.BlobInfo; +import com.google.gcloud.storage.BucketInfo; import com.google.gcloud.storage.Storage; import com.google.gcloud.storage.StorageOptions; -import java.nio.ByteBuffer; -import java.nio.channels.WritableByteChannel; - -Storage storage = StorageOptions.getDefaultInstance().service(); -Blob blob = new Blob(storage, "bucket", "blob_name"); -if (!blob.exists()) { - storage2.create(blob.info(), "Hello, Cloud Storage!".getBytes(UTF_8)); -} else { - System.out.println("Updating content for " + blob.info().name()); - byte[] prevContent = blob.content(); - System.out.println(new String(prevContent, UTF_8)); - WritableByteChannel channel = blob.writer(); - channel.write(ByteBuffer.wrap("Updated content".getBytes(UTF_8))); - channel.close(); +import java.util.Iterator; + +public class GcloudStorageExample { + + public static void main(String[] args) { + // Create a service object + // Credentials are inferred from the environment. + Storage storage = StorageOptions.defaultInstance().service(); + + // Create a bucket + String bucketName = "my_unique_bucket"; // Change this to something unique + BucketInfo bucketInfo = storage.create(BucketInfo.of(bucketName)); + + // Upload a blob to the newly created bucket + BlobId blobId = BlobId.of(bucketName, "my_blob_name"); + BlobInfo blobInfo = storage.create( + BlobInfo.builder(blobId).contentType("text/plain").build(), + "a simple blob".getBytes(UTF_8)); + + // Retrieve a blob from the server + String blobContent = new String(storage.readAllBytes(blobId), UTF_8); + + // List all your buckets + Iterator bucketInfoIterator = storage.list().iterateAll(); + System.out.println("My buckets:"); + while (bucketInfoIterator.hasNext()) { + System.out.println(bucketInfoIterator.next()); + } + + // List the blobs in a particular bucket + Iterator blobIterator = storage.list(bucketName).iterateAll(); + System.out.println("My blobs:"); + while (blobIterator.hasNext()) { + System.out.println(blobIterator.next()); + } + } } ``` +Troubleshooting +--------------- + +To get help, follow the `gcloud-java` links in the `gcloud-*` [shared Troubleshooting document](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/troubleshooting/readme.md#troubleshooting). + Java Versions ------------- @@ -108,7 +222,9 @@ Contributing Contributions to this library are always welcome and highly encouraged. -See [CONTRIBUTING] for more information on how to get started. +See `gcloud-java`'s [CONTRIBUTING] documentation and the `gcloud-*` [shared documentation](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/contributing/readme.md#how-to-contribute-to-gcloud) for more information on how to get started. + +Please note that this project is released with a Contributor Code of Conduct. By participating in this project you agree to abide by its terms. See [Code of Conduct][code-of-conduct] for more information. License ------- @@ -117,6 +233,7 @@ Apache 2.0 - See [LICENSE] for more information. 
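If you want to remove the objects created by the storage walkthrough above, a minimal cleanup sketch using the batch-delete helper documented later in this change might look like the following; it reuses the `storage` object and `bucketName` from the example, and the blob name is the placeholder used there.

```java
import com.google.gcloud.storage.Blob;
import com.google.gcloud.storage.BlobId;

import java.util.List;

// Each entry is true if the corresponding blob was deleted,
// false if it was missing or could not be deleted.
List<Boolean> deleted = Blob.delete(storage, BlobId.of(bucketName, "my_blob_name"));
```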
[CONTRIBUTING]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CONTRIBUTING.md +[code-of-conduct]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CODE_OF_CONDUCT.md#contributor-code-of-conduct [LICENSE]: https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/LICENSE [TESTING]: https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/TESTING.md#testing-code-that-uses-storage [cloud-platform]: https://cloud.google.com/ @@ -125,3 +242,4 @@ Apache 2.0 - See [LICENSE] for more information. [cloud-storage-docs]: https://cloud.google.com/storage/docs/overview [cloud-storage-create-bucket]: https://cloud.google.com/storage/docs/cloud-console#_creatingbuckets [storage-api]: http://googlecloudplatform.github.io/gcloud-java/apidocs/index.html?com/google/gcloud/storage/package-summary.html +[cloud-storage-activation]:https://cloud.google.com/storage/docs/signup?hl=en diff --git a/gcloud-java-storage/pom.xml b/gcloud-java-storage/pom.xml index ef3ddec79816..3e0b6a9490a8 100644 --- a/gcloud-java-storage/pom.xml +++ b/gcloud-java-storage/pom.xml @@ -11,7 +11,7 @@ com.google.gcloud gcloud-java-pom - 0.0.12-SNAPSHOT + 0.0.13-SNAPSHOT gcloud-java-storage @@ -32,6 +32,10 @@ com.google.guava guava-jdk5 + + com.google.api-client + google-api-client + diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/spi/DefaultStorageRpc.java b/gcloud-java-storage/src/main/java/com/google/gcloud/spi/DefaultStorageRpc.java index b1e188f1d1fb..d874f99ebb4c 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/spi/DefaultStorageRpc.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/spi/DefaultStorageRpc.java @@ -30,11 +30,11 @@ import static com.google.gcloud.spi.StorageRpc.Option.PREDEFINED_DEFAULT_OBJECT_ACL; import static com.google.gcloud.spi.StorageRpc.Option.PREFIX; import static com.google.gcloud.spi.StorageRpc.Option.VERSIONS; +import static java.net.HttpURLConnection.HTTP_NOT_FOUND; import com.google.api.client.googleapis.batch.json.JsonBatchCallback; import com.google.api.client.googleapis.json.GoogleJsonError; import com.google.api.client.googleapis.json.GoogleJsonResponseException; -import com.google.api.client.googleapis.media.MediaHttpDownloader; import com.google.api.client.http.ByteArrayContent; import com.google.api.client.http.GenericUrl; import com.google.api.client.http.HttpHeaders; @@ -58,7 +58,9 @@ import com.google.api.services.storage.model.Objects; import com.google.api.services.storage.model.StorageObject; import com.google.common.base.MoreObjects; +import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.gcloud.storage.StorageException; import com.google.gcloud.storage.StorageOptions; @@ -66,7 +68,9 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; +import java.net.SocketTimeoutException; import java.util.ArrayList; +import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; @@ -80,6 +84,7 @@ public class DefaultStorageRpc implements StorageRpc { // see: https://cloud.google.com/storage/docs/concepts-techniques#practices private static final Set RETRYABLE_CODES = ImmutableSet.of(504, 503, 502, 500, 429, 408); private static final long MEGABYTE = 1024L * 1024L; + private static final int MAX_BATCH_DELETES = 100; public DefaultStorageRpc(StorageOptions options) { HttpTransport 
transport = options.httpTransportFactory().create(); @@ -93,10 +98,15 @@ public DefaultStorageRpc(StorageOptions options) { private static StorageException translate(IOException exception) { StorageException translated; - if (exception instanceof GoogleJsonResponseException) { + if (exception instanceof GoogleJsonResponseException + && ((GoogleJsonResponseException) exception).getDetails() != null) { translated = translate(((GoogleJsonResponseException) exception).getDetails()); } else { - translated = new StorageException(0, exception.getMessage(), false); + boolean retryable = false; + if (exception instanceof SocketTimeoutException) { + retryable = true; + } + translated = new StorageException(0, exception.getMessage(), retryable); } translated.initCause(exception); return translated; @@ -190,7 +200,11 @@ public Bucket get(Bucket bucket, Map options) { .setFields(FIELDS.getString(options)) .execute(); } catch (IOException ex) { - throw translate(ex); + StorageException serviceException = translate(ex); + if (serviceException.code() == HTTP_NOT_FOUND) { + return null; + } + throw serviceException; } } @@ -199,7 +213,11 @@ public StorageObject get(StorageObject object, Map options) { try { return getRequest(object, options).execute(); } catch (IOException ex) { - throw translate(ex); + StorageException serviceException = translate(ex); + if (serviceException.code() == HTTP_NOT_FOUND) { + return null; + } + throw serviceException; } } @@ -207,6 +225,7 @@ private Storage.Objects.Get getRequest(StorageObject object, Map opti throws IOException { return storage.objects() .get(object.getBucket(), object.getName()) + .setGeneration(object.getGeneration()) .setProjection(DEFAULT_PROJECTION) .setIfMetagenerationMatch(IF_METAGENERATION_MATCH.getLong(options)) .setIfMetagenerationNotMatch(IF_METAGENERATION_NOT_MATCH.getLong(options)) @@ -263,7 +282,7 @@ public boolean delete(Bucket bucket, Map options) { return true; } catch (IOException ex) { StorageException serviceException = translate(ex); - if (serviceException.code() == 404) { + if (serviceException.code() == HTTP_NOT_FOUND) { return false; } throw serviceException; @@ -277,7 +296,7 @@ public boolean delete(StorageObject blob, Map options) { return true; } catch (IOException ex) { StorageException serviceException = translate(ex); - if (serviceException.code() == 404) { + if (serviceException.code() == HTTP_NOT_FOUND) { return false; } throw serviceException; @@ -288,6 +307,7 @@ private Storage.Objects.Delete deleteRequest(StorageObject blob, Map throws IOException { return storage.objects() .delete(blob.getBucket(), blob.getName()) + .setGeneration(blob.getGeneration()) .setIfMetagenerationMatch(IF_METAGENERATION_MATCH.getLong(options)) .setIfMetagenerationNotMatch(IF_METAGENERATION_NOT_MATCH.getLong(options)) .setIfGenerationMatch(IF_GENERATION_MATCH.getLong(options)) @@ -332,6 +352,7 @@ public byte[] load(StorageObject from, Map options) try { Storage.Objects.Get getRequest = storage.objects() .get(from.getBucket(), from.getName()) + .setGeneration(from.getGeneration()) .setIfMetagenerationMatch(IF_METAGENERATION_MATCH.getLong(options)) .setIfMetagenerationNotMatch(IF_METAGENERATION_NOT_MATCH.getLong(options)) .setIfGenerationMatch(IF_GENERATION_MATCH.getLong(options)) @@ -347,6 +368,24 @@ public byte[] load(StorageObject from, Map options) @Override public BatchResponse batch(BatchRequest request) throws StorageException { + List>>> partitionedToDelete = + Lists.partition(request.toDelete, MAX_BATCH_DELETES); + Iterator>>> iterator = 
partitionedToDelete.iterator(); + BatchRequest chunkRequest = new BatchRequest(iterator.hasNext() ? iterator.next() : + ImmutableList.>>of(), request.toUpdate, request.toGet); + BatchResponse response = batchChunk(chunkRequest); + Map> deletes = + Maps.newHashMapWithExpectedSize(request.toDelete.size()); + deletes.putAll(response.deletes); + while (iterator.hasNext()) { + chunkRequest = new BatchRequest(iterator.next(), null, null); + BatchResponse deleteBatchResponse = batchChunk(chunkRequest); + deletes.putAll(deleteBatchResponse.deletes); + } + return new BatchResponse(deletes, response.updates, response.gets); + } + + private BatchResponse batchChunk(BatchRequest request) { com.google.api.client.googleapis.batch.BatchRequest batch = storage.batch(); final Map> deletes = Maps.newConcurrentMap(); @@ -364,7 +403,11 @@ public void onSuccess(Void ignore, HttpHeaders responseHeaders) { @Override public void onFailure(GoogleJsonError e, HttpHeaders responseHeaders) { - deletes.put(tuple.x(), Tuple.of(null, translate(e))); + if (e.getCode() == HTTP_NOT_FOUND) { + deletes.put(tuple.x(), Tuple.of(Boolean.FALSE, null)); + } else { + deletes.put(tuple.x(), Tuple.of(null, translate(e))); + } } }); } @@ -393,8 +436,13 @@ public void onSuccess(StorageObject storageObject, HttpHeaders responseHeaders) @Override public void onFailure(GoogleJsonError e, HttpHeaders responseHeaders) { - gets.put(tuple.x(), - Tuple.of(null, translate(e))); + if (e.getCode() == HTTP_NOT_FOUND) { + gets.put(tuple.x(), + Tuple.of(null, null)); + } else { + gets.put(tuple.x(), + Tuple.of(null, translate(e))); + } } }); } @@ -406,20 +454,23 @@ public void onFailure(GoogleJsonError e, HttpHeaders responseHeaders) { } @Override - public byte[] read(StorageObject from, Map options, long position, int bytes) - throws StorageException { + public Tuple read(StorageObject from, Map options, long position, + int bytes) throws StorageException { try { - Get req = storage.objects().get(from.getBucket(), from.getName()); - req.setIfMetagenerationMatch(IF_METAGENERATION_MATCH.getLong(options)) + Get req = storage.objects() + .get(from.getBucket(), from.getName()) + .setGeneration(from.getGeneration()) + .setIfMetagenerationMatch(IF_METAGENERATION_MATCH.getLong(options)) .setIfMetagenerationNotMatch(IF_METAGENERATION_NOT_MATCH.getLong(options)) .setIfGenerationMatch(IF_GENERATION_MATCH.getLong(options)) .setIfGenerationNotMatch(IF_GENERATION_NOT_MATCH.getLong(options)); - MediaHttpDownloader downloader = req.getMediaHttpDownloader(); - downloader.setContentRange(position, (int) position + bytes); - downloader.setDirectDownloadEnabled(true); + StringBuilder range = new StringBuilder(); + range.append("bytes=").append(position).append("-").append(position + bytes - 1); + req.getRequestHeaders().setRange(range.toString()); ByteArrayOutputStream output = new ByteArrayOutputStream(); - req.executeMediaAndDownloadTo(output); - return output.toByteArray(); + req.executeMedia().download(output); + String etag = req.getLastResponseHeaders().getETag(); + return Tuple.of(etag, output.toByteArray()); } catch (IOException ex) { throw translate(ex); } @@ -521,6 +572,7 @@ private RewriteResponse rewrite(RewriteRequest req, String token) throws Storage com.google.api.services.storage.model.RewriteResponse rewriteReponse = storage.objects() .rewrite(req.source.getBucket(), req.source.getName(), req.target.getBucket(), req.target.getName(), req.target.getContentType() != null ? 
req.target : null) + .setSourceGeneration(req.source.getGeneration()) .setRewriteToken(token) .setMaxBytesRewrittenPerCall(maxBytesRewrittenPerCall) .setProjection(DEFAULT_PROJECTION) diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/spi/StorageRpc.java b/gcloud-java-storage/src/main/java/com/google/gcloud/spi/StorageRpc.java index e4b1be785951..c5fd1b3e2250 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/spi/StorageRpc.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/spi/StorageRpc.java @@ -227,8 +227,18 @@ StorageObject create(StorageObject object, InputStream content, Map o Tuple> list(String bucket, Map options) throws StorageException; + /** + * Returns the requested bucket or {@code null} if not found. + * + * @throws StorageException upon failure + */ Bucket get(Bucket bucket, Map options) throws StorageException; + /** + * Returns the requested storage object or {@code null} if not found. + * + * @throws StorageException upon failure + */ StorageObject get(StorageObject object, Map options) throws StorageException; @@ -249,7 +259,7 @@ StorageObject compose(Iterable sources, StorageObject target, byte[] load(StorageObject storageObject, Map options) throws StorageException; - byte[] read(StorageObject from, Map options, long position, int bytes) + Tuple read(StorageObject from, Map options, long position, int bytes) throws StorageException; String open(StorageObject object, Map options) throws StorageException; diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Blob.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Blob.java index a8e315be0e45..503cad361e29 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Blob.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Blob.java @@ -281,7 +281,7 @@ public CopyWriter copyTo(BlobId targetBlob, BlobSourceOption... options) { * Deletes this blob. * * @param options blob delete options - * @return true if blob was deleted + * @return {@code true} if blob was deleted, {@code false} if it was not found * @throws StorageException upon failure */ public boolean delete(BlobSourceOption... options) { @@ -422,8 +422,8 @@ public Blob apply(BlobInfo f) { * @param storage the storage service used to issue the request * @param blobs the blobs to delete * @return an immutable list of booleans. If a blob has been deleted the corresponding item in the - * list is {@code true}. If deletion failed or access to the resource was denied the item is - * {@code false}. + * list is {@code true}. If a blob was not found, deletion failed or access to the resource + * was denied the corresponding item is {@code false}. * @throws StorageException upon failure */ public static List delete(Storage storage, BlobId... blobs) { diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobId.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobId.java index d1209826cc3e..d30003d632db 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobId.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobId.java @@ -25,17 +25,21 @@ import java.util.Objects; /** - * Google Storage object identifier. + * Google Storage Object identifier. A {@code BlobId} object includes the name of the containing + * bucket, the blob's name and possibly the blob's generation. If {@link #generation()} is + * {@code null} the identifier refers to the latest blob's generation. 
*/ public final class BlobId implements Serializable { private static final long serialVersionUID = -6156002883225601925L; private final String bucket; private final String name; + private final Long generation; - private BlobId(String bucket, String name) { + private BlobId(String bucket, String name, Long generation) { this.bucket = bucket; this.name = name; + this.generation = generation; } /** @@ -52,43 +56,66 @@ public String name() { return name; } + /** + * Returns blob's data generation. Used for versioning. + */ + public Long generation() { + return generation; + } + @Override public String toString() { return MoreObjects.toStringHelper(this) .add("bucket", bucket()) .add("name", name()) + .add("generation", generation()) .toString(); } @Override public int hashCode() { - return Objects.hash(bucket, name); + return Objects.hash(bucket, name, generation); } @Override public boolean equals(Object obj) { return obj instanceof BlobId && Objects.equals(bucket, ((BlobId) obj).bucket) - && Objects.equals(name, ((BlobId) obj).name); + && Objects.equals(name, ((BlobId) obj).name) + && Objects.equals(generation, ((BlobId) obj).generation); } StorageObject toPb() { StorageObject storageObject = new StorageObject(); storageObject.setBucket(bucket); storageObject.setName(name); + storageObject.setGeneration(generation); return storageObject; } /** - * Creates a blob identifier. + * Creates a blob identifier. Generation is set to {@code null}. * * @param bucket the name of the bucket that contains the blob * @param name the name of the blob */ public static BlobId of(String bucket, String name) { - return new BlobId(checkNotNull(bucket), checkNotNull(name)); + return new BlobId(checkNotNull(bucket), checkNotNull(name), null); + } + + /** + * Creates a {@code BlobId} object. + * + * @param bucket name of the containing bucket + * @param name blob's name + * @param generation blob's data generation, used for versioning. 
If {@code null} the identifier + * refers to the latest blob's generation + */ + public static BlobId of(String bucket, String name, Long generation) { + return new BlobId(checkNotNull(bucket), checkNotNull(name), generation); } static BlobId fromPb(StorageObject storageObject) { - return BlobId.of(storageObject.getBucket(), storageObject.getName()); + return BlobId.of(storageObject.getBucket(), storageObject.getName(), + storageObject.getGeneration()); } } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobInfo.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobInfo.java index 65b87498b6cc..9d1fd4f5e25c 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobInfo.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobInfo.java @@ -76,7 +76,6 @@ public StorageObject apply(BlobInfo blobInfo) { private final String crc32c; private final String mediaLink; private final Map metadata; - private final Long generation; private final Long metageneration; private final Long deleteTime; private final Long updateTime; @@ -107,7 +106,7 @@ public static final class Builder { private String contentLanguage; private Integer componentCount; private String cacheControl; - private ImmutableList acl; + private List acl; private Acl.Entity owner; private Long size; private String etag; @@ -116,13 +115,35 @@ public static final class Builder { private String crc32c; private String mediaLink; private Map metadata; - private Long generation; private Long metageneration; private Long deleteTime; private Long updateTime; private Builder() {} + private Builder(BlobInfo blobInfo) { + blobId = blobInfo.blobId; + id = blobInfo.id; + cacheControl = blobInfo.cacheControl; + contentEncoding = blobInfo.contentEncoding; + contentType = blobInfo.contentType; + contentDisposition = blobInfo.contentDisposition; + contentLanguage = blobInfo.contentLanguage; + componentCount = blobInfo.componentCount; + acl = blobInfo.acl; + owner = blobInfo.owner; + size = blobInfo.size; + etag = blobInfo.etag; + selfLink = blobInfo.selfLink; + md5 = blobInfo.md5; + crc32c = blobInfo.crc32c; + mediaLink = blobInfo.mediaLink; + metadata = blobInfo.metadata; + metageneration = blobInfo.metageneration; + deleteTime = blobInfo.deleteTime; + updateTime = blobInfo.updateTime; + } + /** * Sets the blob identity. */ @@ -260,11 +281,6 @@ public Builder metadata(Map metadata) { return this; } - Builder generation(Long generation) { - this.generation = generation; - return this; - } - Builder metageneration(Long metageneration) { this.metageneration = metageneration; return this; @@ -307,7 +323,6 @@ private BlobInfo(Builder builder) { crc32c = builder.crc32c; mediaLink = builder.mediaLink; metadata = builder.metadata; - generation = builder.generation; metageneration = builder.metageneration; deleteTime = builder.deleteTime; updateTime = builder.updateTime; @@ -481,7 +496,7 @@ public Map metadata() { * Returns blob's data generation. Used for blob versioning. */ public Long generation() { - return generation; + return blobId().generation(); } /** @@ -511,28 +526,7 @@ public Long updateTime() { * Returns a builder for the current blob. 
*/ public Builder toBuilder() { - return new Builder() - .blobId(blobId) - .id(id) - .generation(generation) - .cacheControl(cacheControl) - .contentEncoding(contentEncoding) - .contentType(contentType) - .contentDisposition(contentDisposition) - .contentLanguage(contentLanguage) - .componentCount(componentCount) - .crc32c(crc32c) - .md5(md5) - .deleteTime(deleteTime) - .updateTime(updateTime) - .mediaLink(mediaLink) - .metadata(metadata) - .metageneration(metageneration) - .acl(acl) - .owner(owner) - .size(size) - .etag(etag) - .selfLink(selfLink); + return new Builder(this); } @Override @@ -540,6 +534,7 @@ public String toString() { return MoreObjects.toStringHelper(this) .add("bucket", bucket()) .add("name", name()) + .add("generation", generation()) .add("size", size()) .add("content-type", contentType()) .add("metadata", metadata()) @@ -590,7 +585,6 @@ public ObjectAccessControl apply(Acl acl) { storageObject.setContentEncoding(contentEncoding); storageObject.setCrc32c(crc32c); storageObject.setContentType(contentType); - storageObject.setGeneration(generation); storageObject.setMd5Hash(md5); storageObject.setMediaLink(mediaLink); storageObject.setMetageneration(metageneration); @@ -618,8 +612,19 @@ public static Builder builder(String bucket, String name) { } /** - * Returns a {@code BlobInfo} builder where blob identity is set to the provided value. + * Returns a {@code BlobInfo} builder where blob identity is set using the provided values. + */ + public static Builder builder(BucketInfo bucketInfo, String name, Long generation) { + return builder(bucketInfo.name(), name, generation); + } + + /** + * Returns a {@code BlobInfo} builder where blob identity is set using the provided values. */ + public static Builder builder(String bucket, String name, Long generation) { + return new Builder().blobId(BlobId.of(bucket, name, generation)); + } + public static Builder builder(BlobId blobId) { return new Builder().blobId(blobId); } @@ -638,9 +643,6 @@ static BlobInfo fromPb(StorageObject storageObject) { if (storageObject.getContentType() != null) { builder.contentType(storageObject.getContentType()); } - if (storageObject.getGeneration() != null) { - builder.generation(storageObject.getGeneration()); - } if (storageObject.getMd5Hash() != null) { builder.md5(storageObject.getMd5Hash()); } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobReadChannel.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobReadChannel.java index 205dc4b97309..54d39649cb70 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobReadChannel.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobReadChannel.java @@ -26,9 +26,8 @@ /** * A channel for reading data from a Google Cloud Storage object. * - * Implementations of this class may buffer data internally to reduce remote calls. - * - * This class is @{link Serializable}, which allows incremental reads. + * Implementations of this class may buffer data internally to reduce remote calls. This interface + * implements {@link Restorable} to allow saving the reader's state to continue reading afterwards. 
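As a rough illustration of the `Restorable` contract mentioned in the reworded `BlobReadChannel` javadoc, a reader's state can be captured and restored to resume reading later. This is a sketch only: it assumes default credentials, placeholder names, and the `capture()` accessor exposed by `Restorable`; error handling is omitted.

```java
import com.google.gcloud.RestorableState;
import com.google.gcloud.storage.BlobId;
import com.google.gcloud.storage.BlobReadChannel;
import com.google.gcloud.storage.Storage;
import com.google.gcloud.storage.StorageOptions;

import java.nio.ByteBuffer;

Storage storage = StorageOptions.defaultInstance().service();
BlobReadChannel reader = storage.reader(BlobId.of("bucket", "blob_name"));
reader.read(ByteBuffer.allocate(64 * 1024));

// Capture the reader's position and request options; the state is Serializable,
// so it can be stored and used later to resume the read where it left off.
RestorableState<BlobReadChannel> state = reader.capture();
BlobReadChannel restored = state.restore();
restored.read(ByteBuffer.allocate(64 * 1024));
```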
*/ public interface BlobReadChannel extends ReadableByteChannel, Closeable, Restorable { diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobReadChannelImpl.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobReadChannelImpl.java index 09047a642218..8fe6eae66d8f 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobReadChannelImpl.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobReadChannelImpl.java @@ -23,6 +23,7 @@ import com.google.gcloud.RestorableState; import com.google.gcloud.RetryHelper; import com.google.gcloud.spi.StorageRpc; +import com.google.gcloud.spi.StorageRpc.Tuple; import java.io.IOException; import java.io.Serializable; @@ -41,6 +42,7 @@ class BlobReadChannelImpl implements BlobReadChannel { private final StorageOptions serviceOptions; private final BlobId blob; private final Map requestOptions; + private String lastEtag; private int position; private boolean isOpen; private boolean endOfStream; @@ -117,12 +119,19 @@ public int read(ByteBuffer byteBuffer) throws IOException { } final int toRead = Math.max(byteBuffer.remaining(), chunkSize); try { - buffer = runWithRetries(new Callable() { + Tuple result = runWithRetries(new Callable>() { @Override - public byte[] call() { + public Tuple call() { return storageRpc.read(storageObject, requestOptions, position, toRead); } }, serviceOptions.retryParams(), StorageImpl.EXCEPTION_HANDLER); + if (lastEtag != null && !Objects.equals(result.x(), lastEtag)) { + StringBuilder messageBuilder = new StringBuilder(); + messageBuilder.append("Blob ").append(blob).append(" was updated while reading"); + throw new StorageException(0, messageBuilder.toString(), false); + } + lastEtag = result.x(); + buffer = result.y(); } catch (RetryHelper.RetryHelperException e) { throw StorageException.translateAndThrow(e); } @@ -152,6 +161,7 @@ static class StateImpl implements RestorableState, Serializable private final StorageOptions serviceOptions; private final BlobId blob; private final Map requestOptions; + private final String lastEtag; private final int position; private final boolean isOpen; private final boolean endOfStream; @@ -161,6 +171,7 @@ static class StateImpl implements RestorableState, Serializable this.serviceOptions = builder.serviceOptions; this.blob = builder.blob; this.requestOptions = builder.requestOptions; + this.lastEtag = builder.lastEtag; this.position = builder.position; this.isOpen = builder.isOpen; this.endOfStream = builder.endOfStream; @@ -171,6 +182,7 @@ static class Builder { private final StorageOptions serviceOptions; private final BlobId blob; private final Map requestOptions; + private String lastEtag; private int position; private boolean isOpen; private boolean endOfStream; @@ -182,6 +194,11 @@ private Builder(StorageOptions options, BlobId blob, Map r this.requestOptions = reqOptions; } + Builder lastEtag(String lastEtag) { + this.lastEtag = lastEtag; + return this; + } + Builder position(int position) { this.position = position; return this; @@ -215,6 +232,7 @@ static Builder builder( @Override public BlobReadChannel restore() { BlobReadChannelImpl channel = new BlobReadChannelImpl(serviceOptions, blob, requestOptions); + channel.lastEtag = lastEtag; channel.position = position; channel.isOpen = isOpen; channel.endOfStream = endOfStream; @@ -224,8 +242,8 @@ public BlobReadChannel restore() { @Override public int hashCode() { - return Objects.hash(serviceOptions, blob, requestOptions, position, isOpen, endOfStream, - 
chunkSize); + return Objects.hash(serviceOptions, blob, requestOptions, lastEtag, position, isOpen, + endOfStream, chunkSize); } @Override @@ -240,6 +258,7 @@ public boolean equals(Object obj) { return Objects.equals(this.serviceOptions, other.serviceOptions) && Objects.equals(this.blob, other.blob) && Objects.equals(this.requestOptions, other.requestOptions) + && Objects.equals(this.lastEtag, other.lastEtag) && this.position == other.position && this.isOpen == other.isOpen && this.endOfStream == other.endOfStream diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobWriteChannel.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobWriteChannel.java index a6208e5020ae..fe9164532120 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobWriteChannel.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobWriteChannel.java @@ -26,8 +26,8 @@ * A channel for writing data to a Google Cloud Storage object. * * Implementations of this class may further buffer data internally to reduce remote calls. Written - * data will only be visible after calling {@link #close()}. This class is serializable, to allow - * incremental writes. + * data will only be visible after calling {@link #close()}. This interface implements + * {@link Restorable} to allow saving the writer's state to continue writing afterwards. */ public interface BlobWriteChannel extends WritableByteChannel, Closeable, Restorable { diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Bucket.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Bucket.java index 21aafd92b5d4..54e60cc006dc 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Bucket.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Bucket.java @@ -271,7 +271,7 @@ public Bucket update(BucketInfo bucketInfo, BucketTargetOption... options) { * Deletes this bucket. * * @param options bucket delete options - * @return true if bucket was deleted + * @return {@code true} if bucket was deleted, {@code false} if it was not found * @throws StorageException upon failure */ public boolean delete(BucketSourceOption... 
options) { diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BucketInfo.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BucketInfo.java index d5a382446709..a8cc4a0f32d8 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BucketInfo.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BucketInfo.java @@ -325,18 +325,37 @@ public static final class Builder { private Boolean versioningEnabled; private String indexPage; private String notFoundPage; - private ImmutableList deleteRules; + private List deleteRules; private String storageClass; private String location; private String etag; private Long createTime; private Long metageneration; - private ImmutableList cors; - private ImmutableList acl; - private ImmutableList defaultAcl; + private List cors; + private List acl; + private List defaultAcl; private Builder() {} + private Builder(BucketInfo bucketInfo) { + id = bucketInfo.id; + name = bucketInfo.name; + etag = bucketInfo.etag; + createTime = bucketInfo.createTime; + metageneration = bucketInfo.metageneration; + location = bucketInfo.location; + storageClass = bucketInfo.storageClass; + cors = bucketInfo.cors; + acl = bucketInfo.acl; + defaultAcl = bucketInfo.defaultAcl; + owner = bucketInfo.owner; + selfLink = bucketInfo.selfLink; + versioningEnabled = bucketInfo.versioningEnabled; + indexPage = bucketInfo.indexPage; + notFoundPage = bucketInfo.notFoundPage; + deleteRules = bucketInfo.deleteRules; + } + /** * Sets the bucket's name. */ @@ -629,23 +648,7 @@ public List defaultAcl() { * Returns a builder for the current bucket. */ public Builder toBuilder() { - return new Builder() - .name(name) - .id(id) - .createTime(createTime) - .etag(etag) - .metageneration(metageneration) - .cors(cors) - .acl(acl) - .defaultAcl(defaultAcl) - .location(location) - .storageClass(storageClass) - .owner(owner) - .selfLink(selfLink) - .versioningEnabled(versioningEnabled) - .indexPage(indexPage) - .notFoundPage(notFoundPage) - .deleteRules(deleteRules); + return new Builder(this); } @Override diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Storage.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Storage.java index 2e95e69aa445..6e1c33b137fd 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Storage.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Storage.java @@ -33,6 +33,7 @@ import java.io.InputStream; import java.io.Serializable; import java.net.URL; +import java.nio.ByteBuffer; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; @@ -473,14 +474,32 @@ class BlobSourceOption extends Option { private static final long serialVersionUID = -3712768261070182991L; - private BlobSourceOption(StorageRpc.Option rpcOption, long value) { + private BlobSourceOption(StorageRpc.Option rpcOption, Long value) { super(rpcOption, value); } /** * Returns an option for blob's data generation match. If this option is used the request will - * fail if blob's generation does not match the provided value. + * fail if blob's generation does not match. The generation value to compare with the actual + * blob's generation is taken from a source {@link BlobId} object. When this option is passed + * to a {@link Storage} method and {@link BlobId#generation()} is {@code null} or no + * {@link BlobId} is provided an exception is thrown. 
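As a sketch of how the new no-argument option is meant to be used (the names and generation value below are made up), the generation to compare against travels inside the `BlobId` rather than being passed to the option:

```java
import com.google.gcloud.storage.BlobId;
import com.google.gcloud.storage.Storage;
import com.google.gcloud.storage.Storage.BlobSourceOption;
import com.google.gcloud.storage.StorageOptions;

Storage storage = StorageOptions.defaultInstance().service();

// The id carries the generation, so generationMatch() needs no argument.
BlobId blobId = BlobId.of("bucket", "blob_name", 1234567890L);

// Deletes only if the blob's live generation is still 1234567890; if the id had a
// null generation, the call would fail because there is nothing to compare against.
boolean deleted = storage.delete(blobId, BlobSourceOption.generationMatch());
```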
*/ + public static BlobSourceOption generationMatch() { + return new BlobSourceOption(StorageRpc.Option.IF_GENERATION_MATCH, null); + } + + /** + * Returns an option for blob's data generation mismatch. If this option is used the request + * will fail if blob's generation matches. The generation value to compare with the actual + * blob's generation is taken from a source {@link BlobId} object. When this option is passed + * to a {@link Storage} method and {@link BlobId#generation()} is {@code null} or no + * {@link BlobId} is provided an exception is thrown. + */ + public static BlobSourceOption generationNotMatch() { + return new BlobSourceOption(StorageRpc.Option.IF_GENERATION_NOT_MATCH, null); + } + public static BlobSourceOption generationMatch(long generation) { return new BlobSourceOption(StorageRpc.Option.IF_GENERATION_MATCH, generation); } @@ -517,7 +536,7 @@ class BlobGetOption extends Option { private static final long serialVersionUID = 803817709703661480L; - private BlobGetOption(StorageRpc.Option rpcOption, long value) { + private BlobGetOption(StorageRpc.Option rpcOption, Long value) { super(rpcOption, value); } @@ -527,8 +546,26 @@ private BlobGetOption(StorageRpc.Option rpcOption, String value) { /** * Returns an option for blob's data generation match. If this option is used the request will - * fail if blob's generation does not match the provided value. + * fail if blob's generation does not match. The generation value to compare with the actual + * blob's generation is taken from a source {@link BlobId} object. When this option is passed + * to a {@link Storage} method and {@link BlobId#generation()} is {@code null} or no + * {@link BlobId} is provided an exception is thrown. + */ + public static BlobGetOption generationMatch() { + return new BlobGetOption(StorageRpc.Option.IF_GENERATION_MATCH, (Long) null); + } + + /** + * Returns an option for blob's data generation mismatch. If this option is used the request + * will fail if blob's generation matches. The generation value to compare with the actual + * blob's generation is taken from a source {@link BlobId} object. When this option is passed + * to a {@link Storage} method and {@link BlobId#generation()} is {@code null} or no + * {@link BlobId} is provided an exception is thrown. */ + public static BlobGetOption generationNotMatch() { + return new BlobGetOption(StorageRpc.Option.IF_GENERATION_NOT_MATCH, (Long) null); + } + public static BlobGetOption generationMatch(long generation) { return new BlobGetOption(StorageRpc.Option.IF_GENERATION_MATCH, generation); } @@ -1280,7 +1317,7 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx /** * Delete the requested bucket. * - * @return true if bucket was deleted + * @return {@code true} if bucket was deleted, {@code false} if it was not found * @throws StorageException upon failure */ boolean delete(String bucket, BucketSourceOption... options); @@ -1288,7 +1325,7 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx /** * Delete the requested blob. * - * @return true if blob was deleted + * @return {@code true} if blob was deleted, {@code false} if it was not found * @throws StorageException upon failure */ boolean delete(String bucket, String blob, BlobSourceOption... options); @@ -1296,7 +1333,7 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx /** * Delete the requested blob. 
* - * @return true if blob was deleted + * @return {@code true} if blob was deleted, {@code false} if it was not found * @throws StorageException upon failure */ boolean delete(BlobId blob, BlobSourceOption... options); @@ -1304,7 +1341,7 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx /** * Delete the requested blob. * - * @return true if blob was deleted + * @return {@code true} if blob was deleted, {@code false} if it was not found * @throws StorageException upon failure */ boolean delete(BlobId blob); @@ -1369,14 +1406,29 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx BatchResponse apply(BatchRequest batchRequest); /** - * Return a channel for reading the blob's content. + * Return a channel for reading the blob's content. The blob's latest generation is read. If the + * blob changes while reading (i.e. {@link BlobInfo#etag()} changes), subsequent calls to + * {@code blobReadChannel.read(ByteBuffer)} may throw {@link StorageException}. + * + *

The {@link BlobSourceOption#generationMatch(long)} option can be provided to ensure that + * {@code blobReadChannel.read(ByteBuffer)} calls will throw {@link StorageException} if the blob's + * generation differs from the expected one. * * @throws StorageException upon failure */ BlobReadChannel reader(String bucket, String blob, BlobSourceOption... options); /** - * Return a channel for reading the blob's content. + * Return a channel for reading the blob's content. If {@code blob.generation()} is set, + * data corresponding to that generation is read. If {@code blob.generation()} is {@code null} + * the blob's latest generation is read. If the blob changes while reading (i.e. + * {@link BlobInfo#etag()} changes), subsequent calls to {@code blobReadChannel.read(ByteBuffer)} + * may throw {@link StorageException}. + * + *

The {@link BlobSourceOption#generationMatch()} and + * {@link BlobSourceOption#generationMatch(long)} options can be used to ensure that + * {@code blobReadChannel.read(ByteBuffer)} calls will throw {@link StorageException} if the + * blob`s generation differs from the expected one. * * @throws StorageException upon failure */ @@ -1442,8 +1494,8 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx * * @param blobIds blobs to delete * @return an immutable list of booleans. If a blob has been deleted the corresponding item in the - * list is {@code true}. If deletion failed or access to the resource was denied the item is - * {@code false}. + * list is {@code true}. If a blob was not found, deletion failed or access to the resource + * was denied the corresponding item is {@code false}. * @throws StorageException upon failure */ List delete(BlobId... blobIds); diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageImpl.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageImpl.java index 4c85113e940e..91a408657847 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageImpl.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageImpl.java @@ -28,7 +28,6 @@ import static com.google.gcloud.spi.StorageRpc.Option.IF_SOURCE_GENERATION_NOT_MATCH; import static com.google.gcloud.spi.StorageRpc.Option.IF_SOURCE_METAGENERATION_MATCH; import static com.google.gcloud.spi.StorageRpc.Option.IF_SOURCE_METAGENERATION_NOT_MATCH; -import static java.net.HttpURLConnection.HTTP_NOT_FOUND; import static java.nio.charset.StandardCharsets.UTF_8; import com.google.api.services.storage.model.StorageObject; @@ -43,6 +42,8 @@ import com.google.common.hash.Hashing; import com.google.common.io.BaseEncoding; import com.google.common.primitives.Ints; +import com.google.gcloud.AuthCredentials; +import com.google.gcloud.AuthCredentials.ApplicationDefaultAuthCredentials; import com.google.gcloud.AuthCredentials.ServiceAccountAuthCredentials; import com.google.gcloud.PageImpl; import com.google.gcloud.BaseService; @@ -175,14 +176,7 @@ public BucketInfo get(String bucket, BucketGetOption... options) { new Callable() { @Override public com.google.api.services.storage.model.Bucket call() { - try { - return storageRpc.get(bucketPb, optionsMap); - } catch (StorageException ex) { - if (ex.code() == HTTP_NOT_FOUND) { - return null; - } - throw ex; - } + return storageRpc.get(bucketPb, optionsMap); } }, options().retryParams(), EXCEPTION_HANDLER); return answer == null ? null : BucketInfo.fromPb(answer); @@ -199,19 +193,12 @@ public BlobInfo get(String bucket, String blob, BlobGetOption... options) { @Override public BlobInfo get(BlobId blob, BlobGetOption... options) { final StorageObject storedObject = blob.toPb(); - final Map optionsMap = optionMap(options); + final Map optionsMap = optionMap(blob, options); try { StorageObject storageObject = runWithRetries(new Callable() { @Override public StorageObject call() { - try { - return storageRpc.get(storedObject, optionsMap); - } catch (StorageException ex) { - if (ex.code() == HTTP_NOT_FOUND) { - return null; - } - throw ex; - } + return storageRpc.get(storedObject, optionsMap); } }, options().retryParams(), EXCEPTION_HANDLER); return storageObject == null ? null : BlobInfo.fromPb(storageObject); @@ -405,7 +392,7 @@ public boolean delete(String bucket, String blob, BlobSourceOption... options) { @Override public boolean delete(BlobId blob, BlobSourceOption... 
options) { final StorageObject storageObject = blob.toPb(); - final Map optionsMap = optionMap(options); + final Map optionsMap = optionMap(blob, options); try { return runWithRetries(new Callable() { @Override @@ -428,8 +415,9 @@ public BlobInfo compose(final ComposeRequest composeRequest) { final List sources = Lists.newArrayListWithCapacity(composeRequest.sourceBlobs().size()); for (ComposeRequest.SourceBlob sourceBlob : composeRequest.sourceBlobs()) { - sources.add(BlobInfo.builder(composeRequest.target().bucket(), sourceBlob.name()) - .generation(sourceBlob.generation()).build().toPb()); + sources.add(BlobInfo.builder( + BlobId.of(composeRequest.target().bucket(), sourceBlob.name(), sourceBlob.generation())) + .build().toPb()); } final StorageObject target = composeRequest.target().toPb(); final Map targetOptions = optionMap(composeRequest.target().generation(), @@ -450,7 +438,7 @@ public StorageObject call() { public CopyWriter copy(final CopyRequest copyRequest) { final StorageObject source = copyRequest.source().toPb(); final Map sourceOptions = - optionMap(null, null, copyRequest.sourceOptions(), true); + optionMap(copyRequest.source().generation(), null, copyRequest.sourceOptions(), true); final StorageObject target = copyRequest.target().toPb(); final Map targetOptions = optionMap(copyRequest.target().generation(), copyRequest.target().metageneration(), copyRequest.targetOptions()); @@ -476,7 +464,7 @@ public byte[] readAllBytes(String bucket, String blob, BlobSourceOption... optio @Override public byte[] readAllBytes(BlobId blob, BlobSourceOption... options) { final StorageObject storageObject = blob.toPb(); - final Map optionsMap = optionMap(options); + final Map optionsMap = optionMap(blob, options); try { return runWithRetries(new Callable() { @Override @@ -495,7 +483,7 @@ public BatchResponse apply(BatchRequest batchRequest) { Lists.newArrayListWithCapacity(batchRequest.toDelete().size()); for (Map.Entry> entry : batchRequest.toDelete().entrySet()) { BlobId blob = entry.getKey(); - Map optionsMap = optionMap(null, null, entry.getValue()); + Map optionsMap = optionMap(blob.generation(), null, entry.getValue()); StorageObject storageObject = blob.toPb(); toDelete.add(Tuple.>of(storageObject, optionsMap)); } @@ -512,7 +500,7 @@ public BatchResponse apply(BatchRequest batchRequest) { Lists.newArrayListWithCapacity(batchRequest.toGet().size()); for (Map.Entry> entry : batchRequest.toGet().entrySet()) { BlobId blob = entry.getKey(); - Map optionsMap = optionMap(null, null, entry.getValue()); + Map optionsMap = optionMap(blob.generation(), null, entry.getValue()); toGet.add(Tuple.>of(blob.toPb(), optionsMap)); } StorageRpc.BatchResponse response = @@ -522,28 +510,23 @@ public BatchResponse apply(BatchRequest batchRequest) { List> updates = transformBatchResult( toUpdate, response.updates, BlobInfo.FROM_PB_FUNCTION); List> gets = transformBatchResult( - toGet, response.gets, BlobInfo.FROM_PB_FUNCTION, HTTP_NOT_FOUND); + toGet, response.gets, BlobInfo.FROM_PB_FUNCTION); return new BatchResponse(deletes, updates, gets); } private List> transformBatchResult( Iterable>> request, - Map> results, Function transform, - int... 
nullOnErrorCodes) { - Set nullOnErrorCodesSet = Sets.newHashSet(Ints.asList(nullOnErrorCodes)); + Map> results, Function transform) { List> response = Lists.newArrayListWithCapacity(results.size()); for (Tuple tuple : request) { Tuple result = results.get(tuple.x()); - if (result.x() != null) { - response.add(BatchResponse.Result.of(transform.apply(result.x()))); + I object = result.x(); + StorageException exception = result.y(); + if (exception != null) { + response.add(new BatchResponse.Result(exception)); } else { - StorageException exception = result.y(); - if (nullOnErrorCodesSet.contains(exception.code())) { - //noinspection unchecked - response.add(BatchResponse.Result.empty()); - } else { - response.add(new BatchResponse.Result(exception)); - } + response.add(object != null ? + BatchResponse.Result.of(transform.apply(object)) : BatchResponse.Result.empty()); } } return response; @@ -557,7 +540,7 @@ public BlobReadChannel reader(String bucket, String blob, BlobSourceOption... op @Override public BlobReadChannel reader(BlobId blob, BlobSourceOption... options) { - Map optionsMap = optionMap(options); + Map optionsMap = optionMap(blob, options); return new BlobReadChannelImpl(options(), blob, optionsMap); } @@ -583,9 +566,15 @@ public URL signUrl(BlobInfo blobInfo, long duration, TimeUnit unit, SignUrlOptio ServiceAccountAuthCredentials cred = (ServiceAccountAuthCredentials) optionMap.get(SignUrlOption.Option.SERVICE_ACCOUNT_CRED); if (cred == null) { - checkArgument(options().authCredentials() instanceof ServiceAccountAuthCredentials, - "Signing key was not provided and could not be derived"); - cred = (ServiceAccountAuthCredentials) this.options().authCredentials(); + AuthCredentials serviceCred = this.options().authCredentials(); + if (serviceCred instanceof ServiceAccountAuthCredentials) { + cred = (ServiceAccountAuthCredentials) serviceCred; + } else { + if (serviceCred instanceof ApplicationDefaultAuthCredentials) { + cred = ((ApplicationDefaultAuthCredentials) serviceCred).toServiceAccountCredentials(); + } + } + checkArgument(cred != null, "Signing key was not provided and could not be derived"); } // construct signature - see https://cloud.google.com/storage/docs/access-control#Signed-URLs StringBuilder stBuilder = new StringBuilder(); @@ -741,4 +730,8 @@ private static void addToOptionMap(StorageRpc.Option getOption, StorageRpc.O private Map optionMap(BlobInfo blobInfo, Option... options) { return optionMap(blobInfo.generation(), blobInfo.metageneration(), options); } + + private Map optionMap(BlobId blobId, Option... options) { + return optionMap(blobId.generation(), null, options); + } } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/package-info.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/package-info.java index 2a09631be40a..137afd38b6ae 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/package-info.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/package-info.java @@ -17,10 +17,9 @@ /** * A client to Google Cloud Storage. * - *

A simple usage example: + *

Here's a simple usage example for using gcloud-java from App/Compute Engine: *

{@code
- * StorageOptions options = StorageOptions.builder().projectId("project").build();
- * Storage storage = options.service();
+ * Storage storage = StorageOptions.defaultInstance().service();
  * BlobId blobId = BlobId.of("bucket", "blob_name");
  * Blob blob = Blob.load(storage, blobId);
  * if (blob == null) {
@@ -35,6 +34,11 @@
  *   channel.close();
  * }}
* + * When using gcloud-java from outside of App/Compute Engine, you have to specify a + * project ID and + * provide + * credentials. * @see Google Cloud Storage */ package com.google.gcloud.storage; diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/testing/RemoteGcsHelper.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/testing/RemoteGcsHelper.java index b15768cffa98..f5cdae83f999 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/testing/RemoteGcsHelper.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/testing/RemoteGcsHelper.java @@ -45,8 +45,6 @@ public class RemoteGcsHelper { private static final Logger log = Logger.getLogger(RemoteGcsHelper.class.getName()); private static final String BUCKET_NAME_PREFIX = "gcloud-test-bucket-temp-"; - private static final String PROJECT_ID_ENV_VAR = "GCLOUD_TESTS_PROJECT_ID"; - private static final String PRIVATE_KEY_ENV_VAR = "GCLOUD_TESTS_KEY"; private final StorageOptions options; private RemoteGcsHelper(StorageOptions options) { @@ -107,13 +105,7 @@ public static RemoteGcsHelper create(String projectId, InputStream keyStream) StorageOptions storageOptions = StorageOptions.builder() .authCredentials(AuthCredentials.createForJson(keyStream)) .projectId(projectId) - .retryParams(RetryParams.builder() - .retryMaxAttempts(10) - .retryMinAttempts(6) - .maxRetryDelayMillis(30000) - .totalRetryPeriodMillis(120000) - .initialRetryDelayMillis(250) - .build()) + .retryParams(retryParams()) .connectTimeout(60000) .readTimeout(60000) .build(); @@ -145,41 +137,30 @@ public static RemoteGcsHelper create(String projectId, String keyPath) log.log(Level.WARNING, ex.getMessage()); } throw GcsHelperException.translate(ex); - } catch (IOException ex) { - if (log.isLoggable(Level.WARNING)) { - log.log(Level.WARNING, ex.getMessage()); - } - throw GcsHelperException.translate(ex); } } /** - * Creates a {@code RemoteGcsHelper} object. Project id and path to JSON key are read from two - * environment variables: {@code GCLOUD_TESTS_PROJECT_ID} and {@code GCLOUD_TESTS_KEY}. - * - * @return A {@code RemoteGcsHelper} object for the provided options. - * @throws com.google.gcloud.storage.testing.RemoteGcsHelper.GcsHelperException if environment - * variables {@code GCLOUD_TESTS_PROJECT_ID} and {@code GCLOUD_TESTS_KEY} are not set or if - * the file pointed by {@code GCLOUD_TESTS_KEY} does not exist + * Creates a {@code RemoteGcsHelper} object using default project id and authentication + * credentials. 
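Tying the note about project IDs and credentials together, here is a minimal sketch of configuring the service explicitly when running outside App/Compute Engine. The project ID and key path are placeholders, and checked-exception handling is omitted for brevity.

```java
import com.google.gcloud.AuthCredentials;
import com.google.gcloud.storage.Storage;
import com.google.gcloud.storage.StorageOptions;

import java.io.FileInputStream;

Storage storage = StorageOptions.builder()
    .projectId("my-project-id")
    .authCredentials(AuthCredentials.createForJson(
        new FileInputStream("/path/to/service-account-key.json")))
    .build()
    .service();
```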
*/ public static RemoteGcsHelper create() throws GcsHelperException { - String projectId = System.getenv(PROJECT_ID_ENV_VAR); - String keyPath = System.getenv(PRIVATE_KEY_ENV_VAR); - if (projectId == null) { - String message = "Environment variable " + PROJECT_ID_ENV_VAR + " not set"; - if (log.isLoggable(Level.WARNING)) { - log.log(Level.WARNING, message); - } - throw new GcsHelperException(message); - } - if (keyPath == null) { - String message = "Environment variable " + PRIVATE_KEY_ENV_VAR + " not set"; - if (log.isLoggable(Level.WARNING)) { - log.log(Level.WARNING, message); - } - throw new GcsHelperException(message); - } - return create(projectId, keyPath); + StorageOptions storageOptions = StorageOptions.builder() + .retryParams(retryParams()) + .connectTimeout(60000) + .readTimeout(60000) + .build(); + return new RemoteGcsHelper(storageOptions); + } + + private static RetryParams retryParams() { + return RetryParams.builder() + .retryMaxAttempts(10) + .retryMinAttempts(6) + .maxRetryDelayMillis(30000) + .totalRetryPeriodMillis(120000) + .initialRetryDelayMillis(250) + .build(); } private static class DeleteBucketTask implements Callable { diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BatchRequestTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BatchRequestTest.java index 600c8af0d554..63972ff85dfd 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BatchRequestTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BatchRequestTest.java @@ -37,12 +37,12 @@ public class BatchRequestTest { @Test public void testBatchRequest() { BatchRequest request = BatchRequest.builder() - .delete("b1", "o1") + .delete(BlobId.of("b1", "o1", 1L), BlobSourceOption.generationMatch()) .delete("b1", "o2", BlobSourceOption.generationMatch(1), BlobSourceOption.metagenerationMatch(2)) .update(BlobInfo.builder("b2", "o1").build(), BlobTargetOption.predefinedAcl(PUBLIC_READ)) .update(BlobInfo.builder("b2", "o2").build()) - .get("b3", "o1") + .get(BlobId.of("b3", "o1", 1L), BlobGetOption.generationMatch()) .get("b3", "o2", BlobGetOption.generationMatch(1)) .get("b3", "o3") .build(); @@ -50,11 +50,15 @@ public void testBatchRequest() { Iterator>> deletes = request .toDelete().entrySet().iterator(); Entry> delete = deletes.next(); - assertEquals(BlobId.of("b1", "o1"), delete.getKey()); - assertTrue(Iterables.isEmpty(delete.getValue())); + assertEquals(BlobId.of("b1", "o1", 1L), delete.getKey()); + assertEquals(1, Iterables.size(delete.getValue())); + assertEquals(BlobSourceOption.generationMatch(), Iterables.getFirst(delete.getValue(), null)); delete = deletes.next(); assertEquals(BlobId.of("b1", "o2"), delete.getKey()); assertEquals(2, Iterables.size(delete.getValue())); + assertEquals(BlobSourceOption.generationMatch(1L), Iterables.getFirst(delete.getValue(), null)); + assertEquals(BlobSourceOption.metagenerationMatch(2L), + Iterables.get(delete.getValue(), 1, null)); assertFalse(deletes.hasNext()); Iterator>> updates = request @@ -71,8 +75,9 @@ public void testBatchRequest() { Iterator>> gets = request.toGet().entrySet().iterator(); Entry> get = gets.next(); - assertEquals(BlobId.of("b3", "o1"), get.getKey()); - assertTrue(Iterables.isEmpty(get.getValue())); + assertEquals(BlobId.of("b3", "o1", 1L), get.getKey()); + assertEquals(1, Iterables.size(get.getValue())); + assertEquals(BlobGetOption.generationMatch(), Iterables.getFirst(get.getValue(), null)); get = gets.next(); assertEquals(BlobId.of("b3", "o2"), 
get.getKey()); assertEquals(1, Iterables.size(get.getValue())); diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobInfoTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobInfoTest.java index 70560b0c9a9e..36b027dc7278 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobInfoTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobInfoTest.java @@ -55,7 +55,7 @@ public class BlobInfoTest { private static final String SELF_LINK = "http://storage/b/n"; private static final Long SIZE = 1024L; private static final Long UPDATE_TIME = DELETE_TIME - 1L; - private static final BlobInfo BLOB_INFO = BlobInfo.builder("b", "n") + private static final BlobInfo BLOB_INFO = BlobInfo.builder("b", "n", GENERATION) .acl(ACL) .componentCount(COMPONENT_COUNT) .contentType(CONTENT_TYPE) @@ -66,7 +66,6 @@ public class BlobInfoTest { .crc32c(CRC32) .deleteTime(DELETE_TIME) .etag(ETAG) - .generation(GENERATION) .id(ID) .md5(MD5) .mediaLink(MEDIA_LINK) @@ -85,10 +84,16 @@ public void testToBuilder() { assertEquals("n2", blobInfo.name()); assertEquals("b2", blobInfo.bucket()); assertEquals(Long.valueOf(200), blobInfo.size()); - blobInfo = blobInfo.toBuilder().blobId(BlobId.of("b", "n")).size(SIZE).build(); + blobInfo = blobInfo.toBuilder().blobId(BlobId.of("b", "n", GENERATION)).size(SIZE).build(); compareBlobs(BLOB_INFO, blobInfo); } + @Test + public void testToBuilderIncomplete() { + BlobInfo incompleteBlobInfo = BlobInfo.builder(BlobId.of("b2", "n2")).build(); + compareBlobs(incompleteBlobInfo, incompleteBlobInfo.toBuilder().build()); + } + @Test public void testBuilder() { assertEquals("b", BLOB_INFO.bucket()); @@ -150,6 +155,6 @@ public void testToPbAndFromPb() { @Test public void testBlobId() { - assertEquals(BlobId.of("b", "n"), BLOB_INFO.blobId()); + assertEquals(BlobId.of("b", "n", GENERATION), BLOB_INFO.blobId()); } } diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobReadChannelImplTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobReadChannelImplTest.java index e1f904bf72fe..f99fe893d0d9 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobReadChannelImplTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobReadChannelImplTest.java @@ -19,7 +19,6 @@ import static org.easymock.EasyMock.anyObject; import static org.easymock.EasyMock.createMock; import static org.easymock.EasyMock.expect; -import static org.easymock.EasyMock.expectLastCall; import static org.easymock.EasyMock.replay; import static org.easymock.EasyMock.verify; import static org.junit.Assert.assertArrayEquals; @@ -46,7 +45,7 @@ public class BlobReadChannelImplTest { private static final String BUCKET_NAME = "b"; private static final String BLOB_NAME = "n"; - private static final BlobId BLOB_ID = BlobId.of(BUCKET_NAME, BLOB_NAME); + private static final BlobId BLOB_ID = BlobId.of(BUCKET_NAME, BLOB_NAME, -1L); private static final Map EMPTY_RPC_OPTIONS = ImmutableMap.of(); private static final int DEFAULT_CHUNK_SIZE = 2 * 1024 * 1024; private static final int CUSTOM_CHUNK_SIZE = 2 * 1024 * 1024; @@ -88,7 +87,7 @@ public void testReadBuffered() throws IOException { ByteBuffer firstReadBuffer = ByteBuffer.allocate(42); ByteBuffer secondReadBuffer = ByteBuffer.allocate(42); expect(storageRpcMock.read(BLOB_ID.toPb(), EMPTY_RPC_OPTIONS, 0, DEFAULT_CHUNK_SIZE)) - .andReturn(result); + .andReturn(StorageRpc.Tuple.of("etag", result)); 
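For context on the etag tracking these tests exercise, here is a rough sketch of what a caller might observe: if the blob is overwritten between chunked reads, a later `read` call raises `StorageException`. Names are placeholders and buffer handling is simplified.

```java
import com.google.gcloud.storage.BlobId;
import com.google.gcloud.storage.BlobReadChannel;
import com.google.gcloud.storage.Storage;
import com.google.gcloud.storage.StorageException;
import com.google.gcloud.storage.StorageOptions;

import java.io.IOException;
import java.nio.ByteBuffer;

Storage storage = StorageOptions.defaultInstance().service();
try (BlobReadChannel reader = storage.reader(BlobId.of("bucket", "blob_name"))) {
  ByteBuffer bytes = ByteBuffer.allocate(64 * 1024);
  while (reader.read(bytes) != -1) {
    bytes.clear();  // a real caller would consume the chunk before clearing
  }
} catch (StorageException e) {
  // The blob's etag changed between reads: it was updated while being read.
} catch (IOException e) {
  // Ordinary I/O failure.
}
```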
replay(storageRpcMock); reader.read(firstReadBuffer); reader.read(secondReadBuffer); @@ -107,10 +106,11 @@ public void testReadBig() throws IOException { byte[] secondResult = randomByteArray(DEFAULT_CHUNK_SIZE); ByteBuffer firstReadBuffer = ByteBuffer.allocate(DEFAULT_CHUNK_SIZE); ByteBuffer secondReadBuffer = ByteBuffer.allocate(42); - storageRpcMock.read(BLOB_ID.toPb(), EMPTY_RPC_OPTIONS, 0, DEFAULT_CHUNK_SIZE); - expectLastCall().andReturn(firstResult); - storageRpcMock.read(BLOB_ID.toPb(), EMPTY_RPC_OPTIONS, DEFAULT_CHUNK_SIZE, CUSTOM_CHUNK_SIZE); - expectLastCall().andReturn(secondResult); + expect(storageRpcMock.read(BLOB_ID.toPb(), EMPTY_RPC_OPTIONS, 0, DEFAULT_CHUNK_SIZE)) + .andReturn(StorageRpc.Tuple.of("etag", firstResult)); + expect(storageRpcMock.read( + BLOB_ID.toPb(), EMPTY_RPC_OPTIONS, DEFAULT_CHUNK_SIZE, CUSTOM_CHUNK_SIZE)) + .andReturn(StorageRpc.Tuple.of("etag", secondResult)); replay(storageRpcMock); reader.read(firstReadBuffer); reader.read(secondReadBuffer); @@ -125,7 +125,7 @@ public void testReadFinish() throws IOException { byte[] result = {}; ByteBuffer readBuffer = ByteBuffer.allocate(DEFAULT_CHUNK_SIZE); expect(storageRpcMock.read(BLOB_ID.toPb(), EMPTY_RPC_OPTIONS, 0, DEFAULT_CHUNK_SIZE)) - .andReturn(result); + .andReturn(StorageRpc.Tuple.of("etag", result)); replay(storageRpcMock); assertEquals(-1, reader.read(readBuffer)); } @@ -137,7 +137,7 @@ public void testSeek() throws IOException { byte[] result = randomByteArray(DEFAULT_CHUNK_SIZE); ByteBuffer readBuffer = ByteBuffer.allocate(DEFAULT_CHUNK_SIZE); expect(storageRpcMock.read(BLOB_ID.toPb(), EMPTY_RPC_OPTIONS, 42, DEFAULT_CHUNK_SIZE)) - .andReturn(result); + .andReturn(StorageRpc.Tuple.of("etag", result)); replay(storageRpcMock); reader.read(readBuffer); assertArrayEquals(result, readBuffer.array()); @@ -166,6 +166,31 @@ public void testReadClosed() { } } + @Test + public void testReadGenerationChanged() throws IOException { + BlobId blobId = BlobId.of(BUCKET_NAME, BLOB_NAME); + reader = new BlobReadChannelImpl(options, blobId, EMPTY_RPC_OPTIONS); + byte[] firstResult = randomByteArray(DEFAULT_CHUNK_SIZE); + byte[] secondResult = randomByteArray(DEFAULT_CHUNK_SIZE); + ByteBuffer firstReadBuffer = ByteBuffer.allocate(DEFAULT_CHUNK_SIZE); + ByteBuffer secondReadBuffer = ByteBuffer.allocate(DEFAULT_CHUNK_SIZE); + expect(storageRpcMock.read(blobId.toPb(), EMPTY_RPC_OPTIONS, 0, DEFAULT_CHUNK_SIZE)) + .andReturn(StorageRpc.Tuple.of("etag1", firstResult)); + expect(storageRpcMock.read( + blobId.toPb(), EMPTY_RPC_OPTIONS, DEFAULT_CHUNK_SIZE, DEFAULT_CHUNK_SIZE)) + .andReturn(StorageRpc.Tuple.of("etag2", firstResult)); + replay(storageRpcMock); + reader.read(firstReadBuffer); + try { + reader.read(secondReadBuffer); + fail("Expected BlobReadChannel read to throw StorageException"); + } catch (StorageException ex) { + StringBuilder messageBuilder = new StringBuilder(); + messageBuilder.append("Blob ").append(blobId).append(" was updated while reading"); + assertEquals(messageBuilder.toString(), ex.getMessage()); + } + } + @Test public void testSaveAndRestore() throws IOException, ClassNotFoundException { byte[] firstResult = randomByteArray(DEFAULT_CHUNK_SIZE); @@ -173,9 +198,9 @@ public void testSaveAndRestore() throws IOException, ClassNotFoundException { ByteBuffer firstReadBuffer = ByteBuffer.allocate(42); ByteBuffer secondReadBuffer = ByteBuffer.allocate(DEFAULT_CHUNK_SIZE); expect(storageRpcMock.read(BLOB_ID.toPb(), EMPTY_RPC_OPTIONS, 0, DEFAULT_CHUNK_SIZE)) - .andReturn(firstResult); + 
.andReturn(StorageRpc.Tuple.of("etag", firstResult)); expect(storageRpcMock.read(BLOB_ID.toPb(), EMPTY_RPC_OPTIONS, 42, DEFAULT_CHUNK_SIZE)) - .andReturn(secondResult); + .andReturn(StorageRpc.Tuple.of("etag", secondResult)); replay(storageRpcMock); reader = new BlobReadChannelImpl(options, BLOB_ID, EMPTY_RPC_OPTIONS); reader.read(firstReadBuffer); diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BucketInfoTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BucketInfoTest.java index 4fa420b4b6e1..e0de2b77a899 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BucketInfoTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BucketInfoTest.java @@ -91,7 +91,7 @@ public void testToBuilder() { @Test public void testToBuilderIncomplete() { BucketInfo incompleteBucketInfo = BucketInfo.builder("b").build(); - assertEquals(incompleteBucketInfo.name(), incompleteBucketInfo.toBuilder().build().name()); + compareBuckets(incompleteBucketInfo, incompleteBucketInfo.toBuilder().build()); } @Test diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/ITStorageTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/ITStorageTest.java index 423e972a8de6..ed58690efde7 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/ITStorageTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/ITStorageTest.java @@ -19,11 +19,13 @@ import static java.nio.charset.StandardCharsets.UTF_8; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; +import com.google.api.client.util.Lists; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.gcloud.Page; @@ -48,6 +50,7 @@ import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Random; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; @@ -63,6 +66,7 @@ public class ITStorageTest { private static final String CONTENT_TYPE = "text/plain"; private static final byte[] BLOB_BYTE_CONTENT = {0xD, 0xE, 0xA, 0xD}; private static final String BLOB_STRING_CONTENT = "Hello Google Cloud Storage!"; + private static final int MAX_BATCH_DELETES = 100; @BeforeClass public static void beforeClass() { @@ -129,7 +133,8 @@ public void testCreateBlob() { BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); BlobInfo remoteBlob = storage.create(blob, BLOB_BYTE_CONTENT); assertNotNull(remoteBlob); - assertEquals(blob.blobId(), remoteBlob.blobId()); + assertEquals(blob.bucket(), remoteBlob.bucket()); + assertEquals(blob.name(), remoteBlob.name()); byte[] readBytes = storage.readAllBytes(BUCKET, blobName); assertArrayEquals(BLOB_BYTE_CONTENT, readBytes); assertTrue(storage.delete(BUCKET, blobName)); @@ -141,7 +146,8 @@ public void testCreateEmptyBlob() { BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); BlobInfo remoteBlob = storage.create(blob); assertNotNull(remoteBlob); - assertEquals(blob.blobId(), remoteBlob.blobId()); + assertEquals(blob.bucket(), remoteBlob.bucket()); + assertEquals(blob.name(), remoteBlob.name()); byte[] readBytes = storage.readAllBytes(BUCKET, blobName); assertArrayEquals(new 
byte[0], readBytes); assertTrue(storage.delete(BUCKET, blobName)); @@ -154,7 +160,8 @@ public void testCreateBlobStream() throws UnsupportedEncodingException { ByteArrayInputStream stream = new ByteArrayInputStream(BLOB_STRING_CONTENT.getBytes(UTF_8)); BlobInfo remoteBlob = storage.create(blob, stream); assertNotNull(remoteBlob); - assertEquals(blob.blobId(), remoteBlob.blobId()); + assertEquals(blob.bucket(), remoteBlob.bucket()); + assertEquals(blob.name(), remoteBlob.name()); assertEquals(blob.contentType(), remoteBlob.contentType()); byte[] readBytes = storage.readAllBytes(BUCKET, blobName); assertEquals(BLOB_STRING_CONTENT, new String(readBytes, UTF_8)); @@ -166,8 +173,9 @@ public void testCreateBlobFail() { String blobName = "test-create-blob-fail"; BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); assertNotNull(storage.create(blob)); + BlobInfo wrongGenerationBlob = BlobInfo.builder(BUCKET, blobName, -1L).build(); try { - storage.create(blob.toBuilder().generation(-1L).build(), BLOB_BYTE_CONTENT, + storage.create(wrongGenerationBlob, BLOB_BYTE_CONTENT, Storage.BlobTargetOption.generationMatch()); fail("StorageException was expected"); } catch (StorageException ex) { @@ -229,13 +237,39 @@ public void testGetBlobAllSelectedFields() { assertNotNull(storage.create(blob)); BlobInfo remoteBlob = storage.get(blob.blobId(), Storage.BlobGetOption.fields(BlobField.values())); - assertEquals(blob.blobId(), remoteBlob.blobId()); + assertEquals(blob.bucket(), remoteBlob.bucket()); + assertEquals(blob.name(), remoteBlob.name()); assertEquals(ImmutableMap.of("k", "v"), remoteBlob.metadata()); assertNotNull(remoteBlob.id()); assertNotNull(remoteBlob.selfLink()); assertTrue(storage.delete(BUCKET, blobName)); } + @Test + public void testGetBlobFail() { + String blobName = "test-get-blob-fail"; + BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); + assertNotNull(storage.create(blob)); + BlobId wrongGenerationBlob = BlobId.of(BUCKET, blobName); + try { + storage.get(wrongGenerationBlob, Storage.BlobGetOption.generationMatch(-1)); + fail("StorageException was expected"); + } catch (StorageException ex) { + // expected + } + assertTrue(storage.delete(BUCKET, blobName)); + } + + @Test + public void testGetBlobFailNonExistingGeneration() { + String blobName = "test-get-blob-fail-non-existing-generation"; + BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); + assertNotNull(storage.create(blob)); + BlobId wrongGenerationBlob = BlobId.of(BUCKET, blobName, -1L); + assertNull(storage.get(wrongGenerationBlob)); + assertTrue(storage.delete(BUCKET, blobName)); + } + @Test public void testListBlobsSelectedFields() { String[] blobNames = {"test-list-blobs-selected-fields-blob1", @@ -297,7 +331,8 @@ public void testUpdateBlob() { assertNotNull(storage.create(blob)); BlobInfo updatedBlob = storage.update(blob.toBuilder().contentType(CONTENT_TYPE).build()); assertNotNull(updatedBlob); - assertEquals(blob.blobId(), updatedBlob.blobId()); + assertEquals(blob.name(), updatedBlob.name()); + assertEquals(blob.bucket(), updatedBlob.bucket()); assertEquals(CONTENT_TYPE, updatedBlob.contentType()); assertTrue(storage.delete(BUCKET, blobName)); } @@ -316,7 +351,8 @@ public void testUpdateBlobReplaceMetadata() { assertNotNull(updatedBlob); assertNull(updatedBlob.metadata()); updatedBlob = storage.update(blob.toBuilder().metadata(newMetadata).build()); - assertEquals(blob.blobId(), updatedBlob.blobId()); + assertEquals(blob.name(), updatedBlob.name()); + assertEquals(blob.bucket(), 
updatedBlob.bucket()); assertEquals(newMetadata, updatedBlob.metadata()); assertTrue(storage.delete(BUCKET, blobName)); } @@ -334,7 +370,8 @@ public void testUpdateBlobMergeMetadata() { assertNotNull(storage.create(blob)); BlobInfo updatedBlob = storage.update(blob.toBuilder().metadata(newMetadata).build()); assertNotNull(updatedBlob); - assertEquals(blob.blobId(), updatedBlob.blobId()); + assertEquals(blob.name(), updatedBlob.name()); + assertEquals(blob.bucket(), updatedBlob.bucket()); assertEquals(expectedMetadata, updatedBlob.metadata()); assertTrue(storage.delete(BUCKET, blobName)); } @@ -354,7 +391,8 @@ public void testUpdateBlobUnsetMetadata() { assertNotNull(storage.create(blob)); BlobInfo updatedBlob = storage.update(blob.toBuilder().metadata(newMetadata).build()); assertNotNull(updatedBlob); - assertEquals(blob.blobId(), updatedBlob.blobId()); + assertEquals(blob.name(), updatedBlob.name()); + assertEquals(blob.bucket(), updatedBlob.bucket()); assertEquals(expectedMetadata, updatedBlob.metadata()); assertTrue(storage.delete(BUCKET, blobName)); } @@ -364,9 +402,11 @@ public void testUpdateBlobFail() { String blobName = "test-update-blob-fail"; BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); assertNotNull(storage.create(blob)); + BlobInfo wrongGenerationBlob = BlobInfo.builder(BUCKET, blobName, -1L) + .contentType(CONTENT_TYPE) + .build(); try { - storage.update(blob.toBuilder().contentType(CONTENT_TYPE).generation(-1L).build(), - Storage.BlobTargetOption.generationMatch()); + storage.update(wrongGenerationBlob, Storage.BlobTargetOption.generationMatch()); fail("StorageException was expected"); } catch (StorageException ex) { // expected @@ -380,6 +420,14 @@ public void testDeleteNonExistingBlob() { assertTrue(!storage.delete(BUCKET, blobName)); } + @Test + public void testDeleteBlobNonExistingGeneration() { + String blobName = "test-delete-blob-non-existing-generation"; + BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); + assertNotNull(storage.create(blob)); + assertTrue(!storage.delete(BlobId.of(BUCKET, blobName, -1L))); + } + @Test public void testDeleteBlobFail() { String blobName = "test-delete-blob-fail"; @@ -408,7 +456,8 @@ public void testComposeBlob() { Storage.ComposeRequest.of(ImmutableList.of(sourceBlobName1, sourceBlobName2), targetBlob); BlobInfo remoteBlob = storage.compose(req); assertNotNull(remoteBlob); - assertEquals(targetBlob.blobId(), remoteBlob.blobId()); + assertEquals(targetBlob.name(), remoteBlob.name()); + assertEquals(targetBlob.bucket(), remoteBlob.bucket()); byte[] readBytes = storage.readAllBytes(BUCKET, targetBlobName); byte[] composedBytes = Arrays.copyOf(BLOB_BYTE_CONTENT, BLOB_BYTE_CONTENT.length * 2); System.arraycopy(BLOB_BYTE_CONTENT, 0, composedBytes, BLOB_BYTE_CONTENT.length, @@ -491,12 +540,12 @@ public void testCopyBlobUpdateMetadata() { @Test public void testCopyBlobFail() { String sourceBlobName = "test-copy-blob-source-fail"; - BlobId source = BlobId.of(BUCKET, sourceBlobName); + BlobId source = BlobId.of(BUCKET, sourceBlobName, -1L); assertNotNull(storage.create(BlobInfo.builder(source).build(), BLOB_BYTE_CONTENT)); String targetBlobName = "test-copy-blob-target-fail"; BlobInfo target = BlobInfo.builder(BUCKET, targetBlobName).contentType(CONTENT_TYPE).build(); Storage.CopyRequest req = Storage.CopyRequest.builder() - .source(source) + .source(BUCKET, sourceBlobName) .sourceOptions(Storage.BlobSourceOption.generationMatch(-1L)) .target(target) .build(); @@ -506,6 +555,17 @@ public void testCopyBlobFail() { } catch 
       // expected
     }
+    Storage.CopyRequest req2 = Storage.CopyRequest.builder()
+        .source(source)
+        .sourceOptions(Storage.BlobSourceOption.generationMatch())
+        .target(target)
+        .build();
+    try {
+      storage.copy(req2);
+      fail("StorageException was expected");
+    } catch (StorageException ex) {
+      // expected
+    }
     assertTrue(storage.delete(BUCKET, sourceBlobName));
   }
@@ -531,8 +591,10 @@ public void testBatchRequest() {
     assertEquals(0, updateResponse.gets().size());
     BlobInfo remoteUpdatedBlob1 = updateResponse.updates().get(0).get();
     BlobInfo remoteUpdatedBlob2 = updateResponse.updates().get(1).get();
-    assertEquals(sourceBlob1.blobId(), remoteUpdatedBlob1.blobId());
-    assertEquals(sourceBlob2.blobId(), remoteUpdatedBlob2.blobId());
+    assertEquals(sourceBlob1.bucket(), remoteUpdatedBlob1.bucket());
+    assertEquals(sourceBlob1.name(), remoteUpdatedBlob1.name());
+    assertEquals(sourceBlob2.bucket(), remoteUpdatedBlob2.bucket());
+    assertEquals(sourceBlob2.name(), remoteUpdatedBlob2.name());
     assertEquals(updatedBlob1.contentType(), remoteUpdatedBlob1.contentType());
     assertEquals(updatedBlob2.contentType(), remoteUpdatedBlob2.contentType());
 
@@ -563,24 +625,78 @@ public void testBatchRequest() {
     assertTrue(deleteResponse.deletes().get(1).get());
   }
 
+  @Test
+  public void testBatchRequestManyDeletes() {
+    List<BlobId> blobsToDelete = Lists.newArrayListWithCapacity(2 * MAX_BATCH_DELETES);
+    for (int i = 0; i < 2 * MAX_BATCH_DELETES; i++) {
+      blobsToDelete.add(BlobId.of(BUCKET, "test-batch-request-many-deletes-blob-" + i));
+    }
+    BatchRequest.Builder builder = BatchRequest.builder();
+    for (BlobId blob : blobsToDelete) {
+      builder.delete(blob);
+    }
+    String sourceBlobName1 = "test-batch-request-many-deletes-source-blob-1";
+    String sourceBlobName2 = "test-batch-request-many-deletes-source-blob-2";
+    BlobInfo sourceBlob1 = BlobInfo.builder(BUCKET, sourceBlobName1).build();
+    BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build();
+    assertNotNull(storage.create(sourceBlob1));
+    assertNotNull(storage.create(sourceBlob2));
+    BlobInfo updatedBlob2 = sourceBlob2.toBuilder().contentType(CONTENT_TYPE).build();
+
+    BatchRequest updateRequest = builder
+        .get(BUCKET, sourceBlobName1)
+        .update(updatedBlob2)
+        .build();
+    BatchResponse response = storage.apply(updateRequest);
+    assertEquals(2 * MAX_BATCH_DELETES, response.deletes().size());
+    assertEquals(1, response.updates().size());
+    assertEquals(1, response.gets().size());
+
+    // Check deletes
+    for (BatchResponse.Result<Boolean> deleteResult : response.deletes()) {
+      assertFalse(deleteResult.failed());
+      assertFalse(deleteResult.get());
+    }
+
+    // Check updates
+    BlobInfo remoteUpdatedBlob2 = response.updates().get(0).get();
+    assertEquals(sourceBlob2.bucket(), remoteUpdatedBlob2.bucket());
+    assertEquals(sourceBlob2.name(), remoteUpdatedBlob2.name());
+    assertEquals(updatedBlob2.contentType(), remoteUpdatedBlob2.contentType());
+
+    // Check gets
+    BlobInfo remoteBlob1 = response.gets().get(0).get();
+    assertEquals(sourceBlob1.bucket(), remoteBlob1.bucket());
+    assertEquals(sourceBlob1.name(), remoteBlob1.name());
+
+    assertTrue(storage.delete(BUCKET, sourceBlobName1));
+    assertTrue(storage.delete(BUCKET, sourceBlobName2));
+  }
+
   @Test
   public void testBatchRequestFail() {
     String blobName = "test-batch-request-blob-fail";
     BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build();
     assertNotNull(storage.create(blob));
-    BlobInfo updatedBlob = blob.toBuilder().generation(-1L).build();
+    BlobInfo updatedBlob = BlobInfo.builder(BUCKET, blobName, -1L).build();
     BatchRequest batchRequest = BatchRequest.builder()
         .update(updatedBlob, Storage.BlobTargetOption.generationMatch())
         .delete(BUCKET, blobName, Storage.BlobSourceOption.generationMatch(-1L))
+        .delete(BlobId.of(BUCKET, blobName, -1L))
         .get(BUCKET, blobName, Storage.BlobGetOption.generationMatch(-1L))
+        .get(BlobId.of(BUCKET, blobName, -1L))
         .build();
-    BatchResponse updateResponse = storage.apply(batchRequest);
-    assertEquals(1, updateResponse.updates().size());
-    assertEquals(1, updateResponse.deletes().size());
-    assertEquals(1, updateResponse.gets().size());
-    assertTrue(updateResponse.updates().get(0).failed());
-    assertTrue(updateResponse.gets().get(0).failed());
-    assertTrue(updateResponse.deletes().get(0).failed());
+    BatchResponse batchResponse = storage.apply(batchRequest);
+    assertEquals(1, batchResponse.updates().size());
+    assertEquals(2, batchResponse.deletes().size());
+    assertEquals(2, batchResponse.gets().size());
+    assertTrue(batchResponse.updates().get(0).failed());
+    assertTrue(batchResponse.gets().get(0).failed());
+    assertFalse(batchResponse.gets().get(1).failed());
+    assertNull(batchResponse.gets().get(1).get());
+    assertTrue(batchResponse.deletes().get(0).failed());
+    assertFalse(batchResponse.deletes().get(1).failed());
+    assertFalse(batchResponse.deletes().get(1).get());
     assertTrue(storage.delete(BUCKET, blobName));
   }
@@ -648,13 +764,63 @@ public void testReadChannelFail() throws IOException {
     } catch (StorageException ex) {
       // expected
     }
+    try (BlobReadChannel reader =
+        storage.reader(blob.blobId(), Storage.BlobSourceOption.generationMatch(-1L))) {
+      reader.read(ByteBuffer.allocate(42));
+      fail("StorageException was expected");
+    } catch (StorageException ex) {
+      // expected
+    }
+    BlobId blobIdWrongGeneration = BlobId.of(BUCKET, blobName, -1L);
+    try (BlobReadChannel reader =
+        storage.reader(blobIdWrongGeneration, Storage.BlobSourceOption.generationMatch())) {
+      reader.read(ByteBuffer.allocate(42));
+      fail("StorageException was expected");
+    } catch (StorageException ex) {
+      // expected
+    }
+    assertTrue(storage.delete(BUCKET, blobName));
+  }
+
+  @Test
+  public void testReadChannelFailUpdatedGeneration() throws IOException {
+    String blobName = "test-read-blob-fail-updated-generation";
+    BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build();
+    Random random = new Random();
+    int chunkSize = 1024;
+    int blobSize = 2 * chunkSize;
+    byte[] content = new byte[blobSize];
+    random.nextBytes(content);
+    BlobInfo remoteBlob = storage.create(blob, content);
+    assertNotNull(remoteBlob);
+    assertEquals(blobSize, (long) remoteBlob.size());
+    try (BlobReadChannel reader = storage.reader(blob.blobId())) {
+      reader.chunkSize(chunkSize);
+      ByteBuffer readBytes = ByteBuffer.allocate(chunkSize);
+      int numReadBytes = reader.read(readBytes);
+      assertEquals(chunkSize, numReadBytes);
+      assertArrayEquals(Arrays.copyOf(content, chunkSize), readBytes.array());
+      try (BlobWriteChannel writer = storage.writer(blob)) {
+        byte[] newContent = new byte[blobSize];
+        random.nextBytes(newContent);
+        int numWrittenBytes = writer.write(ByteBuffer.wrap(newContent));
+        assertEquals(blobSize, numWrittenBytes);
+      }
+      readBytes = ByteBuffer.allocate(chunkSize);
+      reader.read(readBytes);
+      fail("StorageException was expected");
+    } catch (StorageException ex) {
+      StringBuilder messageBuilder = new StringBuilder();
+      messageBuilder.append("Blob ").append(blob.blobId()).append(" was updated while reading");
+      assertEquals(messageBuilder.toString(), ex.getMessage());
+    }
     assertTrue(storage.delete(BUCKET, blobName));
   }
 
   @Test
   public void testWriteChannelFail() throws IOException {
     String blobName = "test-write-channel-blob-fail";
-    BlobInfo blob = BlobInfo.builder(BUCKET, blobName).generation(-1L).build();
+    BlobInfo blob = BlobInfo.builder(BUCKET, blobName, -1L).build();
     try {
       try (BlobWriteChannel writer =
           storage.writer(blob, Storage.BlobWriteOption.generationMatch())) {
@@ -707,7 +873,8 @@ public void testPostSignedUrl() throws IOException {
     connection.connect();
     BlobInfo remoteBlob = storage.get(BUCKET, blobName);
     assertNotNull(remoteBlob);
-    assertEquals(blob.blobId(), remoteBlob.blobId());
+    assertEquals(blob.bucket(), remoteBlob.bucket());
+    assertEquals(blob.name(), remoteBlob.name());
     assertTrue(storage.delete(BUCKET, blobName));
   }
@@ -720,8 +887,10 @@ public void testGetBlobs() {
     assertNotNull(storage.create(sourceBlob1));
     assertNotNull(storage.create(sourceBlob2));
     List<BlobInfo> remoteBlobs = storage.get(sourceBlob1.blobId(), sourceBlob2.blobId());
-    assertEquals(sourceBlob1.blobId(), remoteBlobs.get(0).blobId());
-    assertEquals(sourceBlob2.blobId(), remoteBlobs.get(1).blobId());
+    assertEquals(sourceBlob1.bucket(), remoteBlobs.get(0).bucket());
+    assertEquals(sourceBlob1.name(), remoteBlobs.get(0).name());
+    assertEquals(sourceBlob2.bucket(), remoteBlobs.get(1).bucket());
+    assertEquals(sourceBlob2.name(), remoteBlobs.get(1).name());
     assertTrue(storage.delete(BUCKET, sourceBlobName1));
     assertTrue(storage.delete(BUCKET, sourceBlobName2));
   }
@@ -734,7 +903,8 @@ public void testGetBlobsFail() {
     BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build();
     assertNotNull(storage.create(sourceBlob1));
     List<BlobInfo> remoteBlobs = storage.get(sourceBlob1.blobId(), sourceBlob2.blobId());
-    assertEquals(sourceBlob1.blobId(), remoteBlobs.get(0).blobId());
+    assertEquals(sourceBlob1.bucket(), remoteBlobs.get(0).bucket());
+    assertEquals(sourceBlob1.name(), remoteBlobs.get(0).name());
     assertNull(remoteBlobs.get(1));
     assertTrue(storage.delete(BUCKET, sourceBlobName1));
   }
@@ -777,9 +947,11 @@ public void testUpdateBlobs() {
     List<BlobInfo> updatedBlobs = storage.update(
         remoteBlob1.toBuilder().contentType(CONTENT_TYPE).build(),
         remoteBlob2.toBuilder().contentType(CONTENT_TYPE).build());
-    assertEquals(sourceBlob1.blobId(), updatedBlobs.get(0).blobId());
+    assertEquals(sourceBlob1.bucket(), updatedBlobs.get(0).bucket());
+    assertEquals(sourceBlob1.name(), updatedBlobs.get(0).name());
     assertEquals(CONTENT_TYPE, updatedBlobs.get(0).contentType());
-    assertEquals(sourceBlob2.blobId(), updatedBlobs.get(1).blobId());
+    assertEquals(sourceBlob2.bucket(), updatedBlobs.get(1).bucket());
+    assertEquals(sourceBlob2.name(), updatedBlobs.get(1).name());
     assertEquals(CONTENT_TYPE, updatedBlobs.get(1).contentType());
     assertTrue(storage.delete(BUCKET, sourceBlobName1));
     assertTrue(storage.delete(BUCKET, sourceBlobName2));
@@ -796,7 +968,8 @@ public void testUpdateBlobsFail() {
     List<BlobInfo> updatedBlobs = storage.update(
         remoteBlob1.toBuilder().contentType(CONTENT_TYPE).build(),
         sourceBlob2.toBuilder().contentType(CONTENT_TYPE).build());
-    assertEquals(sourceBlob1.blobId(), updatedBlobs.get(0).blobId());
+    assertEquals(sourceBlob1.bucket(), updatedBlobs.get(0).bucket());
+    assertEquals(sourceBlob1.name(), updatedBlobs.get(0).name());
     assertEquals(CONTENT_TYPE, updatedBlobs.get(0).contentType());
     assertNull(updatedBlobs.get(1));
     assertTrue(storage.delete(BUCKET, sourceBlobName1));
diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageImplTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageImplTest.java
index f07c7000813e..32a466a9d551 100644
--- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageImplTest.java
+++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageImplTest.java
@@ -89,8 +89,8 @@ public class StorageImplTest {
   private static final BucketInfo BUCKET_INFO2 = BucketInfo.builder(BUCKET_NAME2).build();
 
   // BlobInfo objects
-  private static final BlobInfo BLOB_INFO1 = BlobInfo.builder(BUCKET_NAME1, BLOB_NAME1)
-      .metageneration(42L).generation(24L).contentType("application/json").md5("md5string").build();
+  private static final BlobInfo BLOB_INFO1 = BlobInfo.builder(BUCKET_NAME1, BLOB_NAME1, 24L)
+      .metageneration(42L).contentType("application/json").md5("md5string").build();
   private static final BlobInfo BLOB_INFO2 = BlobInfo.builder(BUCKET_NAME1, BLOB_NAME2).build();
   private static final BlobInfo BLOB_INFO3 = BlobInfo.builder(BUCKET_NAME1, BLOB_NAME3).build();
@@ -157,6 +157,8 @@ public class StorageImplTest {
       Storage.BlobGetOption.metagenerationMatch(BLOB_INFO1.metageneration());
   private static final Storage.BlobGetOption BLOB_GET_GENERATION =
       Storage.BlobGetOption.generationMatch(BLOB_INFO1.generation());
+  private static final Storage.BlobGetOption BLOB_GET_GENERATION_FROM_BLOB_ID =
+      Storage.BlobGetOption.generationMatch();
   private static final Storage.BlobGetOption BLOB_GET_FIELDS =
       Storage.BlobGetOption.fields(Storage.BlobField.CONTENT_TYPE, Storage.BlobField.CRC32C);
   private static final Storage.BlobGetOption BLOB_GET_EMPTY_FIELDS =
@@ -168,6 +170,8 @@ public class StorageImplTest {
       Storage.BlobSourceOption.metagenerationMatch(BLOB_INFO1.metageneration());
   private static final Storage.BlobSourceOption BLOB_SOURCE_GENERATION =
       Storage.BlobSourceOption.generationMatch(BLOB_INFO1.generation());
+  private static final Storage.BlobSourceOption BLOB_SOURCE_GENERATION_FROM_BLOB_ID =
+      Storage.BlobSourceOption.generationMatch();
   private static final Map<StorageRpc.Option, ?> BLOB_SOURCE_OPTIONS = ImmutableMap.of(
       StorageRpc.Option.IF_METAGENERATION_MATCH, BLOB_SOURCE_METAGENERATION.value(),
       StorageRpc.Option.IF_GENERATION_MATCH, BLOB_SOURCE_GENERATION.value());
@@ -454,6 +458,18 @@ public void testGetBlobWithOptions() {
     assertEquals(BLOB_INFO1, blob);
   }
 
+  @Test
+  public void testGetBlobWithOptionsFromBlobId() {
+    EasyMock.expect(
+        storageRpcMock.get(BLOB_INFO1.blobId().toPb(), BLOB_GET_OPTIONS))
+        .andReturn(BLOB_INFO1.toPb());
+    EasyMock.replay(storageRpcMock);
+    storage = options.service();
+    BlobInfo blob =
+        storage.get(BLOB_INFO1.blobId(), BLOB_GET_METAGENERATION, BLOB_GET_GENERATION_FROM_BLOB_ID);
+    assertEquals(BLOB_INFO1, blob);
+  }
+
   @Test
   public void testGetBlobWithSelectedFields() {
     Capture<Map<StorageRpc.Option, Object>> capturedOptions =
@@ -766,6 +782,17 @@ public void testDeleteBlobWithOptions() {
         BLOB_SOURCE_METAGENERATION));
   }
 
+  @Test
+  public void testDeleteBlobWithOptionsFromBlobId() {
+    EasyMock.expect(
+        storageRpcMock.delete(BLOB_INFO1.blobId().toPb(), BLOB_SOURCE_OPTIONS))
+        .andReturn(true);
+    EasyMock.replay(storageRpcMock);
+    storage = options.service();
+    assertTrue(storage.delete(BLOB_INFO1.blobId(), BLOB_SOURCE_GENERATION_FROM_BLOB_ID,
+        BLOB_SOURCE_METAGENERATION));
+  }
+
   @Test
   public void testCompose() {
     Storage.ComposeRequest req = Storage.ComposeRequest.builder()
@@ -831,6 +858,26 @@ public void testCopyWithOptions() {
     assertTrue(!writer.isDone());
   }
 
+  @Test
+  public void testCopyWithOptionsFromBlobId() {
+    CopyRequest request = Storage.CopyRequest.builder()
+        .source(BLOB_INFO1.blobId())
+        .sourceOptions(BLOB_SOURCE_GENERATION_FROM_BLOB_ID, BLOB_SOURCE_METAGENERATION)
+        .target(BLOB_INFO1, BLOB_TARGET_GENERATION, BLOB_TARGET_METAGENERATION)
+        .build();
+    StorageRpc.RewriteRequest rpcRequest = new StorageRpc.RewriteRequest(request.source().toPb(),
+        BLOB_SOURCE_OPTIONS_COPY, request.target().toPb(), BLOB_TARGET_OPTIONS_COMPOSE, null);
+    StorageRpc.RewriteResponse rpcResponse =
+        new StorageRpc.RewriteResponse(rpcRequest, null, 42L, false, "token", 21L);
+    EasyMock.expect(storageRpcMock.openRewrite(rpcRequest)).andReturn(rpcResponse);
+    EasyMock.replay(storageRpcMock);
+    storage = options.service();
+    CopyWriter writer = storage.copy(request);
+    assertEquals(42L, writer.blobSize());
+    assertEquals(21L, writer.totalBytesCopied());
+    assertTrue(!writer.isDone());
+  }
+
   @Test
   public void testCopyMultipleRequests() {
     CopyRequest request = Storage.CopyRequest.of(BLOB_INFO1.blobId(), BLOB_INFO2.blobId());
@@ -877,6 +924,18 @@ public void testReadAllBytesWithOptions() {
     assertArrayEquals(BLOB_CONTENT, readBytes);
   }
 
+  @Test
+  public void testReadAllBytesWithOptionsFromBlobId() {
+    EasyMock.expect(
+        storageRpcMock.load(BLOB_INFO1.blobId().toPb(), BLOB_SOURCE_OPTIONS))
+        .andReturn(BLOB_CONTENT);
+    EasyMock.replay(storageRpcMock);
+    storage = options.service();
+    byte[] readBytes = storage.readAllBytes(BLOB_INFO1.blobId(),
+        BLOB_SOURCE_GENERATION_FROM_BLOB_ID, BLOB_SOURCE_METAGENERATION);
+    assertArrayEquals(BLOB_CONTENT, readBytes);
+  }
+
   @Test
   public void testApply() {
     BatchRequest req = BatchRequest.builder()
@@ -971,7 +1030,7 @@ public void testReaderWithOptions() throws IOException {
     byte[] result = new byte[DEFAULT_CHUNK_SIZE];
     EasyMock.expect(
         storageRpcMock.read(BLOB_INFO2.toPb(), BLOB_SOURCE_OPTIONS, 0, DEFAULT_CHUNK_SIZE))
-        .andReturn(result);
+        .andReturn(StorageRpc.Tuple.of("etag", result));
     EasyMock.replay(storageRpcMock);
     storage = options.service();
     BlobReadChannel channel = storage.reader(BUCKET_NAME1, BLOB_NAME2, BLOB_SOURCE_GENERATION,
@@ -981,6 +1040,21 @@ public void testReaderWithOptions() throws IOException {
     channel.read(ByteBuffer.allocate(42));
   }
 
+  @Test
+  public void testReaderWithOptionsFromBlobId() throws IOException {
+    byte[] result = new byte[DEFAULT_CHUNK_SIZE];
+    EasyMock.expect(
+        storageRpcMock.read(BLOB_INFO1.blobId().toPb(), BLOB_SOURCE_OPTIONS, 0, DEFAULT_CHUNK_SIZE))
+        .andReturn(StorageRpc.Tuple.of("etag", result));
+    EasyMock.replay(storageRpcMock);
+    storage = options.service();
+    BlobReadChannel channel = storage.reader(BLOB_INFO1.blobId(),
+        BLOB_SOURCE_GENERATION_FROM_BLOB_ID, BLOB_SOURCE_METAGENERATION);
+    assertNotNull(channel);
+    assertTrue(channel.isOpen());
+    channel.read(ByteBuffer.allocate(42));
+  }
+
   @Test
   public void testWriter() {
     BlobInfo.Builder infoBuilder = BLOB_INFO1.toBuilder();
diff --git a/gcloud-java/README.md b/gcloud-java/README.md
index eaaed21af5fe..e381ca80cdaa 100644
--- a/gcloud-java/README.md
+++ b/gcloud-java/README.md
@@ -25,18 +25,23 @@ If you are using Maven, add this to your pom.xml file
 <dependency>
   <groupId>com.google.gcloud</groupId>
   <artifactId>gcloud-java</artifactId>
-  <version>0.0.11</version>
+  <version>0.0.12</version>
 </dependency>
 ```
 If you are using Gradle, add this to your dependencies
 ```Groovy
-compile 'com.google.gcloud:gcloud-java:jar:0.0.11'
+compile 'com.google.gcloud:gcloud-java:jar:0.0.12'
 ```
 If you are using SBT, add this to your dependencies
 ```Scala
-libraryDependencies += "com.google.gcloud" % "gcloud-java" % "0.0.11"
+libraryDependencies += "com.google.gcloud" % "gcloud-java" % "0.0.12"
 ```
 
+Troubleshooting
+---------------
+
+To get help, follow the `gcloud-java` links in the `gcloud-*` [shared Troubleshooting document](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/troubleshooting/readme.md#troubleshooting).
+
 Java Versions
 -------------
@@ -56,7 +61,9 @@ Contributing
 Contributions to this library are always welcome and highly encouraged.
 
-See [CONTRIBUTING] for more information on how to get started.
+See `gcloud-java`'s [CONTRIBUTING] documentation and the `gcloud-*` [shared documentation](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/contributing/readme.md#how-to-contribute-to-gcloud) for more information on how to get started.
+
+Please note that this project is released with a Contributor Code of Conduct. By participating in this project you agree to abide by its terms. See [Code of Conduct][code-of-conduct] for more information.
 
 License
 -------
@@ -65,6 +72,7 @@ Apache 2.0 - See [LICENSE] for more information.
 
 [CONTRIBUTING]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CONTRIBUTING.md
+[code-of-conduct]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CODE_OF_CONDUCT.md#contributor-code-of-conduct
 [LICENSE]: https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/LICENSE
 [cloud-platform]: https://cloud.google.com/
 [cloud-datastore]: https://cloud.google.com/datastore/docs
diff --git a/gcloud-java/pom.xml b/gcloud-java/pom.xml
index f0ee3bf758f7..e6f9034d0087 100644
--- a/gcloud-java/pom.xml
+++ b/gcloud-java/pom.xml
@@ -11,7 +11,7 @@
     <groupId>com.google.gcloud</groupId>
     <artifactId>gcloud-java-pom</artifactId>
-    <version>0.0.12-SNAPSHOT</version>
+    <version>0.0.13-SNAPSHOT</version>
diff --git a/pom.xml b/pom.xml
index ce0c8d32cb49..84989fe5186b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -4,7 +4,7 @@
   <groupId>com.google.gcloud</groupId>
   <artifactId>gcloud-java-pom</artifactId>
   <packaging>pom</packaging>
-  <version>0.0.12-SNAPSHOT</version>
+  <version>0.0.13-SNAPSHOT</version>
   <name>GCloud Java</name>
   <url>https://github.com/GoogleCloudPlatform/gcloud-java</url>
diff --git a/utilities/integration_test_env.sh b/utilities/integration_test_env.sh
index f7aca1a8a623..a1bebe4dcb69 100755
--- a/utilities/integration_test_env.sh
+++ b/utilities/integration_test_env.sh
@@ -1,3 +1,3 @@
 # Export test env variables
-export GCLOUD_TESTS_PROJECT_ID="gcloud-devel"
-export GCLOUD_TESTS_KEY=$TRAVIS_BUILD_DIR/signing-tools/gcloud-devel-travis.json
+export GCLOUD_PROJECT="gcloud-devel"
+export GOOGLE_APPLICATION_CREDENTIALS=$TRAVIS_BUILD_DIR/signing-tools/gcloud-devel-travis.json
diff --git a/utilities/update_docs_version.sh b/utilities/update_docs_version.sh
index 4b1641a0bd81..4fc0aa772963 100755
--- a/utilities/update_docs_version.sh
+++ b/utilities/update_docs_version.sh
@@ -21,6 +21,6 @@ if [ "${RELEASED_VERSION##*-}" != "SNAPSHOT" ]; then
   git add README.md */README.md
   git config --global user.name "travis-ci"
   git config --global user.email "travis@travis-ci.org"
-  git commit -m "Updating version in README files."
+  git commit -m "Updating version in README files. [ci skip]"
   git push --quiet "https://${CI_DEPLOY_USERNAME}:${CI_DEPLOY_PASSWORD}@github.com/GoogleCloudPlatform/gcloud-java.git" HEAD:master > /dev/null 2>&1
 fi
diff --git a/utilities/verify.sh b/utilities/verify.sh
index 463180415e98..d98e4ab53513 100755
--- a/utilities/verify.sh
+++ b/utilities/verify.sh
@@ -4,7 +4,7 @@
 source ./utilities/integration_test_env.sh
 # This script is used by Travis-CI to run tests.
 # This script is referenced in .travis.yml.
-if [ "${TRAVIS_BRANCH}" == "master" -a "${TRAVIS_PULL_REQUEST}" == "false" ]; then
+if [ "${TRAVIS_PULL_REQUEST}" == "false" ]; then
   # Get signing tools and API keyfile
   openssl aes-256-cbc -K $encrypted_631490ecae8f_key -iv $encrypted_631490ecae8f_iv -in target/travis/signing-tools.tar.enc -out $TRAVIS_BUILD_DIR/signing-tools.tar -d
   mkdir $TRAVIS_BUILD_DIR/signing-tools