diff --git a/README.md b/README.md index a908cef0bf35..a753a96c8b21 100644 --- a/README.md +++ b/README.md @@ -30,16 +30,16 @@ If you are using Maven, add this to your pom.xml file com.google.gcloud gcloud-java - 0.1.4 + 0.1.5 ``` If you are using Gradle, add this to your dependencies ```Groovy -compile 'com.google.gcloud:gcloud-java:0.1.4' +compile 'com.google.gcloud:gcloud-java:0.1.5' ``` If you are using SBT, add this to your dependencies ```Scala -libraryDependencies += "com.google.gcloud" % "gcloud-java" % "0.1.4" +libraryDependencies += "com.google.gcloud" % "gcloud-java" % "0.1.5" ``` Example Applications diff --git a/gcloud-java-bigquery/README.md b/gcloud-java-bigquery/README.md index 58633ba635f9..81b5db71bcac 100644 --- a/gcloud-java-bigquery/README.md +++ b/gcloud-java-bigquery/README.md @@ -22,16 +22,16 @@ If you are using Maven, add this to your pom.xml file com.google.gcloud gcloud-java-bigquery - 0.1.4 + 0.1.5 ``` If you are using Gradle, add this to your dependencies ```Groovy -compile 'com.google.gcloud:gcloud-java-bigquery:0.1.4' +compile 'com.google.gcloud:gcloud-java-bigquery:0.1.5' ``` If you are using SBT, add this to your dependencies ```Scala -libraryDependencies += "com.google.gcloud" % "gcloud-java-bigquery" % "0.1.4" +libraryDependencies += "com.google.gcloud" % "gcloud-java-bigquery" % "0.1.5" ``` Example Application @@ -185,7 +185,7 @@ Then add the following code to run the query and wait for the result: QueryRequest queryRequest = QueryRequest.builder("SELECT * FROM my_dataset_id.my_table_id") .maxWaitTime(60000L) - .maxResults(1000L) + .pageSize(1000L) .build(); // Request query to be executed and wait for results QueryResponse queryResponse = bigquery.query(queryRequest); diff --git a/gcloud-java-bigquery/pom.xml b/gcloud-java-bigquery/pom.xml index 34ddd7679e97..5c79f150c722 100644 --- a/gcloud-java-bigquery/pom.xml +++ b/gcloud-java-bigquery/pom.xml @@ -10,7 +10,7 @@ com.google.gcloud gcloud-java-pom - 0.1.5-SNAPSHOT + 0.1.6-SNAPSHOT gcloud-java-bigquery @@ -39,6 +39,13 @@ + + ${project.groupId} + gcloud-java-core + ${project.version} + test-jar + test + junit junit diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQuery.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQuery.java index 4b5d3ef0c81a..14e324a43370 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQuery.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQuery.java @@ -25,7 +25,7 @@ import com.google.common.collect.Sets; import com.google.gcloud.Page; import com.google.gcloud.Service; -import com.google.gcloud.spi.BigQueryRpc; +import com.google.gcloud.bigquery.spi.BigQueryRpc; import java.util.List; import java.util.Set; @@ -171,16 +171,16 @@ private DatasetListOption(BigQueryRpc.Option option, Object value) { } /** - * Returns an option to specify the maximum number of datasets to be returned. + * Returns an option to specify the maximum number of datasets returned per page. */ - public static DatasetListOption maxResults(long maxResults) { - return new DatasetListOption(BigQueryRpc.Option.MAX_RESULTS, maxResults); + public static DatasetListOption pageSize(long pageSize) { + return new DatasetListOption(BigQueryRpc.Option.MAX_RESULTS, pageSize); } /** * Returns an option to specify the page token from which to start listing datasets. 
*/ - public static DatasetListOption startPageToken(String pageToken) { + public static DatasetListOption pageToken(String pageToken) { return new DatasetListOption(BigQueryRpc.Option.PAGE_TOKEN, pageToken); } @@ -246,17 +246,17 @@ private TableListOption(BigQueryRpc.Option option, Object value) { } /** - * Returns an option to specify the maximum number of tables to be returned. + * Returns an option to specify the maximum number of tables returned per page. */ - public static TableListOption maxResults(long maxResults) { - checkArgument(maxResults >= 0); - return new TableListOption(BigQueryRpc.Option.MAX_RESULTS, maxResults); + public static TableListOption pageSize(long pageSize) { + checkArgument(pageSize >= 0); + return new TableListOption(BigQueryRpc.Option.MAX_RESULTS, pageSize); } /** * Returns an option to specify the page token from which to start listing tables. */ - public static TableListOption startPageToken(String pageToken) { + public static TableListOption pageToken(String pageToken) { return new TableListOption(BigQueryRpc.Option.PAGE_TOKEN, pageToken); } } @@ -295,17 +295,17 @@ private TableDataListOption(BigQueryRpc.Option option, Object value) { } /** - * Returns an option to specify the maximum number of rows to be returned. + * Returns an option to specify the maximum number of rows returned per page. */ - public static TableDataListOption maxResults(long maxResults) { - checkArgument(maxResults >= 0); - return new TableDataListOption(BigQueryRpc.Option.MAX_RESULTS, maxResults); + public static TableDataListOption pageSize(long pageSize) { + checkArgument(pageSize >= 0); + return new TableDataListOption(BigQueryRpc.Option.MAX_RESULTS, pageSize); } /** * Returns an option to specify the page token from which to start listing table data. */ - public static TableDataListOption startPageToken(String pageToken) { + public static TableDataListOption pageToken(String pageToken) { return new TableDataListOption(BigQueryRpc.Option.PAGE_TOKEN, pageToken); } @@ -352,17 +352,17 @@ public String apply(JobStatus.State state) { } /** - * Returns an option to specify the maximum number of jobs to be returned. + * Returns an option to specify the maximum number of jobs returned per page. */ - public static JobListOption maxResults(long maxResults) { - checkArgument(maxResults >= 0); - return new JobListOption(BigQueryRpc.Option.MAX_RESULTS, maxResults); + public static JobListOption pageSize(long pageSize) { + checkArgument(pageSize >= 0); + return new JobListOption(BigQueryRpc.Option.MAX_RESULTS, pageSize); } /** * Returns an option to specify the page token from which to start listing jobs. */ - public static JobListOption startPageToken(String pageToken) { + public static JobListOption pageToken(String pageToken) { return new JobListOption(BigQueryRpc.Option.PAGE_TOKEN, pageToken); } @@ -418,17 +418,17 @@ private QueryResultsOption(BigQueryRpc.Option option, Object value) { } /** - * Returns an option to specify the maximum number of rows to be returned. + * Returns an option to specify the maximum number of rows returned per page. */ - public static QueryResultsOption maxResults(long maxResults) { - checkArgument(maxResults >= 0); - return new QueryResultsOption(BigQueryRpc.Option.MAX_RESULTS, maxResults); + public static QueryResultsOption pageSize(long pageSize) { + checkArgument(pageSize >= 0); + return new QueryResultsOption(BigQueryRpc.Option.MAX_RESULTS, pageSize); } /** * Returns an option to specify the page token from which to start getting query results. 
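The hunks above and below rename the paging options (`maxResults` → `pageSize`, `startPageToken` → `pageToken`) across the list-option classes. A minimal sketch of how a caller would page through tables with the renamed options; the project id, dataset name, and page size are placeholder values, and the `Page<Table>` element type is inferred from the `listTables` signatures later in this file:

```java
import com.google.gcloud.Page;
import com.google.gcloud.bigquery.BigQuery;
import com.google.gcloud.bigquery.BigQueryOptions;
import com.google.gcloud.bigquery.Table;

public class ListTablesExample {
  public static void main(String... args) {
    // "my-project" and "my_dataset" are placeholders.
    BigQuery bigquery = BigQueryOptions.builder().projectId("my-project").build().service();
    // Ask for at most 100 tables per page instead of the old maxResults(100L).
    Page<Table> page = bigquery.listTables("my_dataset", BigQuery.TableListOption.pageSize(100L));
    for (Table table : page.values()) {
      System.out.println(table);
    }
    // A saved cursor can be passed back later via TableListOption.pageToken(cursor).
    String cursor = page.nextPageCursor();
    System.out.println("next page cursor: " + cursor);
  }
}
```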
*/ - public static QueryResultsOption startPageToken(String pageToken) { + public static QueryResultsOption pageToken(String pageToken) { return new QueryResultsOption(BigQueryRpc.Option.PAGE_TOKEN, pageToken); } @@ -457,35 +457,35 @@ public static QueryResultsOption maxWaitTime(long maxWaitTime) { * * @throws BigQueryException upon failure */ - Dataset create(DatasetInfo dataset, DatasetOption... options) throws BigQueryException; + Dataset create(DatasetInfo dataset, DatasetOption... options); /** * Creates a new table. * * @throws BigQueryException upon failure */ - Table create(TableInfo table, TableOption... options) throws BigQueryException; + Table create(TableInfo table, TableOption... options); /** * Creates a new job. * * @throws BigQueryException upon failure */ - Job create(JobInfo job, JobOption... options) throws BigQueryException; + Job create(JobInfo job, JobOption... options); /** * Returns the requested dataset or {@code null} if not found. * * @throws BigQueryException upon failure */ - Dataset getDataset(String datasetId, DatasetOption... options) throws BigQueryException; + Dataset getDataset(String datasetId, DatasetOption... options); /** * Returns the requested dataset or {@code null} if not found. * * @throws BigQueryException upon failure */ - Dataset getDataset(DatasetId datasetId, DatasetOption... options) throws BigQueryException; + Dataset getDataset(DatasetId datasetId, DatasetOption... options); /** * Lists the project's datasets. This method returns partial information on each dataset @@ -495,7 +495,7 @@ public static QueryResultsOption maxWaitTime(long maxWaitTime) { * * @throws BigQueryException upon failure */ - Page listDatasets(DatasetListOption... options) throws BigQueryException; + Page listDatasets(DatasetListOption... options); /** * Deletes the requested dataset. @@ -503,7 +503,7 @@ public static QueryResultsOption maxWaitTime(long maxWaitTime) { * @return {@code true} if dataset was deleted, {@code false} if it was not found * @throws BigQueryException upon failure */ - boolean delete(String datasetId, DatasetDeleteOption... options) throws BigQueryException; + boolean delete(String datasetId, DatasetDeleteOption... options); /** * Deletes the requested dataset. @@ -511,7 +511,7 @@ public static QueryResultsOption maxWaitTime(long maxWaitTime) { * @return {@code true} if dataset was deleted, {@code false} if it was not found * @throws BigQueryException upon failure */ - boolean delete(DatasetId datasetId, DatasetDeleteOption... options) throws BigQueryException; + boolean delete(DatasetId datasetId, DatasetDeleteOption... options); /** * Deletes the requested table. @@ -519,7 +519,7 @@ public static QueryResultsOption maxWaitTime(long maxWaitTime) { * @return {@code true} if table was deleted, {@code false} if it was not found * @throws BigQueryException upon failure */ - boolean delete(String datasetId, String tableId) throws BigQueryException; + boolean delete(String datasetId, String tableId); /** * Deletes the requested table. @@ -527,35 +527,35 @@ public static QueryResultsOption maxWaitTime(long maxWaitTime) { * @return {@code true} if table was deleted, {@code false} if it was not found * @throws BigQueryException upon failure */ - boolean delete(TableId tableId) throws BigQueryException; + boolean delete(TableId tableId); /** * Updates dataset information. * * @throws BigQueryException upon failure */ - Dataset update(DatasetInfo dataset, DatasetOption... 
options) throws BigQueryException; + Dataset update(DatasetInfo dataset, DatasetOption... options); /** * Updates table information. * * @throws BigQueryException upon failure */ - Table update(TableInfo table, TableOption... options) throws BigQueryException; + Table update(TableInfo table, TableOption... options); /** * Returns the requested table or {@code null} if not found. * * @throws BigQueryException upon failure */ - Table getTable(String datasetId, String tableId, TableOption... options) throws BigQueryException; + Table getTable(String datasetId, String tableId, TableOption... options); /** * Returns the requested table or {@code null} if not found. * * @throws BigQueryException upon failure */ - Table getTable(TableId tableId, TableOption... options) throws BigQueryException; + Table getTable(TableId tableId, TableOption... options); /** * Lists the tables in the dataset. This method returns partial information on each table @@ -566,7 +566,7 @@ public static QueryResultsOption maxWaitTime(long maxWaitTime) { * * @throws BigQueryException upon failure */ - Page listTables(String datasetId, TableListOption... options) throws BigQueryException; + Page
<Table> listTables(String datasetId, TableListOption... options); /** * Lists the tables in the dataset. This method returns partial information on each table @@ -577,14 +577,14 @@ public static QueryResultsOption maxWaitTime(long maxWaitTime) { * * @throws BigQueryException upon failure */ - Page<Table>
listTables(DatasetId datasetId, TableListOption... options) throws BigQueryException; + Page<Table>
listTables(DatasetId datasetId, TableListOption... options); /** * Sends an insert all request. * * @throws BigQueryException upon failure */ - InsertAllResponse insertAll(InsertAllRequest request) throws BigQueryException; + InsertAllResponse insertAll(InsertAllRequest request); /** * Lists the table's rows. @@ -592,36 +592,35 @@ public static QueryResultsOption maxWaitTime(long maxWaitTime) { * @throws BigQueryException upon failure */ Page> listTableData(String datasetId, String tableId, - TableDataListOption... options) throws BigQueryException; + TableDataListOption... options); /** * Lists the table's rows. * * @throws BigQueryException upon failure */ - Page> listTableData(TableId tableId, TableDataListOption... options) - throws BigQueryException; + Page> listTableData(TableId tableId, TableDataListOption... options); /** * Returns the requested job or {@code null} if not found. * * @throws BigQueryException upon failure */ - Job getJob(String jobId, JobOption... options) throws BigQueryException; + Job getJob(String jobId, JobOption... options); /** * Returns the requested job or {@code null} if not found. * * @throws BigQueryException upon failure */ - Job getJob(JobId jobId, JobOption... options) throws BigQueryException; + Job getJob(JobId jobId, JobOption... options); /** * Lists the jobs. * * @throws BigQueryException upon failure */ - Page listJobs(JobListOption... options) throws BigQueryException; + Page listJobs(JobListOption... options); /** * Sends a job cancel request. This call will return immediately. The job status can then be @@ -632,7 +631,7 @@ Page> listTableData(TableId tableId, TableDataListOption... opt * found * @throws BigQueryException upon failure */ - boolean cancel(String jobId) throws BigQueryException; + boolean cancel(String jobId); /** * Sends a job cancel request. This call will return immediately. The job status can then be @@ -643,21 +642,21 @@ Page> listTableData(TableId tableId, TableDataListOption... opt * found * @throws BigQueryException upon failure */ - boolean cancel(JobId tableId) throws BigQueryException; + boolean cancel(JobId tableId); /** * Runs the query associated with the request. * * @throws BigQueryException upon failure */ - QueryResponse query(QueryRequest request) throws BigQueryException; + QueryResponse query(QueryRequest request); /** * Returns results of the query associated with the provided job. * * @throws BigQueryException upon failure */ - QueryResponse getQueryResults(JobId job, QueryResultsOption... options) throws BigQueryException; + QueryResponse getQueryResults(JobId job, QueryResultsOption... options); /** * Returns a channel to write data to be inserted into a BigQuery table. 
Data format and other diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryException.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryException.java index a157afd25db2..e78734a2899e 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryException.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryException.java @@ -22,6 +22,7 @@ import com.google.gcloud.RetryHelper.RetryInterruptedException; import java.io.IOException; +import java.util.Objects; import java.util.Set; /** @@ -73,6 +74,23 @@ protected Set retryableErrors() { return RETRYABLE_ERRORS; } + @Override + public boolean equals(Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof BigQueryException)) { + return false; + } + BigQueryException other = (BigQueryException) obj; + return super.equals(other) && Objects.equals(error, other.error); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), error); + } + /** * Translate RetryHelperException to the BigQueryException that caused the error. This method will * always throw an exception. diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryImpl.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryImpl.java index 1158dd86c83d..27f4af5d5007 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryImpl.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryImpl.java @@ -35,7 +35,7 @@ import com.google.gcloud.PageImpl.NextPageFetcher; import com.google.gcloud.RetryHelper; import com.google.gcloud.bigquery.InsertAllRequest.RowToInsert; -import com.google.gcloud.spi.BigQueryRpc; +import com.google.gcloud.bigquery.spi.BigQueryRpc; import java.util.List; import java.util.Map; @@ -153,7 +153,7 @@ public QueryResult nextPage() { } @Override - public Dataset create(DatasetInfo dataset, DatasetOption... options) throws BigQueryException { + public Dataset create(DatasetInfo dataset, DatasetOption... options) { final com.google.api.services.bigquery.model.Dataset datasetPb = dataset.setProjectId(options().projectId()).toPb(); final Map optionsMap = optionMap(options); @@ -171,7 +171,7 @@ public com.google.api.services.bigquery.model.Dataset call() { } @Override - public Table create(TableInfo table, TableOption... options) throws BigQueryException { + public Table create(TableInfo table, TableOption... options) { final com.google.api.services.bigquery.model.Table tablePb = table.setProjectId(options().projectId()).toPb(); final Map optionsMap = optionMap(options); @@ -189,7 +189,7 @@ public com.google.api.services.bigquery.model.Table call() { } @Override - public Job create(JobInfo job, JobOption... options) throws BigQueryException { + public Job create(JobInfo job, JobOption... options) { final com.google.api.services.bigquery.model.Job jobPb = job.setProjectId(options().projectId()).toPb(); final Map optionsMap = optionMap(options); @@ -207,13 +207,12 @@ public com.google.api.services.bigquery.model.Job call() { } @Override - public Dataset getDataset(String datasetId, DatasetOption... options) throws BigQueryException { + public Dataset getDataset(String datasetId, DatasetOption... options) { return getDataset(DatasetId.of(datasetId), options); } @Override - public Dataset getDataset(final DatasetId datasetId, DatasetOption... 
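The signatures above drop the `throws BigQueryException` clauses, so `BigQueryException` is surfaced as an unchecked exception (the javadoc `@throws` notes are kept). A minimal sketch of handling it explicitly; the project and dataset ids are placeholders:

```java
import com.google.gcloud.bigquery.BigQuery;
import com.google.gcloud.bigquery.BigQueryException;
import com.google.gcloud.bigquery.BigQueryOptions;
import com.google.gcloud.bigquery.Dataset;

public class HandleBigQueryExceptionExample {
  public static void main(String... args) {
    BigQuery bigquery = BigQueryOptions.builder().projectId("my-project").build().service();
    try {
      // Returns null when the dataset does not exist; other failures throw BigQueryException.
      Dataset dataset = bigquery.getDataset("my_dataset");
      System.out.println(dataset == null ? "dataset not found" : dataset);
    } catch (BigQueryException e) {
      // No checked throws clause forces this; catch only where recovery makes sense.
      System.err.println("BigQuery call failed: " + e.getMessage());
    }
  }
}
```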
options) - throws BigQueryException { + public Dataset getDataset(final DatasetId datasetId, DatasetOption... options) { final Map optionsMap = optionMap(options); try { com.google.api.services.bigquery.model.Dataset answer = @@ -230,7 +229,7 @@ public com.google.api.services.bigquery.model.Dataset call() { } @Override - public Page listDatasets(DatasetListOption... options) throws BigQueryException { + public Page listDatasets(DatasetListOption... options) { return listDatasets(options(), optionMap(options)); } @@ -261,13 +260,12 @@ public Dataset apply(com.google.api.services.bigquery.model.Dataset dataset) { } @Override - public boolean delete(String datasetId, DatasetDeleteOption... options) throws BigQueryException { + public boolean delete(String datasetId, DatasetDeleteOption... options) { return delete(DatasetId.of(datasetId), options); } @Override - public boolean delete(final DatasetId datasetId, DatasetDeleteOption... options) - throws BigQueryException { + public boolean delete(final DatasetId datasetId, DatasetDeleteOption... options) { final Map optionsMap = optionMap(options); try { return runWithRetries(new Callable() { @@ -282,12 +280,12 @@ public Boolean call() { } @Override - public boolean delete(String datasetId, String tableId) throws BigQueryException { + public boolean delete(String datasetId, String tableId) { return delete(TableId.of(datasetId, tableId)); } @Override - public boolean delete(final TableId tableId) throws BigQueryException { + public boolean delete(final TableId tableId) { try { return runWithRetries(new Callable() { @Override @@ -301,7 +299,7 @@ public Boolean call() { } @Override - public Dataset update(DatasetInfo dataset, DatasetOption... options) throws BigQueryException { + public Dataset update(DatasetInfo dataset, DatasetOption... options) { final com.google.api.services.bigquery.model.Dataset datasetPb = dataset.setProjectId(options().projectId()).toPb(); final Map optionsMap = optionMap(options); @@ -319,7 +317,7 @@ public com.google.api.services.bigquery.model.Dataset call() { } @Override - public Table update(TableInfo table, TableOption... options) throws BigQueryException { + public Table update(TableInfo table, TableOption... options) { final com.google.api.services.bigquery.model.Table tablePb = table.setProjectId(options().projectId()).toPb(); final Map optionsMap = optionMap(options); @@ -337,13 +335,12 @@ public com.google.api.services.bigquery.model.Table call() { } @Override - public Table getTable(final String datasetId, final String tableId, TableOption... options) - throws BigQueryException { + public Table getTable(final String datasetId, final String tableId, TableOption... options) { return getTable(TableId.of(datasetId, tableId), options); } @Override - public Table getTable(final TableId tableId, TableOption... options) throws BigQueryException { + public Table getTable(final TableId tableId, TableOption... options) { final Map optionsMap = optionMap(options); try { com.google.api.services.bigquery.model.Table answer = @@ -360,14 +357,12 @@ public com.google.api.services.bigquery.model.Table call() { } @Override - public Page
<Table> listTables(String datasetId, TableListOption... options) - throws BigQueryException { + public Page<Table>
listTables(String datasetId, TableListOption... options) { return listTables(datasetId, options(), optionMap(options)); } @Override - public Page<Table>
listTables(DatasetId datasetId, TableListOption... options) - throws BigQueryException { + public Page<Table>
listTables(DatasetId datasetId, TableListOption... options) { return listTables(datasetId.dataset(), options(), optionMap(options)); } @@ -399,7 +394,7 @@ public Table apply(com.google.api.services.bigquery.model.Table table) { } @Override - public InsertAllResponse insertAll(InsertAllRequest request) throws BigQueryException { + public InsertAllResponse insertAll(InsertAllRequest request) { final TableId tableId = request.table(); final TableDataInsertAllRequest requestPb = new TableDataInsertAllRequest(); requestPb.setIgnoreUnknownValues(request.ignoreUnknownValues()); @@ -418,13 +413,12 @@ public Rows apply(RowToInsert rowToInsert) { @Override public Page> listTableData(String datasetId, String tableId, - TableDataListOption... options) throws BigQueryException { + TableDataListOption... options) { return listTableData(TableId.of(datasetId, tableId), options(), optionMap(options)); } @Override - public Page> listTableData(TableId tableId, TableDataListOption... options) - throws BigQueryException { + public Page> listTableData(TableId tableId, TableDataListOption... options) { return listTableData(tableId, options(), optionMap(options)); } @@ -459,12 +453,12 @@ public List apply(TableRow rowPb) { } @Override - public Job getJob(String jobId, JobOption... options) throws BigQueryException { + public Job getJob(String jobId, JobOption... options) { return getJob(JobId.of(jobId), options); } @Override - public Job getJob(final JobId jobId, JobOption... options) throws BigQueryException { + public Job getJob(final JobId jobId, JobOption... options) { final Map optionsMap = optionMap(options); try { com.google.api.services.bigquery.model.Job answer = @@ -481,7 +475,7 @@ public com.google.api.services.bigquery.model.Job call() { } @Override - public Page listJobs(JobListOption... options) throws BigQueryException { + public Page listJobs(JobListOption... options) { return listJobs(options(), optionMap(options)); } @@ -508,12 +502,12 @@ public Job apply(com.google.api.services.bigquery.model.Job job) { } @Override - public boolean cancel(String jobId) throws BigQueryException { + public boolean cancel(String jobId) { return cancel(JobId.of(jobId)); } @Override - public boolean cancel(final JobId jobId) throws BigQueryException { + public boolean cancel(final JobId jobId) { try { return runWithRetries(new Callable() { @Override @@ -527,7 +521,7 @@ public Boolean call() { } @Override - public QueryResponse query(final QueryRequest request) throws BigQueryException { + public QueryResponse query(final QueryRequest request) { try { com.google.api.services.bigquery.model.QueryResponse results = runWithRetries(new Callable() { @@ -566,8 +560,7 @@ public com.google.api.services.bigquery.model.QueryResponse call() { } @Override - public QueryResponse getQueryResults(JobId job, QueryResultsOption... options) - throws BigQueryException { + public QueryResponse getQueryResults(JobId job, QueryResultsOption... 
options) { Map optionsMap = optionMap(options); return getQueryResults(job, options(), optionsMap); } diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryOptions.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryOptions.java index 71d43cfbe565..d48cf646f349 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryOptions.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryOptions.java @@ -18,9 +18,9 @@ import com.google.common.collect.ImmutableSet; import com.google.gcloud.ServiceOptions; -import com.google.gcloud.spi.BigQueryRpc; -import com.google.gcloud.spi.BigQueryRpcFactory; -import com.google.gcloud.spi.DefaultBigQueryRpc; +import com.google.gcloud.bigquery.spi.BigQueryRpc; +import com.google.gcloud.bigquery.spi.BigQueryRpcFactory; +import com.google.gcloud.bigquery.spi.DefaultBigQueryRpc; import java.util.Set; diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Option.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Option.java index d88820fe5a29..3fdc27ecab99 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Option.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Option.java @@ -19,7 +19,7 @@ import static com.google.common.base.Preconditions.checkNotNull; import com.google.common.base.MoreObjects; -import com.google.gcloud.spi.BigQueryRpc; +import com.google.gcloud.bigquery.spi.BigQueryRpc; import java.io.Serializable; import java.util.Objects; diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryRequest.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryRequest.java index 5f99f3c5b4ee..b3522a2a6ba3 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryRequest.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryRequest.java @@ -40,7 +40,7 @@ * QueryRequest request = QueryRequest.builder("SELECT field FROM table") * .defaultDataset(DatasetId.of("dataset")) * .maxWaitTime(60000L) - * .maxResults(1000L) + * .pageSize(1000L) * .build(); * QueryResponse response = bigquery.query(request); * while (!response.jobCompleted()) { @@ -65,7 +65,7 @@ public class QueryRequest implements Serializable { private static final long serialVersionUID = -8727328332415880852L; private final String query; - private final Long maxResults; + private final Long pageSize; private final DatasetId defaultDataset; private final Long maxWaitTime; private final Boolean dryRun; @@ -74,7 +74,7 @@ public class QueryRequest implements Serializable { public static final class Builder { private String query; - private Long maxResults; + private Long pageSize; private DatasetId defaultDataset; private Long maxWaitTime; private Boolean dryRun; @@ -96,8 +96,8 @@ public Builder query(String query) { * query result set is large. In addition to this limit, responses are also limited to 10 MB. * By default, there is no maximum row count, and only the byte limit applies. 
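A usage sketch of the renamed builder method, modeled on the class javadoc and README snippets in this patch; the project, dataset, and table names are placeholders:

```java
import com.google.gcloud.bigquery.BigQuery;
import com.google.gcloud.bigquery.BigQueryOptions;
import com.google.gcloud.bigquery.QueryRequest;
import com.google.gcloud.bigquery.QueryResponse;

public class QueryPageSizeExample {
  public static void main(String... args) throws InterruptedException {
    BigQuery bigquery = BigQueryOptions.builder().projectId("my-project").build().service();
    // Wait up to 60 seconds for the query and return at most 1000 rows per page.
    QueryRequest request = QueryRequest.builder("SELECT * FROM my_dataset.my_table")
        .maxWaitTime(60000L)
        .pageSize(1000L)
        .build();
    QueryResponse response = bigquery.query(request);
    // The query job is asynchronous; poll until it completes.
    while (!response.jobCompleted()) {
      Thread.sleep(1000);
      response = bigquery.getQueryResults(response.jobId());
    }
    System.out.println("query finished for job " + response.jobId());
  }
}
```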
*/ - public Builder maxResults(Long maxResults) { - this.maxResults = maxResults; + public Builder pageSize(Long pageSize) { + this.pageSize = pageSize; return this; } @@ -157,7 +157,7 @@ public QueryRequest build() { private QueryRequest(Builder builder) { query = builder.query; - maxResults = builder.maxResults; + pageSize = builder.pageSize; defaultDataset = builder.defaultDataset; maxWaitTime = builder.maxWaitTime; dryRun = builder.dryRun; @@ -174,8 +174,8 @@ public String query() { /** * Returns the maximum number of rows of data to return per page of results. */ - public Long maxResults() { - return maxResults; + public Long pageSize() { + return pageSize; } /** @@ -224,7 +224,7 @@ public Boolean useQueryCache() { public Builder toBuilder() { return new Builder() .query(query) - .maxResults(maxResults) + .pageSize(pageSize) .defaultDataset(defaultDataset) .maxWaitTime(maxWaitTime) .dryRun(dryRun) @@ -235,7 +235,7 @@ public Builder toBuilder() { public String toString() { return MoreObjects.toStringHelper(this) .add("query", query) - .add("maxResults", maxResults) + .add("pageSize", pageSize) .add("defaultDataset", defaultDataset) .add("maxWaitTime", maxWaitTime) .add("dryRun", dryRun) @@ -245,7 +245,7 @@ public String toString() { @Override public int hashCode() { - return Objects.hash(query, maxResults, defaultDataset, maxWaitTime, dryRun, useQueryCache); + return Objects.hash(query, pageSize, defaultDataset, maxWaitTime, dryRun, useQueryCache); } @Override @@ -264,8 +264,8 @@ QueryRequest setProjectId(String projectId) { com.google.api.services.bigquery.model.QueryRequest toPb() { com.google.api.services.bigquery.model.QueryRequest queryRequestPb = new com.google.api.services.bigquery.model.QueryRequest().setQuery(query); - if (maxResults != null) { - queryRequestPb.setMaxResults(maxResults); + if (pageSize != null) { + queryRequestPb.setMaxResults(pageSize); } if (defaultDataset != null) { queryRequestPb.setDefaultDataset(defaultDataset.toPb()); @@ -299,7 +299,7 @@ public static QueryRequest of(String query) { static QueryRequest fromPb(com.google.api.services.bigquery.model.QueryRequest queryRequestPb) { Builder builder = builder(queryRequestPb.getQuery()); if (queryRequestPb.getMaxResults() != null) { - builder.maxResults(queryRequestPb.getMaxResults()); + builder.pageSize(queryRequestPb.getMaxResults()); } if (queryRequestPb.getDefaultDataset() != null) { builder.defaultDataset(DatasetId.fromPb(queryRequestPb.getDefaultDataset())); diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/BigQueryRpc.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/spi/BigQueryRpc.java similarity index 76% rename from gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/BigQueryRpc.java rename to gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/spi/BigQueryRpc.java index 6062e19950e0..d0b740e9e390 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/BigQueryRpc.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/spi/BigQueryRpc.java @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -package com.google.gcloud.spi; +package com.google.gcloud.bigquery.spi; import com.google.api.services.bigquery.model.Dataset; import com.google.api.services.bigquery.model.GetQueryResultsResponse; @@ -100,7 +100,7 @@ public Y y() { * * @throws BigQueryException upon failure */ - Dataset getDataset(String datasetId, Map options) throws BigQueryException; + Dataset getDataset(String datasetId, Map options); /** * Lists the project's datasets. Partial information is returned on a dataset (datasetReference, @@ -108,13 +108,28 @@ public Y y() { * * @throws BigQueryException upon failure */ - Tuple> listDatasets(Map options) throws BigQueryException; + Tuple> listDatasets(Map options); - Dataset create(Dataset dataset, Map options) throws BigQueryException; + /** + * Creates a new dataset. + * + * @throws BigQueryException upon failure + */ + Dataset create(Dataset dataset, Map options); - Table create(Table table, Map options) throws BigQueryException; + /** + * Creates a new table. + * + * @throws BigQueryException upon failure + */ + Table create(Table table, Map options); - Job create(Job job, Map options) throws BigQueryException; + /** + * Creates a new job. + * + * @throws BigQueryException upon failure + */ + Job create(Job job, Map options); /** * Delete the requested dataset. @@ -122,18 +137,28 @@ public Y y() { * @return {@code true} if dataset was deleted, {@code false} if it was not found * @throws BigQueryException upon failure */ - boolean deleteDataset(String datasetId, Map options) throws BigQueryException; + boolean deleteDataset(String datasetId, Map options); - Dataset patch(Dataset dataset, Map options) throws BigQueryException; + /** + * Updates dataset information. + * + * @throws BigQueryException upon failure + */ + Dataset patch(Dataset dataset, Map options); - Table patch(Table table, Map options) throws BigQueryException; + /** + * Updates table information. + * + * @throws BigQueryException upon failure + */ + Table patch(Table table, Map options); /** * Returns the requested table or {@code null} if not found. * * @throws BigQueryException upon failure */ - Table getTable(String datasetId, String tableId, Map options) throws BigQueryException; + Table getTable(String datasetId, String tableId, Map options); /** * Lists the dataset's tables. Partial information is returned on a table (tableReference, @@ -141,8 +166,7 @@ public Y y() { * * @throws BigQueryException upon failure */ - Tuple> listTables(String dataset, Map options) - throws BigQueryException; + Tuple> listTables(String dataset, Map options); /** * Delete the requested table. @@ -150,27 +174,37 @@ Tuple> listTables(String dataset, Map options * @return {@code true} if table was deleted, {@code false} if it was not found * @throws BigQueryException upon failure */ - boolean deleteTable(String datasetId, String tableId) throws BigQueryException; + boolean deleteTable(String datasetId, String tableId); + /** + * Sends an insert all request. + * + * @throws BigQueryException upon failure + */ TableDataInsertAllResponse insertAll(String datasetId, String tableId, - TableDataInsertAllRequest request) throws BigQueryException; + TableDataInsertAllRequest request); + /** + * Lists the table's rows. + * + * @throws BigQueryException upon failure + */ Tuple> listTableData(String datasetId, String tableId, - Map options) throws BigQueryException; + Map options); /** * Returns the requested job or {@code null} if not found. 
* * @throws BigQueryException upon failure */ - Job getJob(String jobId, Map options) throws BigQueryException; + Job getJob(String jobId, Map options); /** * Lists the project's jobs. * * @throws BigQueryException upon failure */ - Tuple> listJobs(Map options) throws BigQueryException; + Tuple> listJobs(Map options); /** * Sends a job cancel request. This call will return immediately, and the client will need to poll @@ -180,12 +214,21 @@ Tuple> listTableData(String datasetId, String tableId * found * @throws BigQueryException upon failure */ - boolean cancel(String jobId) throws BigQueryException; + boolean cancel(String jobId); - GetQueryResultsResponse getQueryResults(String jobId, Map options) - throws BigQueryException; + /** + * Returns results of the query associated with the provided job. + * + * @throws BigQueryException upon failure + */ + GetQueryResultsResponse getQueryResults(String jobId, Map options); - QueryResponse query(QueryRequest request) throws BigQueryException; + /** + * Runs the query associated with the request. + * + * @throws BigQueryException upon failure + */ + QueryResponse query(QueryRequest request); /** * Opens a resumable upload session to load data into a BigQuery table and returns an upload URI. @@ -193,7 +236,7 @@ GetQueryResultsResponse getQueryResults(String jobId, Map options) * @param configuration load configuration * @throws BigQueryException upon failure */ - String open(JobConfiguration configuration) throws BigQueryException; + String open(JobConfiguration configuration); /** * Uploads the provided data to the resumable upload session at the specified position. @@ -207,5 +250,5 @@ GetQueryResultsResponse getQueryResults(String jobId, Map options) * @throws BigQueryException upon failure */ void write(String uploadId, byte[] toWrite, int toWriteOffset, long destOffset, int length, - boolean last) throws BigQueryException; + boolean last); } diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/BigQueryRpcFactory.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/spi/BigQueryRpcFactory.java similarity index 90% rename from gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/BigQueryRpcFactory.java rename to gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/spi/BigQueryRpcFactory.java index 2706868756a5..1323ec0624f4 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/BigQueryRpcFactory.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/spi/BigQueryRpcFactory.java @@ -14,9 +14,10 @@ * limitations under the License. */ -package com.google.gcloud.spi; +package com.google.gcloud.bigquery.spi; import com.google.gcloud.bigquery.BigQueryOptions; +import com.google.gcloud.spi.ServiceRpcFactory; /** * An interface for BigQuery RPC factory. diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/DefaultBigQueryRpc.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/spi/DefaultBigQueryRpc.java similarity index 89% rename from gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/DefaultBigQueryRpc.java rename to gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/spi/DefaultBigQueryRpc.java index b57f1dc8a128..71712bda7806 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/DefaultBigQueryRpc.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/spi/DefaultBigQueryRpc.java @@ -12,14 +12,17 @@ * the License. 
*/ -package com.google.gcloud.spi; +package com.google.gcloud.bigquery.spi; -import static com.google.gcloud.spi.BigQueryRpc.Option.DELETE_CONTENTS; -import static com.google.gcloud.spi.BigQueryRpc.Option.FIELDS; -import static com.google.gcloud.spi.BigQueryRpc.Option.MAX_RESULTS; -import static com.google.gcloud.spi.BigQueryRpc.Option.PAGE_TOKEN; -import static com.google.gcloud.spi.BigQueryRpc.Option.START_INDEX; -import static com.google.gcloud.spi.BigQueryRpc.Option.TIMEOUT; +import static com.google.gcloud.bigquery.spi.BigQueryRpc.Option.ALL_DATASETS; +import static com.google.gcloud.bigquery.spi.BigQueryRpc.Option.ALL_USERS; +import static com.google.gcloud.bigquery.spi.BigQueryRpc.Option.DELETE_CONTENTS; +import static com.google.gcloud.bigquery.spi.BigQueryRpc.Option.FIELDS; +import static com.google.gcloud.bigquery.spi.BigQueryRpc.Option.MAX_RESULTS; +import static com.google.gcloud.bigquery.spi.BigQueryRpc.Option.PAGE_TOKEN; +import static com.google.gcloud.bigquery.spi.BigQueryRpc.Option.START_INDEX; +import static com.google.gcloud.bigquery.spi.BigQueryRpc.Option.STATE_FILTER; +import static com.google.gcloud.bigquery.spi.BigQueryRpc.Option.TIMEOUT; import static java.net.HttpURLConnection.HTTP_CREATED; import static java.net.HttpURLConnection.HTTP_NOT_FOUND; import static java.net.HttpURLConnection.HTTP_OK; @@ -90,7 +93,7 @@ private static BigQueryException translate(IOException exception) { } @Override - public Dataset getDataset(String datasetId, Map options) throws BigQueryException { + public Dataset getDataset(String datasetId, Map options) { try { return bigquery.datasets() .get(this.options.projectId(), datasetId) @@ -106,14 +109,14 @@ public Dataset getDataset(String datasetId, Map options) throws BigQu } @Override - public Tuple> listDatasets(Map options) - throws BigQueryException { + public Tuple> listDatasets(Map options) { try { DatasetList datasetsList = bigquery.datasets() .list(this.options.projectId()) - .setAll(Option.ALL_DATASETS.getBoolean(options)) + .setAll(ALL_DATASETS.getBoolean(options)) .setMaxResults(MAX_RESULTS.getLong(options)) .setPageToken(PAGE_TOKEN.getString(options)) + .setPageToken(PAGE_TOKEN.getString(options)) .execute(); Iterable datasets = datasetsList.getDatasets(); return Tuple.of(datasetsList.getNextPageToken(), @@ -135,7 +138,7 @@ public Dataset apply(DatasetList.Datasets datasetPb) { } @Override - public Dataset create(Dataset dataset, Map options) throws BigQueryException { + public Dataset create(Dataset dataset, Map options) { try { return bigquery.datasets().insert(this.options.projectId(), dataset) .setFields(FIELDS.getString(options)) @@ -146,8 +149,7 @@ public Dataset create(Dataset dataset, Map options) throws BigQueryEx } @Override - public Table create(Table table, Map options) - throws BigQueryException { + public Table create(Table table, Map options) { try { // unset the type, as it is output only table.setType(null); @@ -161,7 +163,7 @@ public Table create(Table table, Map options) } @Override - public Job create(Job job, Map options) throws BigQueryException { + public Job create(Job job, Map options) { try { return bigquery.jobs() .insert(this.options.projectId(), job) @@ -173,7 +175,7 @@ public Job create(Job job, Map options) throws BigQueryException { } @Override - public boolean deleteDataset(String datasetId, Map options) throws BigQueryException { + public boolean deleteDataset(String datasetId, Map options) { try { bigquery.datasets().delete(this.options.projectId(), datasetId) 
.setDeleteContents(DELETE_CONTENTS.getBoolean(options)) @@ -189,7 +191,7 @@ public boolean deleteDataset(String datasetId, Map options) throws Bi } @Override - public Dataset patch(Dataset dataset, Map options) throws BigQueryException { + public Dataset patch(Dataset dataset, Map options) { try { DatasetReference reference = dataset.getDatasetReference(); return bigquery.datasets() @@ -202,7 +204,7 @@ public Dataset patch(Dataset dataset, Map options) throws BigQueryExc } @Override - public Table patch(Table table, Map options) throws BigQueryException { + public Table patch(Table table, Map options) { try { // unset the type, as it is output only table.setType(null); @@ -217,8 +219,7 @@ public Table patch(Table table, Map options) throws BigQueryException } @Override - public Table getTable(String datasetId, String tableId, Map options) - throws BigQueryException { + public Table getTable(String datasetId, String tableId, Map options) { try { return bigquery.tables() .get(this.options.projectId(), datasetId, tableId) @@ -234,8 +235,7 @@ public Table getTable(String datasetId, String tableId, Map options) } @Override - public Tuple> listTables(String datasetId, Map options) - throws BigQueryException { + public Tuple> listTables(String datasetId, Map options) { try { TableList tableList = bigquery.tables() .list(this.options.projectId(), datasetId) @@ -262,7 +262,7 @@ public Table apply(TableList.Tables tablePb) { } @Override - public boolean deleteTable(String datasetId, String tableId) throws BigQueryException { + public boolean deleteTable(String datasetId, String tableId) { try { bigquery.tables().delete(this.options.projectId(), datasetId, tableId).execute(); return true; @@ -277,7 +277,7 @@ public boolean deleteTable(String datasetId, String tableId) throws BigQueryExce @Override public TableDataInsertAllResponse insertAll(String datasetId, String tableId, - TableDataInsertAllRequest request) throws BigQueryException { + TableDataInsertAllRequest request) { try { return bigquery.tabledata() .insertAll(this.options.projectId(), datasetId, tableId, request) @@ -289,7 +289,7 @@ public TableDataInsertAllResponse insertAll(String datasetId, String tableId, @Override public Tuple> listTableData(String datasetId, String tableId, - Map options) throws BigQueryException { + Map options) { try { TableDataList tableDataList = bigquery.tabledata() .list(this.options.projectId(), datasetId, tableId) @@ -306,7 +306,7 @@ public Tuple> listTableData(String datasetId, String } @Override - public Job getJob(String jobId, Map options) throws BigQueryException { + public Job getJob(String jobId, Map options) { try { return bigquery.jobs() .get(this.options.projectId(), jobId) @@ -322,13 +322,13 @@ public Job getJob(String jobId, Map options) throws BigQueryException } @Override - public Tuple> listJobs(Map options) throws BigQueryException { + public Tuple> listJobs(Map options) { try { JobList jobsList = bigquery.jobs() .list(this.options.projectId()) - .setAllUsers(Option.ALL_USERS.getBoolean(options)) - .setFields(Option.FIELDS.getString(options)) - .setStateFilter(Option.STATE_FILTER.>get(options)) + .setAllUsers(ALL_USERS.getBoolean(options)) + .setFields(FIELDS.getString(options)) + .setStateFilter(STATE_FILTER.>get(options)) .setMaxResults(MAX_RESULTS.getLong(options)) .setPageToken(PAGE_TOKEN.getString(options)) .setProjection(DEFAULT_PROJECTION) @@ -363,7 +363,7 @@ public Job apply(JobList.Jobs jobPb) { } @Override - public boolean cancel(String jobId) throws BigQueryException { + public 
boolean cancel(String jobId) { try { bigquery.jobs().cancel(this.options.projectId(), jobId).execute(); return true; @@ -377,8 +377,7 @@ public boolean cancel(String jobId) throws BigQueryException { } @Override - public GetQueryResultsResponse getQueryResults(String jobId, Map options) - throws BigQueryException { + public GetQueryResultsResponse getQueryResults(String jobId, Map options) { try { return bigquery.jobs().getQueryResults(this.options.projectId(), jobId) .setMaxResults(MAX_RESULTS.getLong(options)) @@ -397,7 +396,7 @@ public GetQueryResultsResponse getQueryResults(String jobId, Map opti } @Override - public QueryResponse query(QueryRequest request) throws BigQueryException { + public QueryResponse query(QueryRequest request) { try { return bigquery.jobs().query(this.options.projectId(), request).execute(); } catch (IOException ex) { @@ -406,7 +405,7 @@ public QueryResponse query(QueryRequest request) throws BigQueryException { } @Override - public String open(JobConfiguration configuration) throws BigQueryException { + public String open(JobConfiguration configuration) { try { Job loadJob = new Job().setConfiguration(configuration); StringBuilder builder = new StringBuilder() @@ -429,7 +428,7 @@ public String open(JobConfiguration configuration) throws BigQueryException { @Override public void write(String uploadId, byte[] toWrite, int toWriteOffset, long destOffset, int length, - boolean last) throws BigQueryException { + boolean last) { try { GenericUrl url = new GenericUrl(uploadId); HttpRequest httpRequest = bigquery.getRequestFactory().buildPutRequest(url, diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryImplTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryImplTest.java index 385ee6dcc8bd..a6f512800024 100644 --- a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryImplTest.java +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryImplTest.java @@ -40,9 +40,9 @@ import com.google.gcloud.RetryParams; import com.google.gcloud.WriteChannel; import com.google.gcloud.bigquery.InsertAllRequest.RowToInsert; -import com.google.gcloud.spi.BigQueryRpc; -import com.google.gcloud.spi.BigQueryRpc.Tuple; -import com.google.gcloud.spi.BigQueryRpcFactory; +import com.google.gcloud.bigquery.spi.BigQueryRpc; +import com.google.gcloud.bigquery.spi.BigQueryRpc.Tuple; +import com.google.gcloud.bigquery.spi.BigQueryRpcFactory; import org.easymock.Capture; import org.easymock.EasyMock; @@ -148,12 +148,12 @@ public class BigQueryImplTest { private static final TableRow TABLE_ROW = new TableRow().setF(ImmutableList.of(BOOLEAN_FIELD, INTEGER_FIELD)); private static final QueryRequest QUERY_REQUEST = QueryRequest.builder("SQL") - .maxResults(42L) + .pageSize(42L) .useQueryCache(false) .defaultDataset(DatasetId.of(DATASET)) .build(); private static final QueryRequest QUERY_REQUEST_WITH_PROJECT = QueryRequest.builder("SQL") - .maxResults(42L) + .pageSize(42L) .useQueryCache(false) .defaultDataset(DatasetId.of(PROJECT, DATASET)) .build(); @@ -169,9 +169,9 @@ public class BigQueryImplTest { private static final BigQuery.DatasetListOption DATASET_LIST_ALL = BigQuery.DatasetListOption.all(); private static final BigQuery.DatasetListOption DATASET_LIST_PAGE_TOKEN = - BigQuery.DatasetListOption.startPageToken("cursor"); - private static final BigQuery.DatasetListOption DATASET_LIST_MAX_RESULTS = - BigQuery.DatasetListOption.maxResults(42L); + BigQuery.DatasetListOption.pageToken("cursor"); + 
private static final BigQuery.DatasetListOption DATASET_LIST_PAGE_SIZE = + BigQuery.DatasetListOption.pageSize(42L); private static final Map DATASET_LIST_OPTIONS = ImmutableMap.of( BigQueryRpc.Option.ALL_DATASETS, true, BigQueryRpc.Option.PAGE_TOKEN, "cursor", @@ -188,19 +188,19 @@ public class BigQueryImplTest { BigQuery.TableOption.fields(BigQuery.TableField.SCHEMA, BigQuery.TableField.ETAG); // Table list options - private static final BigQuery.TableListOption TABLE_LIST_MAX_RESULTS = - BigQuery.TableListOption.maxResults(42L); + private static final BigQuery.TableListOption TABLE_LIST_PAGE_SIZE = + BigQuery.TableListOption.pageSize(42L); private static final BigQuery.TableListOption TABLE_LIST_PAGE_TOKEN = - BigQuery.TableListOption.startPageToken("cursor"); + BigQuery.TableListOption.pageToken("cursor"); private static final Map TABLE_LIST_OPTIONS = ImmutableMap.of( BigQueryRpc.Option.MAX_RESULTS, 42L, BigQueryRpc.Option.PAGE_TOKEN, "cursor"); // TableData list options - private static final BigQuery.TableDataListOption TABLE_DATA_LIST_MAX_RESULTS = - BigQuery.TableDataListOption.maxResults(42L); + private static final BigQuery.TableDataListOption TABLE_DATA_LIST_PAGE_SIZE = + BigQuery.TableDataListOption.pageSize(42L); private static final BigQuery.TableDataListOption TABLE_DATA_LIST_PAGE_TOKEN = - BigQuery.TableDataListOption.startPageToken("cursor"); + BigQuery.TableDataListOption.pageToken("cursor"); private static final BigQuery.TableDataListOption TABLE_DATA_LIST_START_INDEX = BigQuery.TableDataListOption.startIndex(0L); private static final Map TABLE_DATA_LIST_OPTIONS = ImmutableMap.of( @@ -220,9 +220,9 @@ public class BigQueryImplTest { private static final BigQuery.JobListOption JOB_LIST_STATE_FILTER = BigQuery.JobListOption.stateFilter(JobStatus.State.DONE, JobStatus.State.PENDING); private static final BigQuery.JobListOption JOB_LIST_PAGE_TOKEN = - BigQuery.JobListOption.startPageToken("cursor"); - private static final BigQuery.JobListOption JOB_LIST_MAX_RESULTS = - BigQuery.JobListOption.maxResults(42L); + BigQuery.JobListOption.pageToken("cursor"); + private static final BigQuery.JobListOption JOB_LIST_PAGE_SIZE = + BigQuery.JobListOption.pageSize(42L); private static final Map JOB_LIST_OPTIONS = ImmutableMap.of( BigQueryRpc.Option.ALL_USERS, true, BigQueryRpc.Option.STATE_FILTER, ImmutableList.of("done", "pending"), @@ -235,9 +235,9 @@ public class BigQueryImplTest { private static final BigQuery.QueryResultsOption QUERY_RESULTS_OPTION_INDEX = BigQuery.QueryResultsOption.startIndex(1024L); private static final BigQuery.QueryResultsOption QUERY_RESULTS_OPTION_PAGE_TOKEN = - BigQuery.QueryResultsOption.startPageToken("cursor"); - private static final BigQuery.QueryResultsOption QUERY_RESULTS_OPTION_MAX_RESULTS = - BigQuery.QueryResultsOption.maxResults(0L); + BigQuery.QueryResultsOption.pageToken("cursor"); + private static final BigQuery.QueryResultsOption QUERY_RESULTS_OPTION_PAGE_SIZE = + BigQuery.QueryResultsOption.pageSize(0L); private static final Map QUERY_RESULTS_OPTIONS = ImmutableMap.of( BigQueryRpc.Option.TIMEOUT, 42L, BigQueryRpc.Option.START_INDEX, 1024L, @@ -388,7 +388,7 @@ public void testListDatasetsWithOptions() { EasyMock.expect(bigqueryRpcMock.listDatasets(DATASET_LIST_OPTIONS)).andReturn(result); EasyMock.replay(bigqueryRpcMock); Page page = bigquery.listDatasets(DATASET_LIST_ALL, DATASET_LIST_PAGE_TOKEN, - DATASET_LIST_MAX_RESULTS); + DATASET_LIST_PAGE_SIZE); assertEquals(cursor, page.nextPageCursor()); assertArrayEquals(datasetList.toArray(), 
Iterables.toArray(page.values(), DatasetInfo.class)); } @@ -560,7 +560,7 @@ public void testListTablesWithOptions() { Tuple.of(cursor, Iterables.transform(tableList, TableInfo.TO_PB_FUNCTION)); EasyMock.expect(bigqueryRpcMock.listTables(DATASET, TABLE_LIST_OPTIONS)).andReturn(result); EasyMock.replay(bigqueryRpcMock); - Page
<Table> page = bigquery.listTables(DATASET, TABLE_LIST_MAX_RESULTS, TABLE_LIST_PAGE_TOKEN); + Page<Table>
page = bigquery.listTables(DATASET, TABLE_LIST_PAGE_SIZE, TABLE_LIST_PAGE_TOKEN); assertEquals(cursor, page.nextPageCursor()); assertArrayEquals(tableList.toArray(), Iterables.toArray(page.values(), Table.class)); } @@ -733,7 +733,7 @@ public void testListTableDataWithOptions() { EasyMock.replay(bigqueryRpcMock); bigquery = options.service(); Page> page = bigquery.listTableData(DATASET, TABLE, - TABLE_DATA_LIST_MAX_RESULTS, TABLE_DATA_LIST_PAGE_TOKEN, TABLE_DATA_LIST_START_INDEX); + TABLE_DATA_LIST_PAGE_SIZE, TABLE_DATA_LIST_PAGE_TOKEN, TABLE_DATA_LIST_START_INDEX); assertEquals(cursor, page.nextPageCursor()); assertArrayEquals(tableData.toArray(), Iterables.toArray(page.values(), List.class)); } @@ -859,7 +859,7 @@ public com.google.api.services.bigquery.model.Job apply(Job job) { EasyMock.expect(bigqueryRpcMock.listJobs(JOB_LIST_OPTIONS)).andReturn(result); EasyMock.replay(bigqueryRpcMock); Page page = bigquery.listJobs(JOB_LIST_ALL_USERS, JOB_LIST_STATE_FILTER, - JOB_LIST_PAGE_TOKEN, JOB_LIST_MAX_RESULTS); + JOB_LIST_PAGE_TOKEN, JOB_LIST_PAGE_SIZE); assertEquals(cursor, page.nextPageCursor()); assertArrayEquals(jobList.toArray(), Iterables.toArray(page.values(), Job.class)); } @@ -1012,7 +1012,7 @@ public void testGetQueryResultsWithOptions() { EasyMock.replay(bigqueryRpcMock); bigquery = options.service(); QueryResponse response = bigquery.getQueryResults(queryJob, QUERY_RESULTS_OPTION_TIME, - QUERY_RESULTS_OPTION_INDEX, QUERY_RESULTS_OPTION_MAX_RESULTS, + QUERY_RESULTS_OPTION_INDEX, QUERY_RESULTS_OPTION_PAGE_SIZE, QUERY_RESULTS_OPTION_PAGE_TOKEN); assertEquals(queryJob, response.jobId()); assertEquals(true, response.jobCompleted()); diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/DatasetTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/DatasetTest.java index 373291021b23..dd03b7899ebc 100644 --- a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/DatasetTest.java +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/DatasetTest.java @@ -260,11 +260,11 @@ public void testListWithOptions() throws Exception { new Table(serviceMockReturnsOptions, new Table.BuilderImpl(TABLE_INFO3))); PageImpl
expectedPage = new PageImpl<>(null, "c", tableResults); expect(bigquery.options()).andReturn(mockOptions); - expect(bigquery.listTables(DATASET_INFO.datasetId(), BigQuery.TableListOption.maxResults(10L))) + expect(bigquery.listTables(DATASET_INFO.datasetId(), BigQuery.TableListOption.pageSize(10L))) .andReturn(expectedPage); replay(bigquery); initializeDataset(); - Page
<Table> tablePage = dataset.list(BigQuery.TableListOption.maxResults(10L)); + Page<Table>
tablePage = dataset.list(BigQuery.TableListOption.pageSize(10L)); assertArrayEquals(tableResults.toArray(), Iterables.toArray(tablePage.values(), Table.class)); assertEquals(expectedPage.nextPageCursor(), tablePage.nextPageCursor()); } diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/OptionTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/OptionTest.java index 225fc284b203..2c89ececedb8 100644 --- a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/OptionTest.java +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/OptionTest.java @@ -18,7 +18,7 @@ import static org.junit.Assert.assertEquals; -import com.google.gcloud.spi.BigQueryRpc; +import com.google.gcloud.bigquery.spi.BigQueryRpc; import org.junit.Test; diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryRequestTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryRequestTest.java index 370b4d614cbf..7875dee9e315 100644 --- a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryRequestTest.java +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryRequestTest.java @@ -29,13 +29,13 @@ public class QueryRequestTest { private static final DatasetId DATASET_ID = DatasetId.of("dataset"); private static final Boolean USE_QUERY_CACHE = true; private static final Boolean DRY_RUN = false; - private static final Long MAX_RESULTS = 42L; + private static final Long PAGE_SIZE = 42L; private static final Long MAX_WAIT_TIME = 42000L; private static final QueryRequest QUERY_REQUEST = QueryRequest.builder(QUERY) .useQueryCache(USE_QUERY_CACHE) .defaultDataset(DATASET_ID) .dryRun(DRY_RUN) - .maxResults(MAX_RESULTS) + .pageSize(PAGE_SIZE) .maxWaitTime(MAX_WAIT_TIME) .build(); @@ -65,7 +65,7 @@ public void testBuilder() { assertEquals(USE_QUERY_CACHE, QUERY_REQUEST.useQueryCache()); assertEquals(DATASET_ID, QUERY_REQUEST.defaultDataset()); assertEquals(DRY_RUN, QUERY_REQUEST.dryRun()); - assertEquals(MAX_RESULTS, QUERY_REQUEST.maxResults()); + assertEquals(PAGE_SIZE, QUERY_REQUEST.pageSize()); assertEquals(MAX_WAIT_TIME, QUERY_REQUEST.maxWaitTime()); thrown.expect(NullPointerException.class); QueryRequest.builder(null); @@ -78,7 +78,7 @@ public void testOf() { assertNull(request.useQueryCache()); assertNull(request.defaultDataset()); assertNull(request.dryRun()); - assertNull(request.maxResults()); + assertNull(request.pageSize()); assertNull(request.maxWaitTime()); thrown.expect(NullPointerException.class); QueryRequest.of(null); @@ -102,7 +102,7 @@ private void compareQueryRequest(QueryRequest expected, QueryRequest value) { assertEquals(expected.useQueryCache(), value.useQueryCache()); assertEquals(expected.defaultDataset(), value.defaultDataset()); assertEquals(expected.dryRun(), value.dryRun()); - assertEquals(expected.maxResults(), value.maxResults()); + assertEquals(expected.pageSize(), value.pageSize()); assertEquals(expected.maxWaitTime(), value.maxWaitTime()); } } diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/SerializationTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/SerializationTest.java index d877bff2138c..111df074ffa2 100644 --- a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/SerializationTest.java +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/SerializationTest.java @@ -16,30 +16,19 @@ package com.google.gcloud.bigquery; -import static org.junit.Assert.assertEquals; -import static 
org.junit.Assert.assertNotSame; - import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.gcloud.AuthCredentials; -import com.google.gcloud.RestorableState; -import com.google.gcloud.RetryParams; -import com.google.gcloud.WriteChannel; +import com.google.gcloud.BaseSerializationTest; +import com.google.gcloud.Restorable; import com.google.gcloud.bigquery.StandardTableDefinition.StreamingBuffer; -import org.junit.Test; - -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.ObjectInputStream; -import java.io.ObjectOutputStream; import java.io.Serializable; import java.nio.charset.StandardCharsets; import java.util.List; import java.util.Map; -public class SerializationTest { +public class SerializationTest extends BaseSerializationTest { private static final Acl DOMAIN_ACCESS = Acl.of(new Acl.Domain("domain"), Acl.Role.WRITER); @@ -207,7 +196,7 @@ public class SerializationTest { .useQueryCache(true) .defaultDataset(DATASET_ID) .dryRun(false) - .maxResults(42L) + .pageSize(42L) .maxWaitTime(10L) .build(); private static final QueryResult QUERY_RESULT = QueryResult.builder() @@ -230,75 +219,40 @@ public class SerializationTest { new Dataset(BIGQUERY, new DatasetInfo.BuilderImpl(DATASET_INFO)); private static final Table TABLE = new Table(BIGQUERY, new TableInfo.BuilderImpl(TABLE_INFO)); private static final Job JOB = new Job(BIGQUERY, new JobInfo.BuilderImpl(JOB_INFO)); + private static final BigQueryException BIG_QUERY_EXCEPTION = + new BigQueryException(42, "message", BIGQUERY_ERROR); - @Test - public void testServiceOptions() throws Exception { + @Override + protected Serializable[] serializableObjects() { BigQueryOptions options = BigQueryOptions.builder() .projectId("p1") .authCredentials(AuthCredentials.createForAppEngine()) .build(); - BigQueryOptions serializedCopy = serializeAndDeserialize(options); - assertEquals(options, serializedCopy); - - options = options.toBuilder() + BigQueryOptions otherOptions = options.toBuilder() .projectId("p2") - .retryParams(RetryParams.defaultInstance()) .authCredentials(null) .build(); - serializedCopy = serializeAndDeserialize(options); - assertEquals(options, serializedCopy); - } - - @Test - public void testModelAndRequests() throws Exception { - Serializable[] objects = {DOMAIN_ACCESS, GROUP_ACCESS, USER_ACCESS, VIEW_ACCESS, DATASET_ID, + return new Serializable[]{DOMAIN_ACCESS, GROUP_ACCESS, USER_ACCESS, VIEW_ACCESS, DATASET_ID, DATASET_INFO, TABLE_ID, CSV_OPTIONS, STREAMING_BUFFER, TABLE_DEFINITION, EXTERNAL_TABLE_DEFINITION, VIEW_DEFINITION, TABLE_SCHEMA, TABLE_INFO, VIEW_INFO, EXTERNAL_TABLE_INFO, INLINE_FUNCTION, URI_FUNCTION, JOB_STATISTICS, EXTRACT_STATISTICS, LOAD_STATISTICS, QUERY_STATISTICS, BIGQUERY_ERROR, JOB_STATUS, JOB_ID, COPY_JOB_CONFIGURATION, EXTRACT_JOB_CONFIGURATION, LOAD_CONFIGURATION, LOAD_JOB_CONFIGURATION, QUERY_JOB_CONFIGURATION, JOB_INFO, INSERT_ALL_REQUEST, - INSERT_ALL_RESPONSE, FIELD_VALUE, QUERY_REQUEST, QUERY_RESPONSE, + INSERT_ALL_RESPONSE, FIELD_VALUE, QUERY_REQUEST, QUERY_RESPONSE, BIG_QUERY_EXCEPTION, BigQuery.DatasetOption.fields(), BigQuery.DatasetDeleteOption.deleteContents(), BigQuery.DatasetListOption.all(), BigQuery.TableOption.fields(), - BigQuery.TableListOption.maxResults(42L), BigQuery.JobOption.fields(), - BigQuery.JobListOption.allUsers(), DATASET, TABLE, JOB}; - for (Serializable obj : objects) { - Object copy = serializeAndDeserialize(obj); - assertEquals(obj, obj); - 
assertEquals(obj, copy); - assertNotSame(obj, copy); - assertEquals(copy, copy); - } + BigQuery.TableListOption.pageSize(42L), BigQuery.JobOption.fields(), + BigQuery.JobListOption.allUsers(), DATASET, TABLE, JOB, options, otherOptions}; } - @Test - public void testWriteChannelState() throws IOException, ClassNotFoundException { - BigQueryOptions options = BigQueryOptions.builder() - .projectId("p2") - .retryParams(RetryParams.defaultInstance()) - .build(); + @Override + protected Restorable[] restorableObjects() { + BigQueryOptions options = BigQueryOptions.builder().projectId("p2").build(); // avoid closing when you don't want partial writes upon failure @SuppressWarnings("resource") TableDataWriteChannel writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION, "upload-id"); - RestorableState state = writer.capture(); - RestorableState deserializedState = serializeAndDeserialize(state); - assertEquals(state, deserializedState); - assertEquals(state.hashCode(), deserializedState.hashCode()); - assertEquals(state.toString(), deserializedState.toString()); - } - - @SuppressWarnings("unchecked") - private T serializeAndDeserialize(T obj) - throws IOException, ClassNotFoundException { - ByteArrayOutputStream bytes = new ByteArrayOutputStream(); - try (ObjectOutputStream output = new ObjectOutputStream(bytes)) { - output.writeObject(obj); - } - try (ObjectInputStream input = - new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray()))) { - return (T) input.readObject(); - } + return new Restorable[]{writer}; } } diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableDataWriteChannelTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableDataWriteChannelTest.java index 6b7edcd76db1..4c1be470ff57 100644 --- a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableDataWriteChannelTest.java +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableDataWriteChannelTest.java @@ -32,8 +32,8 @@ import com.google.gcloud.RestorableState; import com.google.gcloud.WriteChannel; -import com.google.gcloud.spi.BigQueryRpc; -import com.google.gcloud.spi.BigQueryRpcFactory; +import com.google.gcloud.bigquery.spi.BigQueryRpc; +import com.google.gcloud.bigquery.spi.BigQueryRpcFactory; import org.easymock.Capture; import org.easymock.CaptureType; diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableTest.java index 4866ee9ab8ec..c7828ebeadf4 100644 --- a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableTest.java +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableTest.java @@ -286,11 +286,11 @@ public void testListWithOptions() throws Exception { initializeExpectedTable(1); expect(bigquery.options()).andReturn(mockOptions); PageImpl> tableDataPage = new PageImpl<>(null, "c", ROWS); - expect(bigquery.listTableData(TABLE_ID1, BigQuery.TableDataListOption.maxResults(10L))) + expect(bigquery.listTableData(TABLE_ID1, BigQuery.TableDataListOption.pageSize(10L))) .andReturn(tableDataPage); replay(bigquery); initializeTable(); - Page> dataPage = table.list(BigQuery.TableDataListOption.maxResults(10L)); + Page> dataPage = table.list(BigQuery.TableDataListOption.pageSize(10L)); Iterator> tableDataIterator = tableDataPage.values().iterator(); Iterator> dataIterator = dataPage.values().iterator(); assertTrue(Iterators.elementsEqual(tableDataIterator, dataIterator)); diff --git 
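For context on the option rename exercised in the tests above, here is a minimal sketch of paging through a dataset's tables with the new `pageSize` option. It assumes a `BigQuery` handle from `BigQueryOptions.defaultInstance()` and a `Dataset` obtained via `getDataset`; the dataset id and page size are placeholders, and `nextPage()` comes from the library's generic `Page` API rather than from this change.

```java
import com.google.gcloud.Page;
import com.google.gcloud.bigquery.BigQuery;
import com.google.gcloud.bigquery.BigQueryOptions;
import com.google.gcloud.bigquery.Dataset;
import com.google.gcloud.bigquery.Table;

public class ListTablesSnippet {
  public static void main(String... args) {
    BigQuery bigquery = BigQueryOptions.defaultInstance().service();
    // "my_dataset_id" is a placeholder for an existing dataset's id
    Dataset dataset = bigquery.getDataset("my_dataset_id");
    // pageSize(10L) is the renamed equivalent of the old maxResults(10L)
    Page<Table> page = dataset.list(BigQuery.TableListOption.pageSize(10L));
    while (page != null) {
      for (Table table : page.values()) {
        System.out.println(table);
      }
      page = page.nextPage(); // null once the last page has been consumed
    }
  }
}
```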
a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/it/ITBigQueryTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/it/ITBigQueryTest.java index 63a0551ece33..50780b4fc9a9 100644 --- a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/it/ITBigQueryTest.java +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/it/ITBigQueryTest.java @@ -348,7 +348,7 @@ public void testCreateExternalTable() throws InterruptedException { + tableName) .defaultDataset(DatasetId.of(DATASET)) .maxWaitTime(60000L) - .maxResults(1000L) + .pageSize(1000L) .build(); QueryResponse response = bigquery.query(request); while (!response.jobCompleted()) { @@ -411,7 +411,7 @@ public void testCreateViewTable() throws InterruptedException { QueryRequest request = QueryRequest.builder("SELECT * FROM " + tableName) .defaultDataset(DatasetId.of(DATASET)) .maxWaitTime(60000L) - .maxResults(1000L) + .pageSize(1000L) .build(); QueryResponse response = bigquery.query(request); while (!response.jobCompleted()) { @@ -662,7 +662,7 @@ public void testQuery() throws InterruptedException { QueryRequest request = QueryRequest.builder(query) .defaultDataset(DatasetId.of(DATASET)) .maxWaitTime(60000L) - .maxResults(1000L) + .pageSize(1000L) .build(); QueryResponse response = bigquery.query(request); while (!response.jobCompleted()) { diff --git a/gcloud-java-contrib/README.md b/gcloud-java-contrib/README.md index 7a935192891d..426417d54e87 100644 --- a/gcloud-java-contrib/README.md +++ b/gcloud-java-contrib/README.md @@ -16,16 +16,16 @@ If you are using Maven, add this to your pom.xml file com.google.gcloud gcloud-java-contrib - 0.1.4 + 0.1.5 ``` If you are using Gradle, add this to your dependencies ```Groovy -compile 'com.google.gcloud:gcloud-java-contrib:0.1.4' +compile 'com.google.gcloud:gcloud-java-contrib:0.1.5' ``` If you are using SBT, add this to your dependencies ```Scala -libraryDependencies += "com.google.gcloud" % "gcloud-java-contrib" % "0.1.4" +libraryDependencies += "com.google.gcloud" % "gcloud-java-contrib" % "0.1.5" ``` Java Versions diff --git a/gcloud-java-contrib/pom.xml b/gcloud-java-contrib/pom.xml index dd976991e2af..bd4a6458dc38 100644 --- a/gcloud-java-contrib/pom.xml +++ b/gcloud-java-contrib/pom.xml @@ -10,7 +10,7 @@ com.google.gcloud gcloud-java-pom - 0.1.5-SNAPSHOT + 0.1.6-SNAPSHOT gcloud-java-contrib diff --git a/gcloud-java-core/README.md b/gcloud-java-core/README.md index bc9463b9cc2b..fc5f481f8ec3 100644 --- a/gcloud-java-core/README.md +++ b/gcloud-java-core/README.md @@ -19,16 +19,16 @@ If you are using Maven, add this to your pom.xml file com.google.gcloud gcloud-java-core - 0.1.4 + 0.1.5 ``` If you are using Gradle, add this to your dependencies ```Groovy -compile 'com.google.gcloud:gcloud-java-core:0.1.4' +compile 'com.google.gcloud:gcloud-java-core:0.1.5' ``` If you are using SBT, add this to your dependencies ```Scala -libraryDependencies += "com.google.gcloud" % "gcloud-java-core" % "0.1.4" +libraryDependencies += "com.google.gcloud" % "gcloud-java-core" % "0.1.5" ``` Troubleshooting diff --git a/gcloud-java-core/pom.xml b/gcloud-java-core/pom.xml index d07a567b7e5a..6d0ed675b423 100644 --- a/gcloud-java-core/pom.xml +++ b/gcloud-java-core/pom.xml @@ -10,7 +10,7 @@ com.google.gcloud gcloud-java-pom - 0.1.5-SNAPSHOT + 0.1.6-SNAPSHOT gcloud-java-core diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/AuthCredentials.java b/gcloud-java-core/src/main/java/com/google/gcloud/AuthCredentials.java index fc5d74d0896c..27cafc181505 
100644 --- a/gcloud-java-core/src/main/java/com/google/gcloud/AuthCredentials.java +++ b/gcloud-java-core/src/main/java/com/google/gcloud/AuthCredentials.java @@ -132,6 +132,12 @@ public RestorableState capture() { } } + /** + * Represents service account credentials. + * + * @see + * User accounts and service accounts + */ public static class ServiceAccountAuthCredentials extends AuthCredentials { private final String account; @@ -195,6 +201,14 @@ public RestorableState capture() { } } + /** + * Represents Application Default Credentials, which are credentials that are inferred from the + * runtime environment. + * + * @see + * Google Application Default Credentials + */ public static class ApplicationDefaultAuthCredentials extends AuthCredentials { private GoogleCredentials googleCredentials; @@ -243,6 +257,50 @@ public RestorableState capture() { } } + /** + * A placeholder for credentials to signify that requests sent to the server should not be + * authenticated. This is typically useful when using the local service emulators, such as + * {@code LocalGcdHelper} and {@code LocalResourceManagerHelper}. + */ + public static class NoAuthCredentials extends AuthCredentials { + + private static final AuthCredentials INSTANCE = new NoAuthCredentials(); + private static final NoAuthCredentialsState STATE = new NoAuthCredentialsState(); + + private static class NoAuthCredentialsState + implements RestorableState, Serializable { + + private static final long serialVersionUID = -4022100563954640465L; + + @Override + public AuthCredentials restore() { + return INSTANCE; + } + + @Override + public int hashCode() { + return getClass().getName().hashCode(); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof NoAuthCredentialsState; + } + } + + private NoAuthCredentials() {} + + @Override + public GoogleCredentials credentials() { + return null; + } + + @Override + public RestorableState capture() { + return STATE; + } + } + public abstract GoogleCredentials credentials(); public static AuthCredentials createForAppEngine() { @@ -281,6 +339,15 @@ public static ServiceAccountAuthCredentials createFor(String account, PrivateKey return new ServiceAccountAuthCredentials(account, privateKey); } + /** + * Creates a placeholder denoting that no credentials should be used. This is typically useful + * when using the local service emulators, such as {@code LocalGcdHelper} and + * {@code LocalResourceManagerHelper}. + */ + public static AuthCredentials noAuth() { + return NoAuthCredentials.INSTANCE; + } + /** * Creates Service Account Credentials given a stream for credentials in JSON format. 
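The `NoAuthCredentials` placeholder added above is meant for the local service emulators. A minimal sketch, assuming a Datastore emulator (for example one started through `LocalGcdHelper`) is already listening locally; the project id and port are placeholders:

```java
import com.google.gcloud.AuthCredentials;
import com.google.gcloud.RetryParams;
import com.google.gcloud.datastore.Datastore;
import com.google.gcloud.datastore.DatastoreOptions;

public class LocalDatastoreSnippet {
  public static void main(String... args) {
    // Point the client at the locally running emulator and skip authentication
    DatastoreOptions options = DatastoreOptions.builder()
        .projectId("my-test-project")
        .host("http://localhost:8080")
        .authCredentials(AuthCredentials.noAuth())
        .retryParams(RetryParams.noRetries())
        .build();
    Datastore datastore = options.service();
    // use "datastore" against the emulator as usual...
  }
}
```

The same `AuthCredentials.noAuth()` wiring appears in the updated `DatastoreTest` further down in this change.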
* diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/BaseServiceException.java b/gcloud-java-core/src/main/java/com/google/gcloud/BaseServiceException.java index 579340f1256e..4e0d03e0073a 100644 --- a/gcloud-java-core/src/main/java/com/google/gcloud/BaseServiceException.java +++ b/gcloud-java-core/src/main/java/com/google/gcloud/BaseServiceException.java @@ -32,6 +32,16 @@ */ public class BaseServiceException extends RuntimeException { + private static final long serialVersionUID = 759921776378760835L; + public static final int UNKNOWN_CODE = 0; + + private final int code; + private final boolean retryable; + private final String reason; + private final boolean idempotent; + private final String location; + private final String debugInfo; + protected static final class Error implements Serializable { private static final long serialVersionUID = -4019600198652965721L; @@ -79,16 +89,6 @@ public int hashCode() { } } - private static final long serialVersionUID = 759921776378760835L; - public static final int UNKNOWN_CODE = 0; - - private final int code; - private final boolean retryable; - private final String reason; - private final boolean idempotent; - private final String location; - private final String debugInfo; - public BaseServiceException(IOException exception, boolean idempotent) { super(message(exception), exception); int code = UNKNOWN_CODE; @@ -97,13 +97,17 @@ public BaseServiceException(IOException exception, boolean idempotent) { String debugInfo = null; if (exception instanceof GoogleJsonResponseException) { GoogleJsonError jsonError = ((GoogleJsonResponseException) exception).getDetails(); - Error error = error(jsonError); - code = error.code; - reason = error.reason; - if (reason != null) { - GoogleJsonError.ErrorInfo errorInfo = jsonError.getErrors().get(0); - location = errorInfo.getLocation(); - debugInfo = (String) errorInfo.get("debugInfo"); + if (jsonError != null) { + Error error = error(jsonError); + code = error.code; + reason = error.reason; + if (reason != null) { + GoogleJsonError.ErrorInfo errorInfo = jsonError.getErrors().get(0); + location = errorInfo.getLocation(); + debugInfo = (String) errorInfo.get("debugInfo"); + } + } else { + code = ((GoogleJsonResponseException) exception).getStatusCode(); } } this.code = code; @@ -194,6 +198,31 @@ protected String debugInfo() { return debugInfo; } + @Override + public boolean equals(Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof BaseServiceException)) { + return false; + } + BaseServiceException other = (BaseServiceException) obj; + return Objects.equals(getCause(), other.getCause()) + && Objects.equals(getMessage(), other.getMessage()) + && code == other.code + && retryable == other.retryable + && Objects.equals(reason, other.reason) + && idempotent == other.idempotent + && Objects.equals(location, other.location) + && Objects.equals(debugInfo, other.debugInfo); + } + + @Override + public int hashCode() { + return Objects.hash(getCause(), getMessage(), code, retryable, reason, idempotent, location, + debugInfo); + } + protected static String reason(GoogleJsonError error) { if (error.getErrors() != null && !error.getErrors().isEmpty()) { return error.getErrors().get(0).getReason(); @@ -207,7 +236,10 @@ protected static Error error(GoogleJsonError error) { protected static String message(IOException exception) { if (exception instanceof GoogleJsonResponseException) { - return ((GoogleJsonResponseException) exception).getDetails().getMessage(); + GoogleJsonError details = 
((GoogleJsonResponseException) exception).getDetails(); + if (details != null) { + return details.getMessage(); + } } return exception.getMessage(); } diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/ExceptionHandler.java b/gcloud-java-core/src/main/java/com/google/gcloud/ExceptionHandler.java index 39d4c4e75a1a..0b3c923d1eb9 100644 --- a/gcloud-java-core/src/main/java/com/google/gcloud/ExceptionHandler.java +++ b/gcloud-java-core/src/main/java/com/google/gcloud/ExceptionHandler.java @@ -26,6 +26,7 @@ import java.io.Serializable; import java.lang.reflect.Method; +import java.util.Objects; import java.util.Set; import java.util.concurrent.Callable; @@ -259,6 +260,26 @@ boolean shouldRetry(Exception ex) { return retryResult == Interceptor.RetryResult.RETRY; } + @Override + public int hashCode() { + return Objects.hash(interceptors, retriableExceptions, nonRetriableExceptions, retryInfo); + } + + @Override + public boolean equals(Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof ExceptionHandler)) { + return false; + } + ExceptionHandler other = (ExceptionHandler) obj; + return Objects.equals(interceptors, other.interceptors) + && Objects.equals(retriableExceptions, other.retriableExceptions) + && Objects.equals(nonRetriableExceptions, other.nonRetriableExceptions) + && Objects.equals(retryInfo, other.retryInfo); + } + /** * Returns an instance which retry any checked exception and abort on any runtime exception. */ diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/IamPolicy.java b/gcloud-java-core/src/main/java/com/google/gcloud/IamPolicy.java index 748eaba2ab4c..9cce4b23c864 100644 --- a/gcloud-java-core/src/main/java/com/google/gcloud/IamPolicy.java +++ b/gcloud-java-core/src/main/java/com/google/gcloud/IamPolicy.java @@ -17,17 +17,16 @@ package com.google.gcloud; import static com.google.common.base.Preconditions.checkArgument; +import static com.google.common.base.Preconditions.checkNotNull; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import java.io.Serializable; import java.util.Arrays; -import java.util.Collection; import java.util.HashMap; import java.util.HashSet; -import java.util.LinkedList; -import java.util.List; +import java.util.LinkedHashSet; import java.util.Map; import java.util.Objects; import java.util.Set; @@ -69,12 +68,16 @@ protected Builder() {} /** * Replaces the builder's map of bindings with the given map of bindings. * - * @throws IllegalArgumentException if the provided map is null or contain any null values + * @throws NullPointerException if the given map is null or contains any null keys or values + * @throws IllegalArgumentException if any identities in the given map are null */ public final B bindings(Map> bindings) { - checkArgument(bindings != null, "The provided map of bindings cannot be null."); + checkNotNull(bindings, "The provided map of bindings cannot be null."); for (Map.Entry> binding : bindings.entrySet()) { - verifyBinding(binding.getKey(), binding.getValue()); + checkNotNull(binding.getKey(), "The role cannot be null."); + Set identities = binding.getValue(); + checkNotNull(identities, "A role cannot be assigned to a null set of identities."); + checkArgument(!identities.contains(null), "Null identities are not permitted."); } this.bindings.clear(); for (Map.Entry> binding : bindings.entrySet()) { @@ -84,78 +87,50 @@ public final B bindings(Map> bindings) { } /** - * Adds a binding to the policy. 
- * - * @throws IllegalArgumentException if the policy already contains a binding with the same role - * or if the role or any identities are null - */ - public final B addBinding(R role, Set identities) { - verifyBinding(role, identities); - checkArgument(!bindings.containsKey(role), - "The policy already contains a binding with the role " + role.toString() + "."); - bindings.put(role, new HashSet(identities)); - return self(); - } - - /** - * Adds a binding to the policy. - * - * @throws IllegalArgumentException if the policy already contains a binding with the same role - * or if the role or any identities are null - */ - public final B addBinding(R role, Identity first, Identity... others) { - HashSet identities = new HashSet<>(); - identities.add(first); - identities.addAll(Arrays.asList(others)); - return addBinding(role, identities); - } - - private void verifyBinding(R role, Collection identities) { - checkArgument(role != null, "The role cannot be null."); - verifyIdentities(identities); - } - - private void verifyIdentities(Collection identities) { - checkArgument(identities != null, "A role cannot be assigned to a null set of identities."); - checkArgument(!identities.contains(null), "Null identities are not permitted."); - } - - /** - * Removes the binding associated with the specified role. + * Removes the role (and all identities associated with that role) from the policy. */ - public final B removeBinding(R role) { + public final B removeRole(R role) { bindings.remove(role); return self(); } /** - * Adds one or more identities to an existing binding. + * Adds one or more identities to the policy under the role specified. * - * @throws IllegalArgumentException if the policy doesn't contain a binding with the specified - * role or any identities are null + * @throws NullPointerException if the role or any of the identities is null. */ public final B addIdentity(R role, Identity first, Identity... others) { - checkArgument(bindings.containsKey(role), - "The policy doesn't contain the role " + role.toString() + "."); - List toAdd = new LinkedList<>(); + String nullIdentityMessage = "Null identities are not permitted."; + checkNotNull(first, nullIdentityMessage); + checkNotNull(others, nullIdentityMessage); + for (Identity identity : others) { + checkNotNull(identity, nullIdentityMessage); + } + Set toAdd = new LinkedHashSet<>(); toAdd.add(first); toAdd.addAll(Arrays.asList(others)); - verifyIdentities(toAdd); - bindings.get(role).addAll(toAdd); + Set identities = bindings.get(checkNotNull(role, "The role cannot be null.")); + if (identities == null) { + identities = new HashSet(); + bindings.put(role, identities); + } + identities.addAll(toAdd); return self(); } /** - * Removes one or more identities from an existing binding. - * - * @throws IllegalArgumentException if the policy doesn't contain a binding with the specified - * role + * Removes one or more identities from an existing binding. Does nothing if the binding + * associated with the provided role doesn't exist. */ public final B removeIdentity(R role, Identity first, Identity... 
others) { - checkArgument(bindings.containsKey(role), - "The policy doesn't contain the role " + role.toString() + "."); - bindings.get(role).remove(first); - bindings.get(role).removeAll(Arrays.asList(others)); + Set identities = bindings.get(role); + if (identities != null) { + identities.remove(first); + identities.removeAll(Arrays.asList(others)); + } + if (identities != null && identities.isEmpty()) { + bindings.remove(role); + } return self(); } diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/Identity.java b/gcloud-java-core/src/main/java/com/google/gcloud/Identity.java index d1644198f759..687a76ffc42c 100644 --- a/gcloud-java-core/src/main/java/com/google/gcloud/Identity.java +++ b/gcloud-java-core/src/main/java/com/google/gcloud/Identity.java @@ -44,7 +44,7 @@ public final class Identity implements Serializable { private static final long serialVersionUID = -8181841964597657446L; private final Type type; - private final String id; + private final String value; /** * The types of IAM identities. @@ -82,9 +82,9 @@ public enum Type { DOMAIN } - private Identity(Type type, String id) { + private Identity(Type type, String value) { this.type = type; - this.id = id; + this.value = value; } public Type type() { @@ -92,7 +92,7 @@ public Type type() { } /** - * Returns the string identifier for this identity. The id corresponds to: + * Returns the string identifier for this identity. The value corresponds to: *
    *
  • email address (for identities of type {@code USER}, {@code SERVICE_ACCOUNT}, and * {@code GROUP}) @@ -101,8 +101,8 @@ public Type type() { * {@code ALL_AUTHENTICATED_USERS}) *
*/ - public String id() { - return id; + public String value() { + return value; } /** @@ -163,7 +163,7 @@ public static Identity domain(String domain) { @Override public int hashCode() { - return Objects.hash(id, type); + return Objects.hash(value, type); } @Override @@ -172,7 +172,7 @@ public boolean equals(Object obj) { return false; } Identity other = (Identity) obj; - return Objects.equals(id, other.id()) && Objects.equals(type, other.type()); + return Objects.equals(value, other.value()) && Objects.equals(type, other.type()); } /** @@ -186,13 +186,13 @@ public String strValue() { case ALL_AUTHENTICATED_USERS: return "allAuthenticatedUsers"; case USER: - return "user:" + id; + return "user:" + value; case SERVICE_ACCOUNT: - return "serviceAccount:" + id; + return "serviceAccount:" + value; case GROUP: - return "group:" + id; + return "group:" + value; case DOMAIN: - return "domain:" + id; + return "domain:" + value; default: throw new IllegalStateException("Unexpected identity type: " + type); } diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/ServiceOptions.java b/gcloud-java-core/src/main/java/com/google/gcloud/ServiceOptions.java index 31e543809464..d45069434a26 100644 --- a/gcloud-java-core/src/main/java/com/google/gcloud/ServiceOptions.java +++ b/gcloud-java-core/src/main/java/com/google/gcloud/ServiceOptions.java @@ -523,9 +523,10 @@ public RetryParams retryParams() { * options. */ public HttpRequestInitializer httpRequestInitializer() { - final HttpRequestInitializer delegate = authCredentials() != null - ? new HttpCredentialsAdapter(authCredentials().credentials().createScoped(scopes())) - : null; + final HttpRequestInitializer delegate = + authCredentials() != null && authCredentials.credentials() != null + ? new HttpCredentialsAdapter(authCredentials().credentials().createScoped(scopes())) + : null; return new HttpRequestInitializer() { @Override public void initialize(HttpRequest httpRequest) throws IOException { diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/package-info.java b/gcloud-java-core/src/main/java/com/google/gcloud/package-info.java new file mode 100644 index 000000000000..d527640c99f9 --- /dev/null +++ b/gcloud-java-core/src/main/java/com/google/gcloud/package-info.java @@ -0,0 +1,20 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Core classes for the {@code gcloud-java} library. + */ +package com.google.gcloud; diff --git a/gcloud-java-core/src/test/java/com/google/gcloud/BaseSerializationTest.java b/gcloud-java-core/src/test/java/com/google/gcloud/BaseSerializationTest.java new file mode 100644 index 000000000000..e9ab3d47984b --- /dev/null +++ b/gcloud-java-core/src/test/java/com/google/gcloud/BaseSerializationTest.java @@ -0,0 +1,86 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud; + +import static com.google.common.base.MoreObjects.firstNonNull; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotSame; + +import org.junit.Test; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; +import java.io.Serializable; + +/** + * Base class for serialization tests. To use this class in your tests override the + * {@code serializableObjects()} method to return all objects that must be serializable. Also + * override {@code restorableObjects()} method to return all restorable objects whose state must be + * tested for proper serialization. Both methods can return {@code null} if no such object needs to + * be tested. + */ +public abstract class BaseSerializationTest { + + /** + * Returns all objects for which correct serialization must be tested. + */ + protected abstract Serializable[] serializableObjects(); + + /** + * Returns all restorable objects whose state must be tested for proper serialization. + */ + protected abstract Restorable[] restorableObjects(); + + @Test + public void testSerializableObjects() throws Exception { + for (Serializable obj : firstNonNull(serializableObjects(), new Serializable[0])) { + Object copy = serializeAndDeserialize(obj); + assertEquals(obj, obj); + assertEquals(obj, copy); + assertEquals(obj.hashCode(), copy.hashCode()); + assertEquals(obj.toString(), copy.toString()); + assertNotSame(obj, copy); + assertEquals(copy, copy); + } + } + + @Test + public void testRestorableObjects() throws Exception { + for (Restorable restorable : firstNonNull(restorableObjects(), new Restorable[0])) { + RestorableState state = restorable.capture(); + RestorableState deserializedState = serializeAndDeserialize(state); + assertEquals(state, deserializedState); + assertEquals(state.hashCode(), deserializedState.hashCode()); + assertEquals(state.toString(), deserializedState.toString()); + } + } + + @SuppressWarnings("unchecked") + public T serializeAndDeserialize(T obj) throws IOException, ClassNotFoundException { + ByteArrayOutputStream bytes = new ByteArrayOutputStream(); + try (ObjectOutputStream output = new ObjectOutputStream(bytes)) { + output.writeObject(obj); + } + try (ObjectInputStream input = + new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray()))) { + return (T) input.readObject(); + } + } +} diff --git a/gcloud-java-core/src/test/java/com/google/gcloud/IamPolicyTest.java b/gcloud-java-core/src/test/java/com/google/gcloud/IamPolicyTest.java index db0935c4766d..235c2c2b1c85 100644 --- a/gcloud-java-core/src/test/java/com/google/gcloud/IamPolicyTest.java +++ b/gcloud-java-core/src/test/java/com/google/gcloud/IamPolicyTest.java @@ -28,6 +28,8 @@ import org.junit.Test; +import java.util.HashMap; +import java.util.HashSet; import java.util.Map; import java.util.Set; @@ -46,8 +48,8 @@ public class IamPolicyTest { "editor", ImmutableSet.of(ALL_AUTH_USERS, GROUP, DOMAIN)); private static final PolicyImpl SIMPLE_POLICY = 
PolicyImpl.builder() - .addBinding("viewer", ImmutableSet.of(USER, SERVICE_ACCOUNT, ALL_USERS)) - .addBinding("editor", ImmutableSet.of(ALL_AUTH_USERS, GROUP, DOMAIN)) + .addIdentity("viewer", USER, SERVICE_ACCOUNT, ALL_USERS) + .addIdentity("editor", ALL_AUTH_USERS, GROUP, DOMAIN) .build(); private static final PolicyImpl FULL_POLICY = new PolicyImpl.Builder(SIMPLE_POLICY.bindings(), "etag", 1).build(); @@ -93,7 +95,7 @@ public void testBuilder() { assertEquals(editorBinding, policy.bindings()); assertEquals("etag", policy.etag()); assertEquals(1, policy.version().intValue()); - policy = SIMPLE_POLICY.toBuilder().removeBinding("editor").build(); + policy = SIMPLE_POLICY.toBuilder().removeRole("editor").build(); assertEquals(ImmutableMap.of("viewer", BINDINGS.get("viewer")), policy.bindings()); assertNull(policy.etag()); assertNull(policy.version()); @@ -105,22 +107,61 @@ public void testBuilder() { policy.bindings()); assertNull(policy.etag()); assertNull(policy.version()); - policy = PolicyImpl.builder().addBinding("owner", USER, SERVICE_ACCOUNT).build(); + policy = PolicyImpl.builder() + .removeIdentity("viewer", USER) + .addIdentity("owner", USER, SERVICE_ACCOUNT) + .addIdentity("editor", GROUP) + .removeIdentity("editor", GROUP) + .build(); assertEquals( ImmutableMap.of("owner", ImmutableSet.of(USER, SERVICE_ACCOUNT)), policy.bindings()); assertNull(policy.etag()); assertNull(policy.version()); + } + + @Test + public void testIllegalPolicies() { + try { + PolicyImpl.builder().addIdentity(null, USER); + fail("Null role should cause exception."); + } catch (NullPointerException ex) { + assertEquals("The role cannot be null.", ex.getMessage()); + } + try { + PolicyImpl.builder().addIdentity("viewer", null, USER); + fail("Null identity should cause exception."); + } catch (NullPointerException ex) { + assertEquals("Null identities are not permitted.", ex.getMessage()); + } + try { + PolicyImpl.builder().addIdentity("viewer", USER, (Identity[]) null); + fail("Null identity should cause exception."); + } catch (NullPointerException ex) { + assertEquals("Null identities are not permitted.", ex.getMessage()); + } + try { + PolicyImpl.builder().bindings(null); + fail("Null bindings map should cause exception."); + } catch (NullPointerException ex) { + assertEquals("The provided map of bindings cannot be null.", ex.getMessage()); + } try { - SIMPLE_POLICY.toBuilder().addBinding("viewer", USER); - fail("Should have failed due to duplicate role."); - } catch (IllegalArgumentException e) { - assertEquals("The policy already contains a binding with the role viewer.", e.getMessage()); + Map> bindings = new HashMap<>(); + bindings.put("viewer", null); + PolicyImpl.builder().bindings(bindings); + fail("Null set of identities should cause exception."); + } catch (NullPointerException ex) { + assertEquals("A role cannot be assigned to a null set of identities.", ex.getMessage()); } try { - SIMPLE_POLICY.toBuilder().addBinding("editor", ImmutableSet.of(USER)); - fail("Should have failed due to duplicate role."); - } catch (IllegalArgumentException e) { - assertEquals("The policy already contains a binding with the role editor.", e.getMessage()); + Map> bindings = new HashMap<>(); + Set identities = new HashSet<>(); + identities.add(null); + bindings.put("viewer", identities); + PolicyImpl.builder().bindings(bindings); + fail("Null identity should cause exception."); + } catch (IllegalArgumentException ex) { + assertEquals("Null identities are not permitted.", ex.getMessage()); } } diff --git 
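The tests above also exercise the `Identity` accessor rename (`id()` to `value()`) and the reworked policy builder, where `addIdentity` now creates a binding on demand and removing a role's last identity drops the role. A small sketch of the `Identity` side, using only the factory methods and accessors shown in this change; the email and domain are placeholders:

```java
import com.google.gcloud.Identity;

public class IdentitySnippet {
  public static void main(String... args) {
    Identity user = Identity.user("abc@gmail.com");
    System.out.println(user.type());     // USER
    System.out.println(user.value());    // "abc@gmail.com" (formerly id())
    System.out.println(user.strValue()); // "user:abc@gmail.com"

    // Identities without an identifier report a null value
    Identity allUsers = Identity.allUsers();
    System.out.println(allUsers.value()); // null

    Identity domain = Identity.domain("google.com");
    System.out.println(domain.strValue()); // "domain:google.com"
  }
}
```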
a/gcloud-java-core/src/test/java/com/google/gcloud/IdentityTest.java b/gcloud-java-core/src/test/java/com/google/gcloud/IdentityTest.java index 828f1c839431..a42bc9db7abd 100644 --- a/gcloud-java-core/src/test/java/com/google/gcloud/IdentityTest.java +++ b/gcloud-java-core/src/test/java/com/google/gcloud/IdentityTest.java @@ -34,19 +34,19 @@ public class IdentityTest { @Test public void testAllUsers() { assertEquals(Identity.Type.ALL_USERS, ALL_USERS.type()); - assertNull(ALL_USERS.id()); + assertNull(ALL_USERS.value()); } @Test public void testAllAuthenticatedUsers() { assertEquals(Identity.Type.ALL_AUTHENTICATED_USERS, ALL_AUTH_USERS.type()); - assertNull(ALL_AUTH_USERS.id()); + assertNull(ALL_AUTH_USERS.value()); } @Test public void testUser() { assertEquals(Identity.Type.USER, USER.type()); - assertEquals("abc@gmail.com", USER.id()); + assertEquals("abc@gmail.com", USER.value()); } @Test(expected = NullPointerException.class) @@ -57,7 +57,7 @@ public void testUserNullEmail() { @Test public void testServiceAccount() { assertEquals(Identity.Type.SERVICE_ACCOUNT, SERVICE_ACCOUNT.type()); - assertEquals("service-account@gmail.com", SERVICE_ACCOUNT.id()); + assertEquals("service-account@gmail.com", SERVICE_ACCOUNT.value()); } @Test(expected = NullPointerException.class) @@ -68,7 +68,7 @@ public void testServiceAccountNullEmail() { @Test public void testGroup() { assertEquals(Identity.Type.GROUP, GROUP.type()); - assertEquals("group@gmail.com", GROUP.id()); + assertEquals("group@gmail.com", GROUP.value()); } @Test(expected = NullPointerException.class) @@ -79,7 +79,7 @@ public void testGroupNullEmail() { @Test public void testDomain() { assertEquals(Identity.Type.DOMAIN, DOMAIN.type()); - assertEquals("google.com", DOMAIN.id()); + assertEquals("google.com", DOMAIN.value()); } @Test(expected = NullPointerException.class) @@ -100,6 +100,6 @@ public void testIdentityToAndFromPb() { private void compareIdentities(Identity expected, Identity actual) { assertEquals(expected, actual); assertEquals(expected.type(), actual.type()); - assertEquals(expected.id(), actual.id()); + assertEquals(expected.value(), actual.value()); } } diff --git a/gcloud-java-core/src/test/java/com/google/gcloud/SerializationTest.java b/gcloud-java-core/src/test/java/com/google/gcloud/SerializationTest.java new file mode 100644 index 000000000000..3255a17333aa --- /dev/null +++ b/gcloud-java-core/src/test/java/com/google/gcloud/SerializationTest.java @@ -0,0 +1,100 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud; + +import com.google.common.collect.ImmutableList; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.Serializable; + +public class SerializationTest extends BaseSerializationTest { + + private static class SomeIamPolicy extends IamPolicy { + + private static final long serialVersionUID = 271243551016958285L; + + private static class Builder extends IamPolicy.Builder { + + @Override + public SomeIamPolicy build() { + return new SomeIamPolicy(this); + } + } + + protected SomeIamPolicy(Builder builder) { + super(builder); + } + + @Override + public Builder toBuilder() { + return new Builder(); + } + } + + private static final BaseServiceException BASE_SERVICE_EXCEPTION = + new BaseServiceException(42, "message", "reason", true); + private static final ExceptionHandler EXCEPTION_HANDLER = ExceptionHandler.defaultInstance(); + private static final Identity IDENTITY = Identity.allAuthenticatedUsers(); + private static final PageImpl PAGE = + new PageImpl<>(null, "cursor", ImmutableList.of("string1", "string2")); + private static final RetryParams RETRY_PARAMS = RetryParams.defaultInstance(); + private static final SomeIamPolicy SOME_IAM_POLICY = new SomeIamPolicy.Builder().build(); + private static final String JSON_KEY = "{\n" + + " \"private_key_id\": \"somekeyid\",\n" + + " \"private_key\": \"-----BEGIN PRIVATE KEY-----\\nMIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggS" + + "kAgEAAoIBAQC+K2hSuFpAdrJI\\nnCgcDz2M7t7bjdlsadsasad+fvRSW6TjNQZ3p5LLQY1kSZRqBqylRkzteMOyHg" + + "aR\\n0Pmxh3ILCND5men43j3h4eDbrhQBuxfEMalkG92sL+PNQSETY2tnvXryOvmBRwa/\\nQP/9dJfIkIDJ9Fw9N4" + + "Bhhhp6mCcRpdQjV38H7JsyJ7lih/oNjECgYAt\\nknddadwkwewcVxHFhcZJO+XWf6ofLUXpRwiTZakGMn8EE1uVa2" + + "LgczOjwWHGi99MFjxSer5m9\\n1tCa3/KEGKiS/YL71JvjwX3mb+cewlkcmweBKZHM2JPTk0ZednFSpVZMtycjkbLa" + + "\\ndYOS8V85AgMBewECggEBAKksaldajfDZDV6nGqbFjMiizAKJolr/M3OQw16K6o3/\\n0S31xIe3sSlgW0+UbYlF" + + "4U8KifhManD1apVSC3csafaspP4RZUHFhtBywLO9pR5c\\nr6S5aLp+gPWFyIp1pfXbWGvc5VY/v9x7ya1VEa6rXvL" + + "sKupSeWAW4tMj3eo/64ge\\nsdaceaLYw52KeBYiT6+vpsnYrEkAHO1fF/LavbLLOFJmFTMxmsNaG0tuiJHgjshB\\" + + "n82DpMCbXG9YcCgI/DbzuIjsdj2JC1cascSP//3PmefWysucBQe7Jryb6NQtASmnv\\nCdDw/0jmZTEjpe4S1lxfHp" + + "lAhHFtdgYTvyYtaLZiVVkCgYEA8eVpof2rceecw/I6\\n5ng1q3Hl2usdWV/4mZMvR0fOemacLLfocX6IYxT1zA1FF" + + "JlbXSRsJMf/Qq39mOR2\\nSpW+hr4jCoHeRVYLgsbggtrevGmILAlNoqCMpGZ6vDmJpq6ECV9olliDvpPgWOP+\\nm" + + "YPDreFBGxWvQrADNbRt2dmGsrsCgYEAyUHqB2wvJHFqdmeBsaacewzV8x9WgmeX\\ngUIi9REwXlGDW0Mz50dxpxcK" + + "CAYn65+7TCnY5O/jmL0VRxU1J2mSWyWTo1C+17L0\\n3fUqjxL1pkefwecxwecvC+gFFYdJ4CQ/MHHXU81Lwl1iWdF" + + "Cd2UoGddYaOF+KNeM\\nHC7cmqra+JsCgYEAlUNywzq8nUg7282E+uICfCB0LfwejuymR93CtsFgb7cRd6ak\\nECR" + + "8FGfCpH8ruWJINllbQfcHVCX47ndLZwqv3oVFKh6pAS/vVI4dpOepP8++7y1u\\ncoOvtreXCX6XqfrWDtKIvv0vjl" + + "HBhhhp6mCcRpdQjV38H7JsyJ7lih/oNjECgYAt\\nkndj5uNl5SiuVxHFhcZJO+XWf6ofLUregtevZakGMn8EE1uVa" + + "2AY7eafmoU/nZPT\\n00YB0TBATdCbn/nBSuKDESkhSg9s2GEKQZG5hBmL5uCMfo09z3SfxZIhJdlerreP\\nJ7gSi" + + "dI12N+EZxYd4xIJh/HFDgp7RRO87f+WJkofMQKBgGTnClK1VMaCRbJZPriw\\nEfeFCoOX75MxKwXs6xgrw4W//AYG" + + "GUjDt83lD6AZP6tws7gJ2IwY/qP7+lyhjEqN\\nHtfPZRGFkGZsdaksdlaksd323423d+15/UvrlRSFPNj1tWQmNKk" + + "XyRDW4IG1Oa2p\\nrALStNBx5Y9t0/LQnFI4w3aG\\n-----END PRIVATE KEY-----\\n\",\n" + + " \"client_email\": \"someclientid@developer.gserviceaccount.com\",\n" + + " \"client_id\": \"someclientid.apps.googleusercontent.com\",\n" + + " \"type\": \"service_account\"\n" + + "}"; + + @Override + protected Serializable[] serializableObjects() { + return new 
Serializable[]{BASE_SERVICE_EXCEPTION, EXCEPTION_HANDLER, IDENTITY, PAGE, + RETRY_PARAMS, SOME_IAM_POLICY}; + } + + @Override + protected Restorable[] restorableObjects() { + try { + return new Restorable[]{AuthCredentials.createForAppEngine(), AuthCredentials.noAuth(), + AuthCredentials.createForJson(new ByteArrayInputStream(JSON_KEY.getBytes()))}; + } catch (IOException ex) { + // never reached + throw new RuntimeException(ex); + } + } +} diff --git a/gcloud-java-datastore/README.md b/gcloud-java-datastore/README.md index dd341ba244c3..0d89a0a07e3e 100644 --- a/gcloud-java-datastore/README.md +++ b/gcloud-java-datastore/README.md @@ -22,16 +22,16 @@ If you are using Maven, add this to your pom.xml file com.google.gcloud gcloud-java-datastore - 0.1.4 + 0.1.5 ``` If you are using Gradle, add this to your dependencies ```Groovy -compile 'com.google.gcloud:gcloud-java-datastore:0.1.4' +compile 'com.google.gcloud:gcloud-java-datastore:0.1.5' ``` If you are using SBT, add this to your dependencies ```Scala -libraryDependencies += "com.google.gcloud" % "gcloud-java-datastore" % "0.1.4" +libraryDependencies += "com.google.gcloud" % "gcloud-java-datastore" % "0.1.5" ``` Example Application diff --git a/gcloud-java-datastore/pom.xml b/gcloud-java-datastore/pom.xml index 452986ba5ea3..f3b46e22b3c8 100644 --- a/gcloud-java-datastore/pom.xml +++ b/gcloud-java-datastore/pom.xml @@ -10,7 +10,7 @@ com.google.gcloud gcloud-java-pom - 0.1.5-SNAPSHOT + 0.1.6-SNAPSHOT gcloud-java-datastore @@ -33,6 +33,13 @@ + + ${project.groupId} + gcloud-java-core + ${project.version} + test-jar + test + junit junit diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreImpl.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreImpl.java index 92d18ed4787c..49a5728a4da9 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreImpl.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreImpl.java @@ -26,7 +26,7 @@ import com.google.gcloud.RetryHelper; import com.google.gcloud.RetryHelper.RetryHelperException; import com.google.gcloud.RetryParams; -import com.google.gcloud.spi.DatastoreRpc; +import com.google.gcloud.datastore.spi.DatastoreRpc; import com.google.protobuf.ByteString; import java.util.Arrays; diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreOptions.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreOptions.java index 2ec0f2be8f2b..db1a5f800ce8 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreOptions.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreOptions.java @@ -24,9 +24,9 @@ import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.gcloud.ServiceOptions; -import com.google.gcloud.spi.DatastoreRpc; -import com.google.gcloud.spi.DatastoreRpcFactory; -import com.google.gcloud.spi.DefaultDatastoreRpc; +import com.google.gcloud.datastore.spi.DatastoreRpc; +import com.google.gcloud.datastore.spi.DatastoreRpcFactory; +import com.google.gcloud.datastore.spi.DefaultDatastoreRpc; import java.lang.reflect.Method; import java.util.Iterator; diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DatastoreRpc.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/spi/DatastoreRpc.java similarity index 66% rename from gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DatastoreRpc.java 
rename to gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/spi/DatastoreRpc.java index fd916e0a1c87..002078550d1f 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DatastoreRpc.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/spi/DatastoreRpc.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package com.google.gcloud.spi; +package com.google.gcloud.datastore.spi; import com.google.api.services.datastore.DatastoreV1.AllocateIdsRequest; import com.google.api.services.datastore.DatastoreV1.AllocateIdsResponse; @@ -35,16 +35,46 @@ */ public interface DatastoreRpc { - AllocateIdsResponse allocateIds(AllocateIdsRequest request) throws DatastoreException; + /** + * Sends an allocate IDs request. + * + * @throws DatastoreException upon failure + */ + AllocateIdsResponse allocateIds(AllocateIdsRequest request); + /** + * Sends a begin transaction request. + * + * @throws DatastoreException upon failure + */ BeginTransactionResponse beginTransaction(BeginTransactionRequest request) throws DatastoreException; - CommitResponse commit(CommitRequest request) throws DatastoreException; + /** + * Sends a commit request. + * + * @throws DatastoreException upon failure + */ + CommitResponse commit(CommitRequest request); - LookupResponse lookup(LookupRequest request) throws DatastoreException; + /** + * Sends a lookup request. + * + * @throws DatastoreException upon failure + */ + LookupResponse lookup(LookupRequest request); - RollbackResponse rollback(RollbackRequest request) throws DatastoreException; + /** + * Sends a rollback request. + * + * @throws DatastoreException upon failure + */ + RollbackResponse rollback(RollbackRequest request); - RunQueryResponse runQuery(RunQueryRequest request) throws DatastoreException; + /** + * Sends a request to run a query. + * + * @throws DatastoreException upon failure + */ + RunQueryResponse runQuery(RunQueryRequest request); } diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DatastoreRpcFactory.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/spi/DatastoreRpcFactory.java similarity index 90% rename from gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DatastoreRpcFactory.java rename to gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/spi/DatastoreRpcFactory.java index 1815dda30f5d..0979b2203037 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DatastoreRpcFactory.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/spi/DatastoreRpcFactory.java @@ -14,9 +14,10 @@ * limitations under the License. */ -package com.google.gcloud.spi; +package com.google.gcloud.datastore.spi; import com.google.gcloud.datastore.DatastoreOptions; +import com.google.gcloud.spi.ServiceRpcFactory; /** * An interface for Datastore RPC factory. 
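Because the Datastore SPI methods above now report failures through the unchecked `DatastoreException` instead of a declared checked exception, application code that wants to handle RPC failures catches it around its Datastore calls. A hedged sketch, assuming the existing `DatastoreOptions.defaultInstance()`, `newKeyFactory()` and `get(Key)` entry points; the kind and key name are placeholders:

```java
import com.google.gcloud.datastore.Datastore;
import com.google.gcloud.datastore.DatastoreException;
import com.google.gcloud.datastore.DatastoreOptions;
import com.google.gcloud.datastore.Entity;
import com.google.gcloud.datastore.Key;

public class HandleDatastoreErrors {
  public static void main(String... args) {
    Datastore datastore = DatastoreOptions.defaultInstance().service();
    Key key = datastore.newKeyFactory().kind("Task").newKey("sample-task");
    try {
      Entity entity = datastore.get(key); // may be null if the entity does not exist
      System.out.println(entity);
    } catch (DatastoreException ex) {
      // DatastoreException is a runtime exception; no throws clause is needed
      System.err.println("Datastore call failed: " + ex.getMessage());
    }
  }
}
```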
diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DefaultDatastoreRpc.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/spi/DefaultDatastoreRpc.java similarity index 92% rename from gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DefaultDatastoreRpc.java rename to gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/spi/DefaultDatastoreRpc.java index c82ff9689f68..093322fa4117 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DefaultDatastoreRpc.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/spi/DefaultDatastoreRpc.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package com.google.gcloud.spi; +package com.google.gcloud.datastore.spi; import com.google.api.services.datastore.DatastoreV1.AllocateIdsRequest; import com.google.api.services.datastore.DatastoreV1.AllocateIdsResponse; @@ -111,8 +111,7 @@ private static DatastoreException translate( } @Override - public AllocateIdsResponse allocateIds(AllocateIdsRequest request) - throws DatastoreException { + public AllocateIdsResponse allocateIds(AllocateIdsRequest request) { try { return client.allocateIds(request); } catch (com.google.api.services.datastore.client.DatastoreException ex) { @@ -121,8 +120,7 @@ public AllocateIdsResponse allocateIds(AllocateIdsRequest request) } @Override - public BeginTransactionResponse beginTransaction(BeginTransactionRequest request) - throws DatastoreException { + public BeginTransactionResponse beginTransaction(BeginTransactionRequest request) { try { return client.beginTransaction(request); } catch (com.google.api.services.datastore.client.DatastoreException ex) { @@ -131,7 +129,7 @@ public BeginTransactionResponse beginTransaction(BeginTransactionRequest request } @Override - public CommitResponse commit(CommitRequest request) throws DatastoreException { + public CommitResponse commit(CommitRequest request) { try { return client.commit(request); } catch (com.google.api.services.datastore.client.DatastoreException ex) { @@ -140,7 +138,7 @@ public CommitResponse commit(CommitRequest request) throws DatastoreException { } @Override - public LookupResponse lookup(LookupRequest request) throws DatastoreException { + public LookupResponse lookup(LookupRequest request) { try { return client.lookup(request); } catch (com.google.api.services.datastore.client.DatastoreException ex) { @@ -149,7 +147,7 @@ public LookupResponse lookup(LookupRequest request) throws DatastoreException { } @Override - public RollbackResponse rollback(RollbackRequest request) throws DatastoreException { + public RollbackResponse rollback(RollbackRequest request) { try { return client.rollback(request); } catch (com.google.api.services.datastore.client.DatastoreException ex) { @@ -158,7 +156,7 @@ public RollbackResponse rollback(RollbackRequest request) throws DatastoreExcept } @Override - public RunQueryResponse runQuery(RunQueryRequest request) throws DatastoreException { + public RunQueryResponse runQuery(RunQueryRequest request) { try { return client.runQuery(request); } catch (com.google.api.services.datastore.client.DatastoreException ex) { diff --git a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/DatastoreOptionsTest.java b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/DatastoreOptionsTest.java index 284a9d322793..1d188c7f4e94 100644 --- a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/DatastoreOptionsTest.java +++ 
b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/DatastoreOptionsTest.java @@ -22,9 +22,9 @@ import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; +import com.google.gcloud.datastore.spi.DatastoreRpc; +import com.google.gcloud.datastore.spi.DatastoreRpcFactory; import com.google.gcloud.datastore.testing.LocalGcdHelper; -import com.google.gcloud.spi.DatastoreRpc; -import com.google.gcloud.spi.DatastoreRpcFactory; import org.easymock.EasyMock; import org.junit.Before; diff --git a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/DatastoreTest.java b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/DatastoreTest.java index a289610fe841..e3829a2e71ce 100644 --- a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/DatastoreTest.java +++ b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/DatastoreTest.java @@ -32,14 +32,15 @@ import com.google.api.services.datastore.DatastoreV1.RunQueryResponse; import com.google.common.collect.Iterators; import com.google.common.collect.Lists; +import com.google.gcloud.AuthCredentials; import com.google.gcloud.RetryParams; import com.google.gcloud.datastore.Query.ResultType; import com.google.gcloud.datastore.StructuredQuery.OrderBy; import com.google.gcloud.datastore.StructuredQuery.Projection; import com.google.gcloud.datastore.StructuredQuery.PropertyFilter; +import com.google.gcloud.datastore.spi.DatastoreRpc; +import com.google.gcloud.datastore.spi.DatastoreRpcFactory; import com.google.gcloud.datastore.testing.LocalGcdHelper; -import com.google.gcloud.spi.DatastoreRpc; -import com.google.gcloud.spi.DatastoreRpcFactory; import com.google.protobuf.ByteString; import org.easymock.EasyMock; @@ -128,6 +129,7 @@ public void setUp() { options = DatastoreOptions.builder() .projectId(PROJECT_ID) .host("http://localhost:" + PORT) + .authCredentials(AuthCredentials.noAuth()) .retryParams(RetryParams.noRetries()) .build(); datastore = options.service(); diff --git a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/SerializationTest.java b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/SerializationTest.java index 3976be2cc383..b9e78800ffab 100644 --- a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/SerializationTest.java +++ b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/SerializationTest.java @@ -17,28 +17,17 @@ package com.google.gcloud.datastore; import static java.nio.charset.StandardCharsets.UTF_8; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotSame; import com.google.api.services.datastore.DatastoreV1; -import com.google.common.collect.ImmutableMultimap; -import com.google.common.collect.Multimap; import com.google.gcloud.AuthCredentials; -import com.google.gcloud.RetryParams; +import com.google.gcloud.BaseSerializationTest; +import com.google.gcloud.Restorable; import com.google.gcloud.datastore.StructuredQuery.CompositeFilter; import com.google.gcloud.datastore.StructuredQuery.OrderBy; import com.google.gcloud.datastore.StructuredQuery.Projection; import com.google.gcloud.datastore.StructuredQuery.PropertyFilter; -import org.junit.Test; - -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.ObjectInputStream; -import java.io.ObjectOutputStream; - -public class SerializationTest { +public class SerializationTest extends BaseSerializationTest { private static final IncompleteKey 
INCOMPLETE_KEY1 = IncompleteKey.builder("ds", "k").ancestors(PathElement.of("p", 1)).build(); @@ -113,83 +102,31 @@ public class SerializationTest { .addValue(new NullValue()) .build(); private static final ProjectionEntity PROJECTION_ENTITY = ProjectionEntity.fromPb(ENTITY1.toPb()); + private static final DatastoreException DATASTORE_EXCEPTION = + new DatastoreException(42, "message", "reason"); - @SuppressWarnings("rawtypes") - private static final Multimap TYPE_TO_VALUES = - ImmutableMultimap.builder() - .put(ValueType.NULL, NULL_VALUE) - .put(ValueType.KEY, KEY_VALUE) - .put(ValueType.STRING, STRING_VALUE) - .putAll(ValueType.ENTITY, EMBEDDED_ENTITY_VALUE1, EMBEDDED_ENTITY_VALUE2, - EMBEDDED_ENTITY_VALUE3) - .put(ValueType.LIST, LIST_VALUE) - .put(ValueType.LONG, LONG_VALUE) - .put(ValueType.DOUBLE, DOUBLE_VALUE) - .put(ValueType.BOOLEAN, BOOLEAN_VALUE) - .put(ValueType.DATE_TIME, DATE_AND_TIME_VALUE) - .put(ValueType.BLOB, BLOB_VALUE) - .put(ValueType.RAW_VALUE, RAW_VALUE) - .build(); - - @Test - public void testServiceOptions() throws Exception { + @Override + protected java.io.Serializable[] serializableObjects() { DatastoreOptions options = DatastoreOptions.builder() .authCredentials(AuthCredentials.createForAppEngine()) .normalizeDataset(false) .projectId("ds1") .build(); - DatastoreOptions serializedCopy = serializeAndDeserialize(options); - assertEquals(options, serializedCopy); - - options = options.toBuilder() + DatastoreOptions otherOptions = options.toBuilder() .namespace("ns1") - .retryParams(RetryParams.defaultInstance()) .authCredentials(null) .force(true) .build(); - serializedCopy = serializeAndDeserialize(options); - assertEquals(options, serializedCopy); - } - - @Test - public void testValues() throws Exception { - for (ValueType valueType : ValueType.values()) { - for (Value value : TYPE_TO_VALUES.get(valueType)) { - Value copy = serializeAndDeserialize(value); - assertEquals(value, value); - assertEquals(value, copy); - assertNotSame(value, copy); - assertEquals(copy, copy); - assertEquals(value.get(), copy.get()); - } - } - } - - @Test - public void testTypes() throws Exception { - Serializable[] types = { KEY1, KEY2, INCOMPLETE_KEY1, INCOMPLETE_KEY2, ENTITY1, ENTITY2, - ENTITY3, EMBEDDED_ENTITY, PROJECTION_ENTITY, DATE_TIME1, BLOB1, CURSOR1, GQL1, GQL2, - QUERY1, QUERY2, QUERY3}; - for (Serializable obj : types) { - Object copy = serializeAndDeserialize(obj); - assertEquals(obj, obj); - assertEquals(obj, copy); - assertNotSame(obj, copy); - assertEquals(copy, copy); - } + return new java.io.Serializable[]{KEY1, KEY2, INCOMPLETE_KEY1, INCOMPLETE_KEY2, ENTITY1, + ENTITY2, ENTITY3, EMBEDDED_ENTITY, PROJECTION_ENTITY, DATE_TIME1, BLOB1, CURSOR1, GQL1, + GQL2, QUERY1, QUERY2, QUERY3, NULL_VALUE, KEY_VALUE, STRING_VALUE, EMBEDDED_ENTITY_VALUE1, + EMBEDDED_ENTITY_VALUE2, EMBEDDED_ENTITY_VALUE3, LIST_VALUE, LONG_VALUE, DOUBLE_VALUE, + BOOLEAN_VALUE, DATE_AND_TIME_VALUE, BLOB_VALUE, RAW_VALUE, DATASTORE_EXCEPTION, options, + otherOptions}; } - private T serializeAndDeserialize(T obj) - throws IOException, ClassNotFoundException { - ByteArrayOutputStream bytes = new ByteArrayOutputStream(); - try (ObjectOutputStream output = new ObjectOutputStream(bytes)) { - output.writeObject(obj); - } - try (ObjectInputStream input = - new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray()))) { - @SuppressWarnings("unchecked") - T result = (T) input.readObject(); - return result; - } + @Override + protected Restorable[] restorableObjects() { + return null; } } diff --git 
a/gcloud-java-examples/README.md b/gcloud-java-examples/README.md index 5e11fd2b0cb7..fc9ce9ef653d 100644 --- a/gcloud-java-examples/README.md +++ b/gcloud-java-examples/README.md @@ -19,16 +19,16 @@ If you are using Maven, add this to your pom.xml file com.google.gcloud gcloud-java-examples - 0.1.4 + 0.1.5 ``` If you are using Gradle, add this to your dependencies ```Groovy -compile 'com.google.gcloud:gcloud-java-examples:0.1.4' +compile 'com.google.gcloud:gcloud-java-examples:0.1.5' ``` If you are using SBT, add this to your dependencies ```Scala -libraryDependencies += "com.google.gcloud" % "gcloud-java-examples" % "0.1.4" +libraryDependencies += "com.google.gcloud" % "gcloud-java-examples" % "0.1.5" ``` To run examples from your command line: diff --git a/gcloud-java-examples/pom.xml b/gcloud-java-examples/pom.xml index 862d48c89eaa..111308658c2e 100644 --- a/gcloud-java-examples/pom.xml +++ b/gcloud-java-examples/pom.xml @@ -10,7 +10,7 @@ com.google.gcloud gcloud-java-pom - 0.1.5-SNAPSHOT + 0.1.6-SNAPSHOT gcloud-java-examples diff --git a/gcloud-java-examples/src/main/java/com/google/gcloud/examples/bigquery/BigQueryExample.java b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/bigquery/BigQueryExample.java index c8fbe7289f9c..fe27ee3cf63b 100644 --- a/gcloud-java-examples/src/main/java/com/google/gcloud/examples/bigquery/BigQueryExample.java +++ b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/bigquery/BigQueryExample.java @@ -43,7 +43,7 @@ import com.google.gcloud.bigquery.TableInfo; import com.google.gcloud.bigquery.ViewDefinition; import com.google.gcloud.bigquery.WriteChannelConfiguration; -import com.google.gcloud.spi.BigQueryRpc.Tuple; +import com.google.gcloud.bigquery.spi.BigQueryRpc.Tuple; import java.nio.channels.FileChannel; import java.nio.file.Paths; diff --git a/gcloud-java-examples/src/main/java/com/google/gcloud/examples/bigquery/snippets/InsertDataAndQueryTable.java b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/bigquery/snippets/InsertDataAndQueryTable.java index f421bc832441..ba2d1291b229 100644 --- a/gcloud-java-examples/src/main/java/com/google/gcloud/examples/bigquery/snippets/InsertDataAndQueryTable.java +++ b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/bigquery/snippets/InsertDataAndQueryTable.java @@ -84,7 +84,7 @@ public static void main(String... args) throws InterruptedException { // Create a query request QueryRequest queryRequest = QueryRequest.builder("SELECT * FROM my_dataset_id.my_table_id") .maxWaitTime(60000L) - .maxResults(1000L) + .pageSize(1000L) .build(); // Request query to be executed and wait for results QueryResponse queryResponse = bigquery.query(queryRequest); diff --git a/gcloud-java-examples/src/main/java/com/google/gcloud/examples/resourcemanager/snippets/ModifyPolicy.java b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/resourcemanager/snippets/ModifyPolicy.java new file mode 100644 index 000000000000..f97adf5b0916 --- /dev/null +++ b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/resourcemanager/snippets/ModifyPolicy.java @@ -0,0 +1,60 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * EDITING INSTRUCTIONS + * This file is referenced in READMEs and javadoc. Any change to this file should be reflected in + * the project's READMEs and package-info.java. + */ + +package com.google.gcloud.examples.resourcemanager.snippets; + +import com.google.gcloud.Identity; +import com.google.gcloud.resourcemanager.Policy; +import com.google.gcloud.resourcemanager.Policy.ProjectRole; +import com.google.gcloud.resourcemanager.Project; +import com.google.gcloud.resourcemanager.ResourceManager; +import com.google.gcloud.resourcemanager.ResourceManagerOptions; + +/** + * A snippet for Google Cloud Resource Manager showing how to modify a project's IAM policy. + */ +public class ModifyPolicy { + + public static void main(String... args) { + // Create Resource Manager service object + // By default, credentials are inferred from the runtime environment. + ResourceManager resourceManager = ResourceManagerOptions.defaultInstance().service(); + + // Get a project from the server + String projectId = "some-project-id"; // Use an existing project's ID + Project project = resourceManager.get(projectId); + + // Get the project's policy + Policy policy = project.getPolicy(); + + // Add a viewer + Policy.Builder modifiedPolicy = policy.toBuilder(); + Identity newViewer = Identity.user(""); + modifiedPolicy.addIdentity(ProjectRole.VIEWER.value(), newViewer); + + // Write policy + Policy updatedPolicy = project.replacePolicy(modifiedPolicy.build()); + + // Print policy + System.out.printf("Updated policy for %s: %n%s%n", projectId, updatedPolicy); + } +} diff --git a/gcloud-java-examples/src/main/java/com/google/gcloud/examples/storage/StorageExample.java b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/storage/StorageExample.java index e73cfc427129..a7260134202d 100644 --- a/gcloud-java-examples/src/main/java/com/google/gcloud/examples/storage/StorageExample.java +++ b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/storage/StorageExample.java @@ -20,7 +20,6 @@ import com.google.gcloud.AuthCredentials.ServiceAccountAuthCredentials; import com.google.gcloud.ReadChannel; import com.google.gcloud.WriteChannel; -import com.google.gcloud.spi.StorageRpc.Tuple; import com.google.gcloud.storage.Blob; import com.google.gcloud.storage.BlobId; import com.google.gcloud.storage.BlobInfo; @@ -31,6 +30,7 @@ import com.google.gcloud.storage.Storage.CopyRequest; import com.google.gcloud.storage.Storage.SignUrlOption; import com.google.gcloud.storage.StorageOptions; +import com.google.gcloud.storage.spi.StorageRpc.Tuple; import java.io.FileOutputStream; import java.io.IOException; diff --git a/gcloud-java-resourcemanager/README.md b/gcloud-java-resourcemanager/README.md index cd48d6699311..a2539df7adab 100644 --- a/gcloud-java-resourcemanager/README.md +++ b/gcloud-java-resourcemanager/README.md @@ -22,16 +22,16 @@ If you are using Maven, add this to your pom.xml file com.google.gcloud gcloud-java-resourcemanager - 0.1.4 + 0.1.5 ``` If you are using Gradle, add this to your dependencies ```Groovy -compile 'com.google.gcloud:gcloud-java-resourcemanager:0.1.4' 
+compile 'com.google.gcloud:gcloud-java-resourcemanager:0.1.5' ``` If you are using SBT, add this to your dependencies ```Scala -libraryDependencies += "com.google.gcloud" % "gcloud-java-resourcemanager" % "0.1.4" +libraryDependencies += "com.google.gcloud" % "gcloud-java-resourcemanager" % "0.1.5" ``` Example Application @@ -163,9 +163,46 @@ while (projectIterator.hasNext()) { } ``` +#### Managing IAM Policies +You can edit [Google Cloud IAM](https://cloud.google.com/iam/) (Identity and Access Management) +policies at the project level using this library as well. We recommend using the read-modify-write +pattern to make policy changes. This entails reading the project's current policy, updating it +locally, and then sending the modified policy for writing, as shown in the snippet below. First, +add these imports: + +```java +import com.google.gcloud.Identity; +import com.google.gcloud.resourcemanager.Policy; +import com.google.gcloud.resourcemanager.Policy.ProjectRole; +``` + +Assuming you have completed the steps above to create the `ResourceManager` service object and load +a project from the server, you just need to add the following code: + +```java +// Get the project's policy +Policy policy = project.getPolicy(); + +// Add a viewer +Policy.Builder modifiedPolicy = policy.toBuilder(); +Identity newViewer = Identity.user(""); +modifiedPolicy.addIdentity(ProjectRole.VIEWER.value(), newViewer); + +// Write policy +Policy updatedPolicy = project.replacePolicy(modifiedPolicy.build()); +``` + +Note that the policy you pass in to `replacePolicy` overwrites the original policy. For example, if +the original policy has two bindings and you call `replacePolicy` with a new policy containing only +one binding, the two original bindings are lost. + #### Complete source code -We put together all the code shown above into two programs. Both programs assume that you are +We put together all the code shown above into three programs. The programs assume that you are running from your own desktop and used the Google Cloud SDK to authenticate yourself. The first program creates a project if it does not exist. Complete source code can be found at @@ -175,6 +212,10 @@ The second program updates a project if it exists and lists all projects the use view. Complete source code can be found at [UpdateAndListProjects.java](../gcloud-java-examples/src/main/java/com/google/gcloud/examples/resourcemanager/snippets/UpdateAndListProjects.java). +The third program modifies the IAM policy associated with a project using the read-modify-write +pattern. A minimal sketch of this pattern, with a retry on a concurrent-modification conflict, is shown below.
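If a concurrent writer changes the policy between your read and your write, the server rejects the write because the etag no longer matches, and the client surfaces this as a `ResourceManagerException`. A common way to handle this is to wrap the read-modify-write steps in a small retry loop. The following is a minimal sketch rather than a library sample: the class name and the viewer's email address are placeholders, and it assumes that an etag mismatch is reported with HTTP code 409.

```java
import com.google.gcloud.Identity;
import com.google.gcloud.resourcemanager.Policy;
import com.google.gcloud.resourcemanager.Policy.ProjectRole;
import com.google.gcloud.resourcemanager.Project;
import com.google.gcloud.resourcemanager.ResourceManager;
import com.google.gcloud.resourcemanager.ResourceManagerException;
import com.google.gcloud.resourcemanager.ResourceManagerOptions;

public class AddViewerWithRetry {

  public static void main(String... args) {
    ResourceManager resourceManager = ResourceManagerOptions.defaultInstance().service();
    Project project = resourceManager.get("some-project-id"); // use an existing project's ID
    Identity newViewer = Identity.user("viewer@example.com"); // placeholder email address
    for (int attempt = 0; attempt < 3; attempt++) {
      // Read the current policy; the returned policy carries the server's etag.
      Policy currentPolicy = project.getPolicy();
      // Modify the policy locally.
      Policy modifiedPolicy = currentPolicy.toBuilder()
          .addIdentity(ProjectRole.VIEWER.value(), newViewer)
          .build();
      try {
        // Write the policy back; the server aborts the write if the policy changed in the meantime.
        project.replacePolicy(modifiedPolicy);
        return;
      } catch (ResourceManagerException ex) {
        // Assumption: an etag mismatch is reported as HTTP 409; rethrow anything else.
        if (ex.code() != 409) {
          throw ex;
        }
      }
    }
    // Gave up after a few conflicting writes; real code should report this to the caller.
  }
}
```

Retrying re-reads the policy so that your change is applied on top of whatever the concurrent writer did, instead of silently overwriting it.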
Complete source code can be found at +[ModifyPolicy.java](../gcloud-java-examples/src/main/java/com/google/gcloud/examples/resourcemanager/snippets/ModifyPolicy.java) + Java Versions ------------- diff --git a/gcloud-java-resourcemanager/pom.xml b/gcloud-java-resourcemanager/pom.xml index 40a865f4db68..c0c48af48f1e 100644 --- a/gcloud-java-resourcemanager/pom.xml +++ b/gcloud-java-resourcemanager/pom.xml @@ -10,7 +10,7 @@ com.google.gcloud gcloud-java-pom - 0.1.5-SNAPSHOT + 0.1.6-SNAPSHOT gcloud-java-resourcemanager @@ -33,6 +33,13 @@ + + ${project.groupId} + gcloud-java-core + ${project.version} + test-jar + test + junit junit diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/Option.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/Option.java index f48c057ba049..72d62d7fc224 100644 --- a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/Option.java +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/Option.java @@ -19,7 +19,7 @@ import static com.google.common.base.Preconditions.checkNotNull; import com.google.common.base.MoreObjects; -import com.google.gcloud.spi.ResourceManagerRpc; +import com.google.gcloud.resourcemanager.spi.ResourceManagerRpc; import java.io.Serializable; import java.util.Objects; diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/Policy.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/Policy.java index 0d7118dcbbd7..219d74262319 100644 --- a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/Policy.java +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/Policy.java @@ -17,7 +17,6 @@ package com.google.gcloud.resourcemanager; import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.CaseFormat; import com.google.common.base.Function; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; @@ -41,14 +40,20 @@ * * @see Policy */ -public class Policy extends IamPolicy { +public class Policy extends IamPolicy { private static final long serialVersionUID = -5573557282693961850L; /** - * Represents legacy roles in an IAM Policy. + * The project-level roles in an IAM policy. This enum is not an exhaustive list of all roles + * you can use in an IAM policy. You can also use service-specific roles (e.g. + * "roles/pubsub.editor"). See the Supported Cloud Platform Services page for links + * to service-specific roles. + * + * @see Supported Cloud + * Platform Services */ - public enum Role { + public enum ProjectRole { /** * Permissions for read-only actions that preserve state. @@ -69,31 +74,29 @@ public enum Role { */ OWNER("roles/owner"); - private String strValue; + private final String value; - private Role(String strValue) { - this.strValue = strValue; + private ProjectRole(String value) { + this.value = value; } - String strValue() { - return strValue; - } - - static Role fromStr(String roleStr) { - return Role.valueOf(CaseFormat.LOWER_CAMEL.to( - CaseFormat.UPPER_UNDERSCORE, roleStr.substring("roles/".length()))); + /** + * Returns the string value associated with the role. + */ + public String value() { + return value; } } /** * Builder for an IAM Policy. 
*/ - public static class Builder extends IamPolicy.Builder { + public static class Builder extends IamPolicy.Builder { private Builder() {} @VisibleForTesting - Builder(Map> bindings, String etag, Integer version) { + Builder(Map> bindings, String etag, Integer version) { bindings(bindings).etag(etag).version(version); } @@ -116,15 +119,20 @@ public Builder toBuilder() { return new Builder(bindings(), etag(), version()); } + @Override + public String toString() { + return toPb().toString(); + } + com.google.api.services.cloudresourcemanager.model.Policy toPb() { com.google.api.services.cloudresourcemanager.model.Policy policyPb = new com.google.api.services.cloudresourcemanager.model.Policy(); List bindingPbList = new LinkedList<>(); - for (Map.Entry> binding : bindings().entrySet()) { + for (Map.Entry> binding : bindings().entrySet()) { com.google.api.services.cloudresourcemanager.model.Binding bindingPb = new com.google.api.services.cloudresourcemanager.model.Binding(); - bindingPb.setRole(binding.getKey().strValue()); + bindingPb.setRole(binding.getKey()); bindingPb.setMembers( Lists.transform( new ArrayList<>(binding.getValue()), @@ -144,11 +152,11 @@ public String apply(Identity identity) { static Policy fromPb( com.google.api.services.cloudresourcemanager.model.Policy policyPb) { - Map> bindings = new HashMap<>(); + Map> bindings = new HashMap<>(); for (com.google.api.services.cloudresourcemanager.model.Binding bindingPb : policyPb.getBindings()) { bindings.put( - Role.fromStr(bindingPb.getRole()), + bindingPb.getRole(), ImmutableSet.copyOf( Lists.transform( bindingPb.getMembers(), diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/Project.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/Project.java index 4d12a31274c0..bf9cf0e01a6d 100644 --- a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/Project.java +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/Project.java @@ -20,6 +20,7 @@ import java.io.IOException; import java.io.ObjectInputStream; +import java.util.List; import java.util.Map; import java.util.Objects; @@ -157,10 +158,10 @@ public Project reload() { * completes, the project is not retrievable by the {@link ResourceManager#get} and * {@link ResourceManager#list} methods. The caller must have modify permissions for this project. * - * @see Cloud - * Resource Manager delete * @throws ResourceManagerException upon failure + * @see Cloud + * Resource Manager delete */ public void delete() { resourceManager.delete(projectId()); @@ -174,10 +175,10 @@ public void delete() { * state of {@link ProjectInfo.State#DELETE_IN_PROGRESS}, the project cannot be restored. The * caller must have modify permissions for this project. * - * @see Cloud - * Resource Manager undelete * @throws ResourceManagerException upon failure (including when the project can't be restored) + * @see Cloud + * Resource Manager undelete */ public void undelete() { resourceManager.undelete(projectId()); @@ -188,16 +189,68 @@ public void undelete() { * *

The caller must have modify permissions for this project. * - * @see Cloud - * Resource Manager update * @return the Project representing the new project metadata * @throws ResourceManagerException upon failure + * @see Cloud + * Resource Manager update */ public Project replace() { return resourceManager.replace(this); } + /** + * Returns the IAM access control policy for this project. Returns {@code null} if the resource + * does not exist or if you do not have adequate permission to view the project or get the policy. + * + * @return the IAM policy for the project + * @throws ResourceManagerException upon failure + * @see + * Resource Manager getIamPolicy + */ + public Policy getPolicy() { + return resourceManager.getPolicy(projectId()); + } + + /** + * Sets the IAM access control policy for this project. Replaces any existing policy. It is + * recommended that you use the read-modify-write pattern. See code samples and important details + * of replacing policies in the documentation for {@link ResourceManager#replacePolicy}. + * + * @return the newly set IAM policy for this project + * @throws ResourceManagerException upon failure + * @see + * Resource Manager setIamPolicy + */ + public Policy replacePolicy(Policy newPolicy) { + return resourceManager.replacePolicy(projectId(), newPolicy); + } + + /** + * Returns the permissions that a caller has on this project. You typically don't call this method + * if you're using Google Cloud Platform directly to manage permissions. This method is intended + * for integration with your proprietary software, such as a customized graphical user interface. + * For example, the Cloud Platform Console tests IAM permissions internally to determine which UI + * should be available to the logged-in user. Each service that supports IAM lists the possible + * permissions; see the Supported Cloud Platform services page below for links to these + * lists. + * + * @return a list of booleans representing whether the caller has the permissions specified (in + * the order of the given permissions) + * @throws ResourceManagerException upon failure + * @see + * Resource Manager testIamPermissions + * @see Supported Cloud Platform + * Services + */ + List testPermissions(List permissions) { + return resourceManager.testPermissions(projectId(), permissions); + } + @Override public Builder toBuilder() { return new Builder(this); diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManager.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManager.java index 3d27f2a33ac8..70eeb9c8eb50 100644 --- a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManager.java +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManager.java @@ -18,10 +18,12 @@ import com.google.common.base.Joiner; import com.google.common.collect.Sets; +import com.google.gcloud.IamPolicy; import com.google.gcloud.Page; import com.google.gcloud.Service; -import com.google.gcloud.spi.ResourceManagerRpc; +import com.google.gcloud.resourcemanager.spi.ResourceManagerRpc; +import java.util.List; import java.util.Set; /** @@ -168,19 +170,19 @@ public static ProjectListOption fields(ProjectField... fields) { } /** - * Create a new project. + * Creates a new project. * *

Initially, the project resource is owned by its creator exclusively. The creator can later * grant permission to others to read or update the project. Several APIs are activated * automatically for the project, including Google Cloud Storage. * - * @see Cloud - * Resource Manager create * @return Project object representing the new project's metadata. The returned object will * include the following read-only fields supplied by the server: project number, lifecycle * state, and creation time. * @throws ResourceManagerException upon failure + * @see Cloud + * Resource Manager create */ Project create(ProjectInfo project); @@ -201,10 +203,10 @@ public static ProjectListOption fields(ProjectField... fields) { * completes, the project is not retrievable by the {@link ResourceManager#get} and * {@link ResourceManager#list} methods. The caller must have modify permissions for this project. * - * @see Cloud - * Resource Manager delete * @throws ResourceManagerException upon failure + * @see Cloud + * Resource Manager delete */ void delete(String projectId); @@ -214,10 +216,9 @@ public static ProjectListOption fields(ProjectField... fields) { *

Returns {@code null} if the project is not found or if the user doesn't have read * permissions for the project. * - * @see Cloud - * Resource Manager get * @throws ResourceManagerException upon failure + * @see + * Cloud Resource Manager get */ Project get(String projectId, ProjectGetOption... options); @@ -228,11 +229,11 @@ public static ProjectListOption fields(ProjectField... fields) { * at the end of the list. Use {@link ProjectListOption} to filter this list, set page size, and * set page tokens. * - * @see Cloud - * Resource Manager list * @return {@code Page}, a page of projects * @throws ResourceManagerException upon failure + * @see Cloud + * Resource Manager list */ Page list(ProjectListOption... options); @@ -241,11 +242,11 @@ public static ProjectListOption fields(ProjectField... fields) { * *

The caller must have modify permissions for this project. * - * @see Cloud - * Resource Manager update * @return the Project representing the new project metadata * @throws ResourceManagerException upon failure + * @see Cloud + * Resource Manager update */ Project replace(ProjectInfo newProject); @@ -257,10 +258,87 @@ public static ProjectListOption fields(ProjectField... fields) { * state of {@link ProjectInfo.State#DELETE_IN_PROGRESS}, the project cannot be restored. The * caller must have modify permissions for this project. * - * @see Cloud - * Resource Manager undelete * @throws ResourceManagerException upon failure + * @see Cloud + * Resource Manager undelete */ void undelete(String projectId); + + /** + * Returns the IAM access control policy for the specified project. Returns {@code null} if the + * resource does not exist or if you do not have adequate permission to view the project or get + * the policy. + * + * @throws ResourceManagerException upon failure + * @see + * Resource Manager getIamPolicy + */ + Policy getPolicy(String projectId); + + /** + * Sets the IAM access control policy for the specified project. Replaces any existing policy. The + * following constraints apply: + *

    + *
  • Projects currently support only user:{emailid} and serviceAccount:{emailid} + * members in a binding of a policy. + *
  • To be added as an owner, a user must be invited via Cloud Platform console and must accept + * the invitation. + *
  • Members cannot be added to more than one role in the same policy. + *
  • There must be at least one owner who has accepted the Terms of Service (ToS) agreement in + * the policy. An attempt to set a policy that removes the last ToS-accepted owner from the + * policy will fail. + *
  • Calling this method requires enabling the App Engine Admin API. + *
+ * Note: Removing service accounts from policies or changing their roles can render services + * completely inoperable. It is important to understand how the service account is being used + * before removing or updating its roles. + * + *

It is recommended that you use the read-modify-write pattern. This pattern entails reading + * the project's current policy, updating it locally, and then sending the modified policy for + * writing. Cloud IAM solves the problem of conflicting processes simultaneously attempting to + * modify a policy by using the {@link IamPolicy#etag etag} property. This property is used to + * verify whether the policy has changed since the last request. When you make a request to Cloud + * IAM with an etag value, Cloud IAM compares the etag value in the request with the existing etag + * value associated with the policy. It writes the policy only if the etag values match. If the + * etags don't match, a {@code ResourceManagerException} is thrown, denoting that the server + * aborted the update. If an etag is not provided, the policy is overwritten blindly. + * + *

An example of using the read-modify-write pattern is as follows: + * <pre> {@code
+   * Policy currentPolicy = resourceManager.getPolicy("my-project-id");
+   * Policy modifiedPolicy = currentPolicy.toBuilder()
+   *     .removeIdentity("roles/viewer", Identity.user("user@gmail.com"))
+   *     .build();
+   * Policy newPolicy = resourceManager.replacePolicy("my-project-id", modifiedPolicy);
+   * }</pre>
+   * 
+ * + * @throws ResourceManagerException upon failure + * @see + * Resource Manager setIamPolicy + */ + Policy replacePolicy(String projectId, Policy newPolicy); + + /** + * Returns the permissions that a caller has on the specified project. You typically don't call + * this method if you're using Google Cloud Platform directly to manage permissions. This method + * is intended for integration with your proprietary software, such as a customized graphical user + * interface. For example, the Cloud Platform Console tests IAM permissions internally to + * determine which UI should be available to the logged-in user. Each service that supports IAM + * lists the possible permissions; see the Supported Cloud Platform services page below for + * links to these lists. + * + * @return A list of booleans representing whether the caller has the permissions specified (in + * the order of the given permissions) + * @throws ResourceManagerException upon failure + * @see + * Resource Manager testIamPermissions + * @see Supported Cloud Platform + * Services + */ + List testPermissions(String projectId, List permissions); } diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerImpl.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerImpl.java index 4176b4e610ba..e4663cb74cb9 100644 --- a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerImpl.java +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerImpl.java @@ -29,9 +29,10 @@ import com.google.gcloud.PageImpl; import com.google.gcloud.PageImpl.NextPageFetcher; import com.google.gcloud.RetryHelper.RetryHelperException; -import com.google.gcloud.spi.ResourceManagerRpc; -import com.google.gcloud.spi.ResourceManagerRpc.Tuple; +import com.google.gcloud.resourcemanager.spi.ResourceManagerRpc; +import com.google.gcloud.resourcemanager.spi.ResourceManagerRpc.Tuple; +import java.util.List; import java.util.Map; import java.util.concurrent.Callable; @@ -55,8 +56,8 @@ public com.google.api.services.cloudresourcemanager.model.Project call() { return resourceManagerRpc.create(project.toPb()); } }, options().retryParams(), EXCEPTION_HANDLER)); - } catch (RetryHelperException e) { - throw ResourceManagerException.translateAndThrow(e); + } catch (RetryHelperException ex) { + throw ResourceManagerException.translateAndThrow(ex); } } @@ -70,8 +71,8 @@ public Void call() { return null; } }, options().retryParams(), EXCEPTION_HANDLER); - } catch (RetryHelperException e) { - throw ResourceManagerException.translateAndThrow(e); + } catch (RetryHelperException ex) { + throw ResourceManagerException.translateAndThrow(ex); } } @@ -87,8 +88,8 @@ public com.google.api.services.cloudresourcemanager.model.Project call() { } }, options().retryParams(), EXCEPTION_HANDLER); return answer == null ? 
null : Project.fromPb(this, answer); - } catch (RetryHelperException e) { - throw ResourceManagerException.translateAndThrow(e); + } catch (RetryHelperException ex) { + throw ResourceManagerException.translateAndThrow(ex); } } @@ -146,8 +147,8 @@ public Project apply( }); return new PageImpl<>( new ProjectPageFetcher(serviceOptions, cursor, optionsMap), cursor, projects); - } catch (RetryHelperException e) { - throw ResourceManagerException.translateAndThrow(e); + } catch (RetryHelperException ex) { + throw ResourceManagerException.translateAndThrow(ex); } } @@ -161,8 +162,8 @@ public com.google.api.services.cloudresourcemanager.model.Project call() { return resourceManagerRpc.replace(newProject.toPb()); } }, options().retryParams(), EXCEPTION_HANDLER)); - } catch (RetryHelperException e) { - throw ResourceManagerException.translateAndThrow(e); + } catch (RetryHelperException ex) { + throw ResourceManagerException.translateAndThrow(ex); } } @@ -176,8 +177,55 @@ public Void call() { return null; } }, options().retryParams(), EXCEPTION_HANDLER); - } catch (RetryHelperException e) { - throw ResourceManagerException.translateAndThrow(e); + } catch (RetryHelperException ex) { + throw ResourceManagerException.translateAndThrow(ex); + } + } + + @Override + public Policy getPolicy(final String projectId) { + try { + com.google.api.services.cloudresourcemanager.model.Policy answer = + runWithRetries( + new Callable() { + @Override + public com.google.api.services.cloudresourcemanager.model.Policy call() { + return resourceManagerRpc.getPolicy(projectId); + } + }, options().retryParams(), EXCEPTION_HANDLER); + return answer == null ? null : Policy.fromPb(answer); + } catch (RetryHelperException ex) { + throw ResourceManagerException.translateAndThrow(ex); + } + } + + @Override + public Policy replacePolicy(final String projectId, final Policy newPolicy) { + try { + return Policy.fromPb(runWithRetries( + new Callable() { + @Override + public com.google.api.services.cloudresourcemanager.model.Policy call() { + return resourceManagerRpc.replacePolicy(projectId, newPolicy.toPb()); + } + }, options().retryParams(), EXCEPTION_HANDLER)); + } catch (RetryHelperException ex) { + throw ResourceManagerException.translateAndThrow(ex); + } + } + + @Override + public List testPermissions(final String projectId, final List permissions) { + try { + return runWithRetries( + new Callable>() { + @Override + public List call() { + return resourceManagerRpc.testPermissions(projectId, permissions); + } + }, options().retryParams(), EXCEPTION_HANDLER); + } catch (RetryHelperException ex) { + throw ResourceManagerException.translateAndThrow(ex); } } diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerOptions.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerOptions.java index 5c0c4baf1ecb..c744864147c2 100644 --- a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerOptions.java +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerOptions.java @@ -18,9 +18,9 @@ import com.google.common.collect.ImmutableSet; import com.google.gcloud.ServiceOptions; -import com.google.gcloud.spi.DefaultResourceManagerRpc; -import com.google.gcloud.spi.ResourceManagerRpc; -import com.google.gcloud.spi.ResourceManagerRpcFactory; +import com.google.gcloud.resourcemanager.spi.DefaultResourceManagerRpc; +import 
com.google.gcloud.resourcemanager.spi.ResourceManagerRpc; +import com.google.gcloud.resourcemanager.spi.ResourceManagerRpcFactory; import java.util.Set; diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/spi/DefaultResourceManagerRpc.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/spi/DefaultResourceManagerRpc.java similarity index 53% rename from gcloud-java-resourcemanager/src/main/java/com/google/gcloud/spi/DefaultResourceManagerRpc.java rename to gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/spi/DefaultResourceManagerRpc.java index 61c622fa0c33..9f92ff545874 100644 --- a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/spi/DefaultResourceManagerRpc.java +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/spi/DefaultResourceManagerRpc.java @@ -1,9 +1,10 @@ -package com.google.gcloud.spi; +package com.google.gcloud.resourcemanager.spi; -import static com.google.gcloud.spi.ResourceManagerRpc.Option.FIELDS; -import static com.google.gcloud.spi.ResourceManagerRpc.Option.FILTER; -import static com.google.gcloud.spi.ResourceManagerRpc.Option.PAGE_SIZE; -import static com.google.gcloud.spi.ResourceManagerRpc.Option.PAGE_TOKEN; +import static com.google.common.base.MoreObjects.firstNonNull; +import static com.google.gcloud.resourcemanager.spi.ResourceManagerRpc.Option.FIELDS; +import static com.google.gcloud.resourcemanager.spi.ResourceManagerRpc.Option.FILTER; +import static com.google.gcloud.resourcemanager.spi.ResourceManagerRpc.Option.PAGE_SIZE; +import static com.google.gcloud.resourcemanager.spi.ResourceManagerRpc.Option.PAGE_TOKEN; import static java.net.HttpURLConnection.HTTP_FORBIDDEN; import static java.net.HttpURLConnection.HTTP_NOT_FOUND; @@ -11,13 +12,22 @@ import com.google.api.client.http.HttpTransport; import com.google.api.client.json.jackson.JacksonFactory; import com.google.api.services.cloudresourcemanager.Cloudresourcemanager; +import com.google.api.services.cloudresourcemanager.model.GetIamPolicyRequest; import com.google.api.services.cloudresourcemanager.model.ListProjectsResponse; +import com.google.api.services.cloudresourcemanager.model.Policy; import com.google.api.services.cloudresourcemanager.model.Project; +import com.google.api.services.cloudresourcemanager.model.SetIamPolicyRequest; +import com.google.api.services.cloudresourcemanager.model.TestIamPermissionsRequest; +import com.google.api.services.cloudresourcemanager.model.TestIamPermissionsResponse; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; import com.google.gcloud.resourcemanager.ResourceManagerException; import com.google.gcloud.resourcemanager.ResourceManagerOptions; import java.io.IOException; +import java.util.List; import java.util.Map; +import java.util.Set; public class DefaultResourceManagerRpc implements ResourceManagerRpc { @@ -38,7 +48,7 @@ private static ResourceManagerException translate(IOException exception) { } @Override - public Project create(Project project) throws ResourceManagerException { + public Project create(Project project) { try { return resourceManager.projects().create(project).execute(); } catch (IOException ex) { @@ -47,7 +57,7 @@ public Project create(Project project) throws ResourceManagerException { } @Override - public void delete(String projectId) throws ResourceManagerException { + public void delete(String projectId) { try { resourceManager.projects().delete(projectId).execute(); } catch 
(IOException ex) { @@ -56,7 +66,7 @@ public void delete(String projectId) throws ResourceManagerException { } @Override - public Project get(String projectId, Map options) throws ResourceManagerException { + public Project get(String projectId, Map options) { try { return resourceManager.projects() .get(projectId) @@ -74,8 +84,7 @@ public Project get(String projectId, Map options) throws ResourceMana } @Override - public Tuple> list(Map options) - throws ResourceManagerException { + public Tuple> list(Map options) { try { ListProjectsResponse response = resourceManager.projects() .list() @@ -92,7 +101,7 @@ public Tuple> list(Map options) } @Override - public void undelete(String projectId) throws ResourceManagerException { + public void undelete(String projectId) { try { resourceManager.projects().undelete(projectId).execute(); } catch (IOException ex) { @@ -101,12 +110,58 @@ public void undelete(String projectId) throws ResourceManagerException { } @Override - public Project replace(Project project) throws ResourceManagerException { + public Project replace(Project project) { try { return resourceManager.projects().update(project.getProjectId(), project).execute(); } catch (IOException ex) { throw translate(ex); } } -} + @Override + public Policy getPolicy(String projectId) throws ResourceManagerException { + try { + return resourceManager.projects() + .getIamPolicy(projectId, new GetIamPolicyRequest()) + .execute(); + } catch (IOException ex) { + ResourceManagerException translated = translate(ex); + if (translated.code() == HTTP_FORBIDDEN) { + // Service returns permission denied if policy doesn't exist. + return null; + } else { + throw translated; + } + } + } + + @Override + public Policy replacePolicy(String projectId, Policy newPolicy) throws ResourceManagerException { + try { + return resourceManager.projects() + .setIamPolicy(projectId, new SetIamPolicyRequest().setPolicy(newPolicy)).execute(); + } catch (IOException ex) { + throw translate(ex); + } + } + + @Override + public List testPermissions(String projectId, List permissions) + throws ResourceManagerException { + try { + TestIamPermissionsResponse response = resourceManager.projects() + .testIamPermissions( + projectId, new TestIamPermissionsRequest().setPermissions(permissions)) + .execute(); + Set permissionsOwned = + ImmutableSet.copyOf(firstNonNull(response.getPermissions(), ImmutableList.of())); + ImmutableList.Builder answer = ImmutableList.builder(); + for (String p : permissions) { + answer.add(permissionsOwned.contains(p)); + } + return answer.build(); + } catch (IOException ex) { + throw translate(ex); + } + } +} diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/spi/ResourceManagerRpc.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/spi/ResourceManagerRpc.java new file mode 100644 index 000000000000..d6ec068a92a3 --- /dev/null +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/spi/ResourceManagerRpc.java @@ -0,0 +1,149 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.resourcemanager.spi; + +import com.google.api.services.cloudresourcemanager.model.Policy; +import com.google.api.services.cloudresourcemanager.model.Project; +import com.google.gcloud.resourcemanager.ResourceManagerException; + +import java.util.List; +import java.util.Map; + +public interface ResourceManagerRpc { + + enum Option { + FILTER("filter"), + FIELDS("fields"), + PAGE_SIZE("pageSize"), + PAGE_TOKEN("pageToken"); + + private final String value; + + Option(String value) { + this.value = value; + } + + public String value() { + return value; + } + + @SuppressWarnings("unchecked") + T get(Map options) { + return (T) options.get(this); + } + + String getString(Map options) { + return get(options); + } + + Integer getInt(Map options) { + return get(options); + } + } + + class Tuple { + private final X x; + private final Y y; + + private Tuple(X x, Y y) { + this.x = x; + this.y = y; + } + + public static Tuple of(X x, Y y) { + return new Tuple<>(x, y); + } + + public X x() { + return x; + } + + public Y y() { + return y; + } + } + + /** + * Creates a new project. + * + * @throws ResourceManagerException upon failure + */ + Project create(Project project); + + /** + * Marks the project identified by the specified project ID for deletion. + * + * @throws ResourceManagerException upon failure + */ + void delete(String projectId); + + /** + * Retrieves the project identified by the specified project ID. Returns {@code null} if the + * project is not found or if the user doesn't have read permissions for the project. + * + * @throws ResourceManagerException upon failure + */ + Project get(String projectId, Map options); + + /** + * Lists the projects visible to the current user. + * + * @throws ResourceManagerException upon failure + */ + Tuple> list(Map options); + + /** + * Restores the project identified by the specified project ID. Undelete will only succeed if the + * project has a lifecycle state of {@code DELETE_REQUESTED} state. The caller must have modify + * permissions for this project. + * + * @throws ResourceManagerException upon failure + */ + void undelete(String projectId); + + /** + * Replaces the attributes of the project. The caller must have modify permissions for this + * project. + * + * @throws ResourceManagerException upon failure + */ + Project replace(Project project); + + /** + * Returns the IAM policy associated with a project. + * + * @throws ResourceManagerException upon failure + */ + Policy getPolicy(String projectId); + + /** + * Replaces the IAM policy associated with the given project. + * + * @throws ResourceManagerException upon failure + */ + Policy replacePolicy(String projectId, Policy newPolicy); + + /** + * Tests whether the caller has the given permissions. Returns a list of booleans corresponding to + * whether or not the user has the permission in the same position of input list. 
+ * + * @throws ResourceManagerException upon failure + */ + List testPermissions(String projectId, List permissions); + + // TODO(ajaykannan): implement "Organization" functionality when available (issue #319) +} diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/spi/ResourceManagerRpcFactory.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/spi/ResourceManagerRpcFactory.java similarity index 90% rename from gcloud-java-resourcemanager/src/main/java/com/google/gcloud/spi/ResourceManagerRpcFactory.java rename to gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/spi/ResourceManagerRpcFactory.java index c2c607c0c205..4dbd1a00d4c7 100644 --- a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/spi/ResourceManagerRpcFactory.java +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/spi/ResourceManagerRpcFactory.java @@ -14,9 +14,10 @@ * limitations under the License. */ -package com.google.gcloud.spi; +package com.google.gcloud.resourcemanager.spi; import com.google.gcloud.resourcemanager.ResourceManagerOptions; +import com.google.gcloud.spi.ServiceRpcFactory; /** * An interface for Resource Manager RPC factory. diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/testing/LocalResourceManagerHelper.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/testing/LocalResourceManagerHelper.java index a077eb6144a5..4d466e55a897 100644 --- a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/testing/LocalResourceManagerHelper.java +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/testing/LocalResourceManagerHelper.java @@ -5,13 +5,19 @@ import static java.net.HttpURLConnection.HTTP_OK; import com.google.api.client.json.JsonFactory; +import com.google.api.services.cloudresourcemanager.model.Binding; +import com.google.api.services.cloudresourcemanager.model.Policy; import com.google.api.services.cloudresourcemanager.model.Project; +import com.google.api.services.cloudresourcemanager.model.SetIamPolicyRequest; +import com.google.api.services.cloudresourcemanager.model.TestIamPermissionsRequest; +import com.google.api.services.cloudresourcemanager.model.TestIamPermissionsResponse; import com.google.common.base.Joiner; import com.google.common.base.Objects; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.io.ByteStreams; +import com.google.gcloud.AuthCredentials; import com.google.gcloud.resourcemanager.ResourceManagerOptions; import com.sun.net.httpserver.Headers; @@ -29,11 +35,13 @@ import java.net.URISyntaxException; import java.nio.charset.StandardCharsets; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Random; import java.util.Set; +import java.util.UUID; import java.util.concurrent.ConcurrentSkipListMap; import java.util.logging.Level; import java.util.logging.Logger; @@ -45,7 +53,26 @@ * Utility to create a local Resource Manager mock for testing. * *

The mock runs in a separate thread, listening for HTTP requests on the local machine at an - * ephemeral port. + * ephemeral port. While this mock attempts to simulate the Cloud Resource Manager, there are some + * divergences in behavior. The following is a non-exhaustive list of some of those behavioral + * differences: + * + *

    + *
  • This mock assumes you have adequate permissions for any action. Related to this, + * testIamPermissions always indicates that the caller has all permissions listed in the + * request. + *
  • IAM policies are set to an empty policy with version 0 (only legacy roles supported) upon + * project creation. The actual service will not have an empty list of bindings and may also + * set your version to 1. + *
  • There is no input validation for the policy provided when replacing a policy or calling + * testIamPermissions. + *
  • In this mock, projects never move from the DELETE_REQUESTED lifecycle state to + * DELETE_IN_PROGRESS without an explicit call to the utility method + * {@link #changeLifecycleState}. Similarly, a project is never completely removed without an + * explicit call to the utility method {@link #removeProject}. + *
  • The messages in the error responses given by this mock do not necessarily match the messages + * given by the actual service. + *
*/ @SuppressWarnings("restriction") public class LocalResourceManagerHelper { @@ -77,6 +104,7 @@ public class LocalResourceManagerHelper { private final HttpServer server; private final ConcurrentSkipListMap projects = new ConcurrentSkipListMap<>(); + private final Map policies = new HashMap<>(); private final int port; private static class Response { @@ -98,6 +126,7 @@ String body() { } private enum Error { + ABORTED(409, "global", "aborted", "ABORTED"), ALREADY_EXISTS(409, "global", "alreadyExists", "ALREADY_EXISTS"), PERMISSION_DENIED(403, "global", "forbidden", "PERMISSION_DENIED"), FAILED_PRECONDITION(400, "global", "failedPrecondition", "FAILED_PRECONDITION"), @@ -149,13 +178,7 @@ public void handle(HttpExchange exchange) { try { switch (requestMethod) { case "POST": - if (path.endsWith(":undelete")) { - response = undelete(projectIdFromUri(path)); - } else { - String requestBody = - decodeContent(exchange.getRequestHeaders(), exchange.getRequestBody()); - response = create(jsonFactory.fromString(requestBody, Project.class)); - } + response = handlePost(exchange, path); break; case "DELETE": response = delete(projectIdFromUri(path)); @@ -186,6 +209,30 @@ public void handle(HttpExchange exchange) { } } + private Response handlePost(HttpExchange exchange, String path) throws IOException { + String requestBody = decodeContent(exchange.getRequestHeaders(), exchange.getRequestBody()); + if (!path.contains(":")) { + return create(jsonFactory.fromString(requestBody, Project.class)); + } else { + switch (path.split(":", 2)[1]) { + case "undelete": + return undelete(projectIdFromUri(path)); + case "getIamPolicy": + return getPolicy(projectIdFromUri(path)); + case "setIamPolicy": + return replacePolicy(projectIdFromUri(path), + jsonFactory.fromString(requestBody, SetIamPolicyRequest.class).getPolicy()); + case "testIamPermissions": + return testPermissions(projectIdFromUri(path), + jsonFactory.fromString(requestBody, TestIamPermissionsRequest.class) + .getPermissions()); + default: + return Error.BAD_REQUEST.response( + "The server could not understand the following request URI: POST " + path); + } + } + } + private static void writeResponse(HttpExchange exchange, Response response) { exchange.getResponseHeaders().set("Content-type", "application/json; charset=UTF-8"); OutputStream outputStream = exchange.getResponseBody(); @@ -258,7 +305,7 @@ private static Map parseListOptions(String query) throws IOExcep options.put("pageToken", argEntry[1]); break; case "pageSize": - int pageSize = Integer.valueOf(argEntry[1]); + int pageSize = Integer.parseInt(argEntry[1]); if (pageSize < 1) { throw new IOException("Page size must be greater than 0."); } @@ -316,7 +363,7 @@ private static boolean isValidIdOrLabel(String value, int minLength, int maxLeng return value.length() >= minLength && value.length() <= maxLength; } - Response create(Project project) { + synchronized Response create(Project project) { String customErrorMessage = checkForProjectErrors(project); if (customErrorMessage != null) { return Error.INVALID_ARGUMENT.response(customErrorMessage); @@ -328,6 +375,11 @@ Response create(Project project) { return Error.ALREADY_EXISTS.response( "A project with the same project ID (" + project.getProjectId() + ") already exists."); } + Policy emptyPolicy = new Policy() + .setBindings(Collections.emptyList()) + .setEtag(UUID.randomUUID().toString()) + .setVersion(0); + policies.put(project.getProjectId(), emptyPolicy); try { String createdProjectStr = jsonFactory.toString(project); return new 
Response(HTTP_OK, createdProjectStr); @@ -539,6 +591,53 @@ synchronized Response undelete(String projectId) { return response; } + synchronized Response getPolicy(String projectId) { + Policy policy = policies.get(projectId); + if (policy == null) { + return Error.PERMISSION_DENIED.response("Project " + projectId + " not found."); + } + try { + return new Response(HTTP_OK, jsonFactory.toString(policy)); + } catch (IOException e) { + return Error.INTERNAL_ERROR.response( + "Error when serializing the IAM policy for " + projectId); + } + } + + synchronized Response replacePolicy(String projectId, Policy policy) { + Policy originalPolicy = policies.get(projectId); + if (originalPolicy == null) { + return Error.PERMISSION_DENIED.response("Error when replacing the policy for " + projectId + + " because the project was not found."); + } + String etag = policy.getEtag(); + if (etag != null && !originalPolicy.getEtag().equals(etag)) { + return Error.ABORTED.response("Policy etag mismatch when replacing the policy for project " + + projectId + ", please retry the read."); + } + policy.setEtag(UUID.randomUUID().toString()); + policy.setVersion(originalPolicy.getVersion()); + policies.put(projectId, policy); + try { + return new Response(HTTP_OK, jsonFactory.toString(policy)); + } catch (IOException e) { + return Error.INTERNAL_ERROR.response( + "Error when serializing the policy for project " + projectId); + } + } + + synchronized Response testPermissions(String projectId, List permissions) { + if (!projects.containsKey(projectId)) { + return Error.PERMISSION_DENIED.response("Project " + projectId + " not found."); + } + try { + return new Response(HTTP_OK, + jsonFactory.toString(new TestIamPermissionsResponse().setPermissions(permissions))); + } catch (IOException e) { + return Error.INTERNAL_ERROR.response("Error when serializing permissions " + permissions); + } + } + private LocalResourceManagerHelper() { try { server = HttpServer.create(new InetSocketAddress(0), 0); @@ -550,17 +649,21 @@ private LocalResourceManagerHelper() { } /** - * Creates a LocalResourceManagerHelper object that listens to requests on the local machine. + * Creates a {@code LocalResourceManagerHelper} object that listens to requests on the local + * machine. */ public static LocalResourceManagerHelper create() { return new LocalResourceManagerHelper(); } /** - * Returns a ResourceManagerOptions instance that sets the host to use the mock server. + * Returns a {@link ResourceManagerOptions} instance that sets the host to use the mock server. */ public ResourceManagerOptions options() { - return ResourceManagerOptions.builder().host("http://localhost:" + port).build(); + return ResourceManagerOptions.builder() + .host("http://localhost:" + port) + .authCredentials(AuthCredentials.noAuth()) + .build(); } /** @@ -606,6 +709,7 @@ public synchronized boolean changeLifecycleState(String projectId, String lifecy public synchronized boolean removeProject(String projectId) { // Because this method is synchronized, any code that relies on non-atomic read/write operations // should not fail if that code is also synchronized. 
- return projects.remove(checkNotNull(projectId)) != null; + policies.remove(checkNotNull(projectId)); + return projects.remove(projectId) != null; } } diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/spi/ResourceManagerRpc.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/spi/ResourceManagerRpc.java deleted file mode 100644 index 52dfc2d2368e..000000000000 --- a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/spi/ResourceManagerRpc.java +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Copyright 2015 Google Inc. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.google.gcloud.spi; - -import com.google.api.services.cloudresourcemanager.model.Project; -import com.google.gcloud.resourcemanager.ResourceManagerException; - -import java.util.Map; - -public interface ResourceManagerRpc { - - enum Option { - FILTER("filter"), - FIELDS("fields"), - PAGE_SIZE("pageSize"), - PAGE_TOKEN("pageToken"); - - private final String value; - - Option(String value) { - this.value = value; - } - - public String value() { - return value; - } - - @SuppressWarnings("unchecked") - T get(Map options) { - return (T) options.get(this); - } - - String getString(Map options) { - return get(options); - } - - Integer getInt(Map options) { - return get(options); - } - } - - class Tuple { - private final X x; - private final Y y; - - private Tuple(X x, Y y) { - this.x = x; - this.y = y; - } - - public static Tuple of(X x, Y y) { - return new Tuple<>(x, y); - } - - public X x() { - return x; - } - - public Y y() { - return y; - } - } - - Project create(Project project) throws ResourceManagerException; - - void delete(String projectId) throws ResourceManagerException; - - Project get(String projectId, Map options) throws ResourceManagerException; - - Tuple> list(Map options) throws ResourceManagerException; - - void undelete(String projectId) throws ResourceManagerException; - - Project replace(Project project) throws ResourceManagerException; - - // TODO(ajaykannan): implement "Organization" functionality when available (issue #319) -} diff --git a/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/LocalResourceManagerHelperTest.java b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/LocalResourceManagerHelperTest.java index a3418fff98ab..75df0ef9e3ae 100644 --- a/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/LocalResourceManagerHelperTest.java +++ b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/LocalResourceManagerHelperTest.java @@ -2,24 +2,29 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; +import com.google.api.services.cloudresourcemanager.model.Binding; +import 
com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import com.google.gcloud.resourcemanager.spi.DefaultResourceManagerRpc; +import com.google.gcloud.resourcemanager.spi.ResourceManagerRpc; +import com.google.gcloud.resourcemanager.spi.ResourceManagerRpc.Tuple; import com.google.gcloud.resourcemanager.testing.LocalResourceManagerHelper; -import com.google.gcloud.spi.DefaultResourceManagerRpc; -import com.google.gcloud.spi.ResourceManagerRpc; -import com.google.gcloud.spi.ResourceManagerRpc.Tuple; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; +import java.util.Collections; import java.util.HashMap; import java.util.Iterator; +import java.util.List; import java.util.Map; public class LocalResourceManagerHelperTest { @@ -45,7 +50,12 @@ public class LocalResourceManagerHelperTest { .setLabels(ImmutableMap.of("k1", "v1", "k2", "v2")); private static final com.google.api.services.cloudresourcemanager.model.Project PROJECT_WITH_PARENT = - copyFrom(COMPLETE_PROJECT).setProjectId("project-with-parent-id").setParent(PARENT); + copyFrom(COMPLETE_PROJECT).setProjectId("project-with-parent-id").setParent(PARENT); + private static final List BINDINGS = ImmutableList.of( + new Binding().setRole("roles/owner").setMembers(ImmutableList.of("user:me@gmail.com")), + new Binding().setRole("roles/viewer").setMembers(ImmutableList.of("group:group@gmail.com"))); + private static final com.google.api.services.cloudresourcemanager.model.Policy POLICY = + new com.google.api.services.cloudresourcemanager.model.Policy().setBindings(BINDINGS); @BeforeClass public static void beforeClass() { @@ -92,6 +102,13 @@ public void testCreate() { assertNull(returnedProject.getParent()); assertNotNull(returnedProject.getProjectNumber()); assertNotNull(returnedProject.getCreateTime()); + com.google.api.services.cloudresourcemanager.model.Policy policy = + rpc.getPolicy(PARTIAL_PROJECT.getProjectId()); + assertEquals(Collections.emptyList(), policy.getBindings()); + assertNotNull(policy.getEtag()); + assertEquals(0, policy.getVersion().intValue()); + rpc.replacePolicy(PARTIAL_PROJECT.getProjectId(), POLICY); + assertEquals(POLICY.getBindings(), rpc.getPolicy(PARTIAL_PROJECT.getProjectId()).getBindings()); try { rpc.create(PARTIAL_PROJECT); fail("Should fail, project already exists."); @@ -99,6 +116,8 @@ public void testCreate() { assertEquals(409, e.code()); assertTrue(e.getMessage().startsWith("A project with the same project ID") && e.getMessage().endsWith("already exists.")); + assertEquals( + POLICY.getBindings(), rpc.getPolicy(PARTIAL_PROJECT.getProjectId()).getBindings()); } returnedProject = rpc.create(PROJECT_WITH_PARENT); compareReadWriteFields(PROJECT_WITH_PARENT, returnedProject); @@ -609,6 +628,58 @@ public void testUndeleteWhenDeleteInProgress() { } } + @Test + public void testGetPolicy() { + assertNull(rpc.getPolicy("nonexistent-project")); + rpc.create(PARTIAL_PROJECT); + com.google.api.services.cloudresourcemanager.model.Policy policy = + rpc.getPolicy(PARTIAL_PROJECT.getProjectId()); + assertEquals(Collections.emptyList(), policy.getBindings()); + assertNotNull(policy.getEtag()); + } + + @Test + public void testReplacePolicy() { + try { + rpc.replacePolicy("nonexistent-project", POLICY); + fail("Project doesn't exist."); + } catch (ResourceManagerException e) { + assertEquals(403, e.code()); + assertTrue(e.getMessage().contains("project was not found")); + } + rpc.create(PARTIAL_PROJECT); + 
com.google.api.services.cloudresourcemanager.model.Policy invalidPolicy = + new com.google.api.services.cloudresourcemanager.model.Policy().setEtag("wrong-etag"); + try { + rpc.replacePolicy(PARTIAL_PROJECT.getProjectId(), invalidPolicy); + fail("Invalid etag."); + } catch (ResourceManagerException e) { + assertEquals(409, e.code()); + assertTrue(e.getMessage().startsWith("Policy etag mismatch")); + } + String originalEtag = rpc.getPolicy(PARTIAL_PROJECT.getProjectId()).getEtag(); + com.google.api.services.cloudresourcemanager.model.Policy newPolicy = + rpc.replacePolicy(PARTIAL_PROJECT.getProjectId(), POLICY); + assertEquals(POLICY.getBindings(), newPolicy.getBindings()); + assertNotNull(newPolicy.getEtag()); + assertNotEquals(originalEtag, newPolicy.getEtag()); + } + + @Test + public void testTestPermissions() { + List permissions = ImmutableList.of("resourcemanager.projects.get"); + try { + rpc.testPermissions("nonexistent-project", permissions); + fail("Nonexistent project."); + } catch (ResourceManagerException e) { + assertEquals(403, e.code()); + assertEquals("Project nonexistent-project not found.", e.getMessage()); + } + rpc.create(PARTIAL_PROJECT); + assertEquals(ImmutableList.of(true), + rpc.testPermissions(PARTIAL_PROJECT.getProjectId(), permissions)); + } + @Test public void testChangeLifecycleStatus() { assertFalse(RESOURCE_MANAGER_HELPER.changeLifecycleState( @@ -632,8 +703,10 @@ public void testChangeLifecycleStatus() { public void testRemoveProject() { assertFalse(RESOURCE_MANAGER_HELPER.removeProject(COMPLETE_PROJECT.getProjectId())); rpc.create(COMPLETE_PROJECT); + assertNotNull(rpc.getPolicy(COMPLETE_PROJECT.getProjectId())); assertTrue(RESOURCE_MANAGER_HELPER.removeProject(COMPLETE_PROJECT.getProjectId())); assertNull(rpc.get(COMPLETE_PROJECT.getProjectId(), EMPTY_RPC_OPTIONS)); + assertNull(rpc.getPolicy(COMPLETE_PROJECT.getProjectId())); } private void compareReadWriteFields( diff --git a/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/PolicyTest.java b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/PolicyTest.java index 05d1b85bdbed..04826dd9540f 100644 --- a/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/PolicyTest.java +++ b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/PolicyTest.java @@ -17,9 +17,10 @@ package com.google.gcloud.resourcemanager; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; -import com.google.common.collect.ImmutableSet; import com.google.gcloud.Identity; +import com.google.gcloud.resourcemanager.Policy.ProjectRole; import org.junit.Test; @@ -33,8 +34,10 @@ public class PolicyTest { private static final Identity GROUP = Identity.group("group@gmail.com"); private static final Identity DOMAIN = Identity.domain("google.com"); private static final Policy SIMPLE_POLICY = Policy.builder() - .addBinding(Policy.Role.VIEWER, ImmutableSet.of(USER, SERVICE_ACCOUNT, ALL_USERS)) - .addBinding(Policy.Role.EDITOR, ImmutableSet.of(ALL_AUTH_USERS, GROUP, DOMAIN)) + .addIdentity(ProjectRole.OWNER.value(), USER) + .addIdentity(ProjectRole.VIEWER.value(), ALL_USERS) + .addIdentity(ProjectRole.EDITOR.value(), ALL_AUTH_USERS, DOMAIN) + .addIdentity("roles/some-role", SERVICE_ACCOUNT, GROUP) .build(); private static final Policy FULL_POLICY = new Policy.Builder(SIMPLE_POLICY.bindings(), "etag", 1).build(); @@ -50,4 +53,16 @@ public void testPolicyToAndFromPb() { assertEquals(FULL_POLICY, 
Policy.fromPb(FULL_POLICY.toPb())); assertEquals(SIMPLE_POLICY, Policy.fromPb(SIMPLE_POLICY.toPb())); } + + @Test + public void testEquals() { + Policy copy = Policy.builder() + .addIdentity(ProjectRole.OWNER.value(), USER) + .addIdentity(ProjectRole.VIEWER.value(), ALL_USERS) + .addIdentity(ProjectRole.EDITOR.value(), ALL_AUTH_USERS, DOMAIN) + .addIdentity("roles/some-role", SERVICE_ACCOUNT, GROUP) + .build(); + assertEquals(SIMPLE_POLICY, copy); + assertNotEquals(SIMPLE_POLICY, FULL_POLICY); + } } diff --git a/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ProjectTest.java b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ProjectTest.java index 4e239acc45ef..0f4c205dde17 100644 --- a/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ProjectTest.java +++ b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ProjectTest.java @@ -25,12 +25,17 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; +import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import com.google.gcloud.Identity; +import com.google.gcloud.resourcemanager.Policy.ProjectRole; +import com.google.gcloud.resourcemanager.ProjectInfo.ResourceId; import org.junit.After; import org.junit.Before; import org.junit.Test; +import java.util.List; import java.util.Map; public class ProjectTest { @@ -47,6 +52,13 @@ public class ProjectTest { .createTimeMillis(CREATE_TIME_MILLIS) .state(STATE) .build(); + private static final Identity USER = Identity.user("abc@gmail.com"); + private static final Identity SERVICE_ACCOUNT = + Identity.serviceAccount("service-account@gmail.com"); + private static final Policy POLICY = Policy.builder() + .addIdentity(ProjectRole.OWNER.value(), USER) + .addIdentity(ProjectRole.EDITOR.value(), SERVICE_ACCOUNT) + .build(); private ResourceManager serviceMockReturnsOptions = createStrictMock(ResourceManager.class); private ResourceManagerOptions mockOptions = createMock(ResourceManagerOptions.class); @@ -84,12 +96,12 @@ public void testToBuilder() { @Test public void testBuilder() { - initializeExpectedProject(4); - expect(resourceManager.options()).andReturn(mockOptions).times(4); + expect(resourceManager.options()).andReturn(mockOptions).times(7); replay(resourceManager); Project.Builder builder = - new Project.Builder(new Project(resourceManager, new ProjectInfo.BuilderImpl(PROJECT_ID))); - Project project = builder.name(NAME) + new Project.Builder(new Project(resourceManager, new ProjectInfo.BuilderImpl("wrong-id"))); + Project project = builder.projectId(PROJECT_ID) + .name(NAME) .labels(LABELS) .projectNumber(PROJECT_NUMBER) .createTimeMillis(CREATE_TIME_MILLIS) @@ -102,6 +114,23 @@ public void testBuilder() { assertEquals(CREATE_TIME_MILLIS, project.createTimeMillis()); assertEquals(STATE, project.state()); assertEquals(resourceManager.options(), project.resourceManager().options()); + assertNull(project.parent()); + ResourceId parent = new ResourceId("id", "type"); + project = project.toBuilder() + .clearLabels() + .addLabel("k3", "v3") + .addLabel("k4", "v4") + .removeLabel("k4") + .parent(parent) + .build(); + assertEquals(PROJECT_ID, project.projectId()); + assertEquals(NAME, project.name()); + assertEquals(ImmutableMap.of("k3", "v3"), project.labels()); + assertEquals(PROJECT_NUMBER, project.projectNumber()); + assertEquals(CREATE_TIME_MILLIS, project.createTimeMillis()); + assertEquals(STATE, project.state()); + 
assertEquals(resourceManager.options(), project.resourceManager().options()); + assertEquals(parent, project.parent()); } @Test @@ -187,6 +216,39 @@ public void testReplace() { compareProjectInfos(expectedReplacedProject, actualReplacedProject); } + @Test + public void testGetPolicy() { + expect(resourceManager.options()).andReturn(mockOptions).times(1); + expect(resourceManager.getPolicy(PROJECT_ID)).andReturn(POLICY); + replay(resourceManager); + initializeProject(); + assertEquals(POLICY, project.getPolicy()); + } + + @Test + public void testReplacePolicy() { + expect(resourceManager.options()).andReturn(mockOptions).times(1); + expect(resourceManager.replacePolicy(PROJECT_ID, POLICY)).andReturn(POLICY); + replay(resourceManager); + initializeProject(); + assertEquals(POLICY, project.replacePolicy(POLICY)); + } + + @Test + public void testTestPermissions() { + List response = ImmutableList.of(true, true); + String getPermission = "resourcemanager.projects.get"; + String deletePermission = "resourcemanager.projects.delete"; + expect(resourceManager.options()).andReturn(mockOptions).times(1); + expect(resourceManager.testPermissions( + PROJECT_ID, ImmutableList.of(getPermission, deletePermission))) + .andReturn(response); + replay(resourceManager); + initializeProject(); + assertEquals( + response, project.testPermissions(ImmutableList.of(getPermission, deletePermission))); + } + private void compareProjects(Project expected, Project value) { assertEquals(expected, value); compareProjectInfos(expected, value); diff --git a/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ResourceManagerImplTest.java b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ResourceManagerImplTest.java index 1bc233311a4d..7d52901aa372 100644 --- a/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ResourceManagerImplTest.java +++ b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ResourceManagerImplTest.java @@ -18,21 +18,25 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; +import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import com.google.gcloud.Identity; import com.google.gcloud.Page; +import com.google.gcloud.resourcemanager.Policy.ProjectRole; import com.google.gcloud.resourcemanager.ProjectInfo.ResourceId; import com.google.gcloud.resourcemanager.ResourceManager.ProjectField; import com.google.gcloud.resourcemanager.ResourceManager.ProjectGetOption; import com.google.gcloud.resourcemanager.ResourceManager.ProjectListOption; +import com.google.gcloud.resourcemanager.spi.ResourceManagerRpc; +import com.google.gcloud.resourcemanager.spi.ResourceManagerRpcFactory; import com.google.gcloud.resourcemanager.testing.LocalResourceManagerHelper; -import com.google.gcloud.spi.ResourceManagerRpc; -import com.google.gcloud.spi.ResourceManagerRpcFactory; import org.easymock.EasyMock; import org.junit.AfterClass; @@ -43,6 +47,7 @@ import org.junit.rules.ExpectedException; import java.util.Iterator; +import java.util.List; import java.util.Map; public class ResourceManagerImplTest { @@ -65,6 +70,12 @@ public class ResourceManagerImplTest { .parent(PARENT) .build(); private 
static final Map EMPTY_RPC_OPTIONS = ImmutableMap.of(); + private static final Policy POLICY = + Policy.builder() + .addIdentity(ProjectRole.OWNER.value(), Identity.user("me@gmail.com")) + .addIdentity( + ProjectRole.EDITOR.value(), Identity.serviceAccount("serviceaccount@gmail.com")) + .build(); @Rule public ExpectedException thrown = ExpectedException.none(); @@ -320,6 +331,58 @@ public void testUndelete() { } } + @Test + public void testGetPolicy() { + assertNull(RESOURCE_MANAGER.getPolicy(COMPLETE_PROJECT.projectId())); + RESOURCE_MANAGER.create(COMPLETE_PROJECT); + RESOURCE_MANAGER.replacePolicy(COMPLETE_PROJECT.projectId(), POLICY); + Policy retrieved = RESOURCE_MANAGER.getPolicy(COMPLETE_PROJECT.projectId()); + assertEquals(POLICY.bindings(), retrieved.bindings()); + assertNotNull(retrieved.etag()); + assertEquals(0, retrieved.version().intValue()); + } + + @Test + public void testReplacePolicy() { + try { + RESOURCE_MANAGER.replacePolicy("nonexistent-project", POLICY); + fail("Project doesn't exist."); + } catch (ResourceManagerException e) { + assertEquals(403, e.code()); + assertTrue(e.getMessage().endsWith("project was not found.")); + } + RESOURCE_MANAGER.create(PARTIAL_PROJECT); + Policy oldPolicy = RESOURCE_MANAGER.getPolicy(PARTIAL_PROJECT.projectId()); + RESOURCE_MANAGER.replacePolicy(PARTIAL_PROJECT.projectId(), POLICY); + try { + RESOURCE_MANAGER.replacePolicy(PARTIAL_PROJECT.projectId(), oldPolicy); + fail("Policy with an invalid etag didn't cause error."); + } catch (ResourceManagerException e) { + assertEquals(409, e.code()); + assertTrue(e.getMessage().contains("Policy etag mismatch")); + } + String originalEtag = RESOURCE_MANAGER.getPolicy(PARTIAL_PROJECT.projectId()).etag(); + Policy newPolicy = RESOURCE_MANAGER.replacePolicy(PARTIAL_PROJECT.projectId(), POLICY); + assertEquals(POLICY.bindings(), newPolicy.bindings()); + assertNotNull(newPolicy.etag()); + assertNotEquals(originalEtag, newPolicy.etag()); + } + + @Test + public void testTestPermissions() { + List permissions = ImmutableList.of("resourcemanager.projects.get"); + try { + RESOURCE_MANAGER.testPermissions("nonexistent-project", permissions); + fail("Nonexistent project"); + } catch (ResourceManagerException e) { + assertEquals(403, e.code()); + assertEquals("Project nonexistent-project not found.", e.getMessage()); + } + RESOURCE_MANAGER.create(PARTIAL_PROJECT); + assertEquals(ImmutableList.of(true), + RESOURCE_MANAGER.testPermissions(PARTIAL_PROJECT.projectId(), permissions)); + } + @Test public void testRetryableException() { ResourceManagerRpcFactory rpcFactoryMock = EasyMock.createMock(ResourceManagerRpcFactory.class); diff --git a/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/SerializationTest.java b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/SerializationTest.java index 35b72ae1713f..4bc1bcede195 100644 --- a/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/SerializationTest.java +++ b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/SerializationTest.java @@ -16,26 +16,17 @@ package com.google.gcloud.resourcemanager; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotSame; - import com.google.common.collect.ImmutableMap; -import com.google.common.collect.ImmutableSet; +import com.google.gcloud.BaseSerializationTest; import com.google.gcloud.Identity; import com.google.gcloud.PageImpl; -import com.google.gcloud.RetryParams; - -import org.junit.Test; 
+import com.google.gcloud.Restorable; +import com.google.gcloud.resourcemanager.Policy.ProjectRole; -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.ObjectInputStream; -import java.io.ObjectOutputStream; import java.io.Serializable; import java.util.Collections; -public class SerializationTest { +public class SerializationTest extends BaseSerializationTest { private static final ResourceManager RESOURCE_MANAGER = ResourceManagerOptions.defaultInstance().service(); @@ -56,44 +47,24 @@ public class SerializationTest { private static final ResourceManager.ProjectListOption PROJECT_LIST_OPTION = ResourceManager.ProjectListOption.filter("name:*"); private static final Policy POLICY = Policy.builder() - .addBinding(Policy.Role.VIEWER, ImmutableSet.of(Identity.user("abc@gmail.com"))) + .addIdentity(ProjectRole.VIEWER.value(), Identity.user("abc@gmail.com")) .build(); + private static final ResourceManagerException RESOURCE_MANAGER_EXCEPTION = + new ResourceManagerException(42, "message"); - @Test - public void testServiceOptions() throws Exception { + @Override + protected Serializable[] serializableObjects() { ResourceManagerOptions options = ResourceManagerOptions.builder().build(); - ResourceManagerOptions serializedCopy = serializeAndDeserialize(options); - assertEquals(options, serializedCopy); - options = options.toBuilder() + ResourceManagerOptions otherOptions = options.toBuilder() .projectId("some-unnecessary-project-ID") - .retryParams(RetryParams.defaultInstance()) .build(); - serializedCopy = serializeAndDeserialize(options); - assertEquals(options, serializedCopy); - } - - @Test - public void testModelAndRequests() throws Exception { - Serializable[] objects = {PARTIAL_PROJECT_INFO, FULL_PROJECT_INFO, PROJECT, PAGE_RESULT, - PROJECT_GET_OPTION, PROJECT_LIST_OPTION, POLICY}; - for (Serializable obj : objects) { - Object copy = serializeAndDeserialize(obj); - assertEquals(obj, obj); - assertEquals(obj, copy); - assertNotSame(obj, copy); - assertEquals(copy, copy); - } + return new Serializable[]{PARTIAL_PROJECT_INFO, FULL_PROJECT_INFO, PROJECT, PAGE_RESULT, + PROJECT_GET_OPTION, PROJECT_LIST_OPTION, POLICY, RESOURCE_MANAGER_EXCEPTION, options, + otherOptions}; } - @SuppressWarnings("unchecked") - private T serializeAndDeserialize(T obj) throws IOException, ClassNotFoundException { - ByteArrayOutputStream bytes = new ByteArrayOutputStream(); - try (ObjectOutputStream output = new ObjectOutputStream(bytes)) { - output.writeObject(obj); - } - try (ObjectInputStream input = - new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray()))) { - return (T) input.readObject(); - } + @Override + protected Restorable[] restorableObjects() { + return null; } } diff --git a/gcloud-java-storage/README.md b/gcloud-java-storage/README.md index f7973544bba2..0ee05b31c10c 100644 --- a/gcloud-java-storage/README.md +++ b/gcloud-java-storage/README.md @@ -22,16 +22,16 @@ If you are using Maven, add this to your pom.xml file com.google.gcloud gcloud-java-storage - 0.1.4 + 0.1.5 ``` If you are using Gradle, add this to your dependencies ```Groovy -compile 'com.google.gcloud:gcloud-java-storage:0.1.4' +compile 'com.google.gcloud:gcloud-java-storage:0.1.5' ``` If you are using SBT, add this to your dependencies ```Scala -libraryDependencies += "com.google.gcloud" % "gcloud-java-storage" % "0.1.4" +libraryDependencies += "com.google.gcloud" % "gcloud-java-storage" % "0.1.5" ``` Example Application diff --git 
a/gcloud-java-storage/pom.xml b/gcloud-java-storage/pom.xml index f18283b70bc8..16427d50de3a 100644 --- a/gcloud-java-storage/pom.xml +++ b/gcloud-java-storage/pom.xml @@ -10,7 +10,7 @@ com.google.gcloud gcloud-java-pom - 0.1.5-SNAPSHOT + 0.1.6-SNAPSHOT gcloud-java-storage @@ -37,6 +37,13 @@
+ + ${project.groupId} + gcloud-java-core + ${project.version} + test-jar + test + junit junit diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Blob.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Blob.java index aea424ca4063..b6f668dada82 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Blob.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Blob.java @@ -22,14 +22,15 @@ import com.google.api.services.storage.model.StorageObject; import com.google.common.base.Function; +import com.google.gcloud.AuthCredentials; import com.google.gcloud.ReadChannel; import com.google.gcloud.WriteChannel; -import com.google.gcloud.spi.StorageRpc; -import com.google.gcloud.spi.StorageRpc.Tuple; import com.google.gcloud.storage.Storage.BlobTargetOption; import com.google.gcloud.storage.Storage.BlobWriteOption; import com.google.gcloud.storage.Storage.CopyRequest; import com.google.gcloud.storage.Storage.SignUrlOption; +import com.google.gcloud.storage.spi.StorageRpc; +import com.google.gcloud.storage.spi.StorageRpc.Tuple; import java.io.IOException; import java.io.ObjectInputStream; @@ -49,7 +50,7 @@ * {@link BlobInfo}. *

*/ -public final class Blob extends BlobInfo { +public class Blob extends BlobInfo { private static final long serialVersionUID = -6806832496717441434L; @@ -290,6 +291,12 @@ Builder updateTime(Long updateTime) { return this; } + @Override + Builder isDirectory(boolean isDirectory) { + infoBuilder.isDirectory(isDirectory); + return this; + } + @Override public Blob build() { return new Blob(storage, infoBuilder); @@ -447,16 +454,48 @@ public WriteChannel writer(BlobWriteOption... options) { } /** - * Generates a signed URL for this blob. If you want to allow access to for a fixed amount of time - * for this blob, you can use this method to generate a URL that is only valid within a certain - * time period. This is particularly useful if you don't want publicly accessible blobs, but don't - * want to require users to explicitly log in. + * Generates a signed URL for this blob. If you want to allow access for a fixed amount of time to + * this blob, you can use this method to generate a URL that is only valid within a certain time + * period. This is particularly useful if you don't want publicly accessible blobs, but also don't + * want to require users to explicitly log in. Signing a URL requires a service account and its + * associated private key. If a {@link AuthCredentials.ServiceAccountAuthCredentials} was passed + * to {@link StorageOptions.Builder#authCredentials(AuthCredentials)} or the default credentials + * are being used and the environment variable {@code GOOGLE_APPLICATION_CREDENTIALS} is set, then + * {@code signUrl} will use that service account and associated key to sign the URL. If the + * credentials passed to {@link StorageOptions} do not expose a private key (this is the case for + * App Engine credentials, Compute Engine credentials and Google Cloud SDK credentials) then + * {@code signUrl} will throw an {@link IllegalArgumentException} unless a service account with + * associated key is passed using the {@code SignUrlOption.serviceAccount()} option. The service + * account and private key passed with {@code SignUrlOption.serviceAccount()} have priority over + * any credentials set with {@link StorageOptions.Builder#authCredentials(AuthCredentials)}. + * + *

Example usage of creating a signed URL that is valid for 2 weeks, using the default + * credentials for signing the URL: + *

 {@code
+   * blob.signUrl(14, TimeUnit.DAYS);
+   * }
+ * + *

Example usage of creating a signed URL passing the {@code SignUrlOption.serviceAccount()} + * option, that will be used for signing the URL: + *

 {@code
+   * blob.signUrl(14, TimeUnit.DAYS, SignUrlOption.serviceAccount(
+   *     AuthCredentials.createForJson(new FileInputStream("/path/to/key.json"))));
+   * }
* * @param duration time until the signed URL expires, expressed in {@code unit}. The finer * granularity supported is 1 second, finer granularities will be truncated * @param unit time unit of the {@code duration} parameter * @param options optional URL signing options * @return a signed URL for this bucket and the specified options + * @throws IllegalArgumentException if + * {@link SignUrlOption#serviceAccount(AuthCredentials.ServiceAccountAuthCredentials)} was not + * used and no service account was provided to {@link StorageOptions} + * @throws IllegalArgumentException if the key associated to the provided service account is + * invalid + * @throws IllegalArgumentException if {@link SignUrlOption#withMd5()} option is used and + * {@link #md5()} is {@code null} + * @throws IllegalArgumentException if {@link SignUrlOption#withContentType()} option is used and + * {@link #contentType()} is {@code null} * @see Signed-URLs */ public URL signUrl(long duration, TimeUnit unit, SignUrlOption... options) { @@ -476,13 +515,13 @@ public Builder toBuilder() { } @Override - public boolean equals(Object obj) { + public final boolean equals(Object obj) { return obj instanceof Blob && Objects.equals(toPb(), ((Blob) obj).toPb()) && Objects.equals(options, ((Blob) obj).options); } @Override - public int hashCode() { + public final int hashCode() { return Objects.hash(super.hashCode(), options); } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobInfo.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobInfo.java index 54fabe87d766..cf509c8f0961 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobInfo.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobInfo.java @@ -78,6 +78,7 @@ public StorageObject apply(BlobInfo blobInfo) { private final String contentDisposition; private final String contentLanguage; private final Integer componentCount; + private final boolean isDirectory; /** * This class is meant for internal use only. Users are discouraged from using this class. @@ -187,6 +188,8 @@ public abstract static class Builder { abstract Builder updateTime(Long updateTime); + abstract Builder isDirectory(boolean isDirectory); + /** * Creates a {@code BlobInfo} object. */ @@ -215,6 +218,7 @@ static final class BuilderImpl extends Builder { private Long metageneration; private Long deleteTime; private Long updateTime; + private Boolean isDirectory; BuilderImpl(BlobId blobId) { this.blobId = blobId; @@ -241,6 +245,7 @@ static final class BuilderImpl extends Builder { metageneration = blobInfo.metageneration; deleteTime = blobInfo.deleteTime; updateTime = blobInfo.updateTime; + isDirectory = blobInfo.isDirectory; } @Override @@ -364,6 +369,12 @@ Builder updateTime(Long updateTime) { return this; } + @Override + Builder isDirectory(boolean isDirectory) { + this.isDirectory = isDirectory; + return this; + } + @Override public BlobInfo build() { checkNotNull(blobId); @@ -392,6 +403,7 @@ public BlobInfo build() { metageneration = builder.metageneration; deleteTime = builder.deleteTime; updateTime = builder.updateTime; + isDirectory = firstNonNull(builder.isDirectory, Boolean.FALSE); } /** @@ -588,6 +600,18 @@ public Long updateTime() { return updateTime; } + /** + * Returns {@code true} if the current blob represents a directory. This can only happen if the + * blob is returned by {@link Storage#list(String, Storage.BlobListOption...)} when the + * {@link Storage.BlobListOption#currentDirectory()} option is used. 
When this is the case only + * {@link #blobId()} and {@link #size()} are set for the current blob: {@link BlobId#name()} ends + * with the '/' character, {@link BlobId#generation()} returns {@code null} and {@link #size()} is + * {@code 0}. + */ + public boolean isDirectory() { + return isDirectory; + } + /** * Returns a builder for the current blob. */ @@ -761,6 +785,9 @@ public Acl apply(ObjectAccessControl objectAccessControl) { } })); } + if (storageObject.containsKey("isDirectory")) { + builder.isDirectory(Boolean.TRUE); + } return builder.build(); } } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobReadChannel.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobReadChannel.java index 2b9643434ecc..f9c6f912563d 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobReadChannel.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobReadChannel.java @@ -23,8 +23,8 @@ import com.google.gcloud.ReadChannel; import com.google.gcloud.RestorableState; import com.google.gcloud.RetryHelper; -import com.google.gcloud.spi.StorageRpc; -import com.google.gcloud.spi.StorageRpc.Tuple; +import com.google.gcloud.storage.spi.StorageRpc; +import com.google.gcloud.storage.spi.StorageRpc.Tuple; import java.io.IOException; import java.io.Serializable; @@ -127,7 +127,7 @@ public Tuple call() { return storageRpc.read(storageObject, requestOptions, position, toRead); } }, serviceOptions.retryParams(), StorageImpl.EXCEPTION_HANDLER); - if (lastEtag != null && !Objects.equals(result.x(), lastEtag)) { + if (result.y().length > 0 && lastEtag != null && !Objects.equals(result.x(), lastEtag)) { StringBuilder messageBuilder = new StringBuilder(); messageBuilder.append("Blob ").append(blob).append(" was updated while reading"); throw new StorageException(0, messageBuilder.toString()); diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobWriteChannel.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobWriteChannel.java index d1d12ec77638..30b0ec870f51 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobWriteChannel.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobWriteChannel.java @@ -23,7 +23,7 @@ import com.google.gcloud.RestorableState; import com.google.gcloud.RetryHelper; import com.google.gcloud.WriteChannel; -import com.google.gcloud.spi.StorageRpc; +import com.google.gcloud.storage.spi.StorageRpc; import java.util.Map; diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Bucket.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Bucket.java index d318626f4207..e44bd60d785c 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Bucket.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Bucket.java @@ -22,13 +22,12 @@ import static com.google.gcloud.storage.Bucket.BucketSourceOption.toSourceOptions; import com.google.common.base.Function; -import com.google.common.base.MoreObjects; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import com.google.gcloud.Page; -import com.google.gcloud.spi.StorageRpc; import com.google.gcloud.storage.Storage.BlobGetOption; import com.google.gcloud.storage.Storage.BucketTargetOption; +import com.google.gcloud.storage.spi.StorageRpc; import java.io.IOException; import java.io.InputStream; @@ -633,15 +632,13 @@ public List get(String blobName1, String blobName2, String... 
blobNames) { * * @param blob a blob name * @param content the blob content - * @param contentType the blob content type. If {@code null} then - * {@value com.google.gcloud.storage.Storage#DEFAULT_CONTENT_TYPE} is used. + * @param contentType the blob content type * @param options options for blob creation * @return a complete blob information * @throws StorageException upon failure */ public Blob create(String blob, byte[] content, String contentType, BlobTargetOption... options) { - BlobInfo blobInfo = BlobInfo.builder(BlobId.of(name(), blob)) - .contentType(MoreObjects.firstNonNull(contentType, Storage.DEFAULT_CONTENT_TYPE)).build(); + BlobInfo blobInfo = BlobInfo.builder(BlobId.of(name(), blob)).contentType(contentType).build(); StorageRpc.Tuple target = BlobTargetOption.toTargetOptions(blobInfo, options); return storage.create(target.x(), content, target.y()); @@ -654,16 +651,51 @@ public Blob create(String blob, byte[] content, String contentType, BlobTargetOp * * @param blob a blob name * @param content the blob content as a stream - * @param contentType the blob content type. If {@code null} then - * {@value com.google.gcloud.storage.Storage#DEFAULT_CONTENT_TYPE} is used. + * @param contentType the blob content type * @param options options for blob creation * @return a complete blob information * @throws StorageException upon failure */ public Blob create(String blob, InputStream content, String contentType, BlobWriteOption... options) { - BlobInfo blobInfo = BlobInfo.builder(BlobId.of(name(), blob)) - .contentType(MoreObjects.firstNonNull(contentType, Storage.DEFAULT_CONTENT_TYPE)).build(); + BlobInfo blobInfo = BlobInfo.builder(BlobId.of(name(), blob)).contentType(contentType).build(); + StorageRpc.Tuple write = + BlobWriteOption.toWriteOptions(blobInfo, options); + return storage.create(write.x(), content, write.y()); + } + + /** + * Creates a new blob in this bucket. Direct upload is used to upload {@code content}. + * For large content, {@link Blob#writer(com.google.gcloud.storage.Storage.BlobWriteOption...)} + * is recommended as it uses resumable upload. MD5 and CRC32C hashes of {@code content} are + * computed and used for validating transferred data. + * + * @param blob a blob name + * @param content the blob content + * @param options options for blob creation + * @return a complete blob information + * @throws StorageException upon failure + */ + public Blob create(String blob, byte[] content, BlobTargetOption... options) { + BlobInfo blobInfo = BlobInfo.builder(BlobId.of(name(), blob)).build(); + StorageRpc.Tuple target = + BlobTargetOption.toTargetOptions(blobInfo, options); + return storage.create(target.x(), content, target.y()); + } + + /** + * Creates a new blob in this bucket. Direct upload is used to upload {@code content}. + * For large content, {@link Blob#writer(com.google.gcloud.storage.Storage.BlobWriteOption...)} + * is recommended as it uses resumable upload. + * + * @param blob a blob name + * @param content the blob content as a stream + * @param options options for blob creation + * @return a complete blob information + * @throws StorageException upon failure + */ + public Blob create(String blob, InputStream content, BlobWriteOption... 
options) { + BlobInfo blobInfo = BlobInfo.builder(BlobId.of(name(), blob)).build(); StorageRpc.Tuple write = BlobWriteOption.toWriteOptions(blobInfo, options); return storage.create(write.x(), content, write.y()); diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/CopyWriter.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/CopyWriter.java index 7eb91d0910a2..743630b6c4c2 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/CopyWriter.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/CopyWriter.java @@ -22,9 +22,9 @@ import com.google.gcloud.Restorable; import com.google.gcloud.RestorableState; import com.google.gcloud.RetryHelper; -import com.google.gcloud.spi.StorageRpc; -import com.google.gcloud.spi.StorageRpc.RewriteRequest; -import com.google.gcloud.spi.StorageRpc.RewriteResponse; +import com.google.gcloud.storage.spi.StorageRpc; +import com.google.gcloud.storage.spi.StorageRpc.RewriteRequest; +import com.google.gcloud.storage.spi.StorageRpc.RewriteResponse; import java.io.Serializable; import java.util.Map; @@ -32,7 +32,13 @@ import java.util.concurrent.Callable; /** - * Google Storage blob copy writer. This class holds the result of a copy request. If source and + * Google Storage blob copy writer. A {@code CopyWriter} object allows to copy both blob's data and + * information. To override source blob's information supply a {@code BlobInfo} to the + * {@code CopyRequest} using either + * {@link Storage.CopyRequest.Builder#target(BlobInfo, Storage.BlobTargetOption...)} or + * {@link Storage.CopyRequest.Builder#target(BlobInfo, Iterable)}. + * + *

This class holds the result of a copy request. If source and * destination blobs share the same location and storage class the copy is completed in one RPC call * otherwise one or more {@link #copyChunk} calls are necessary to complete the copy. In addition, * {@link CopyWriter#result()} can be used to automatically complete the copy and return information @@ -65,11 +71,11 @@ public class CopyWriter implements Restorable { * * @throws StorageException upon failure */ - public BlobInfo result() { + public Blob result() { while (!isDone()) { copyChunk(); } - return BlobInfo.fromPb(rewriteResponse.result); + return Blob.fromPb(serviceOptions.service(), rewriteResponse.result); } /** @@ -120,8 +126,10 @@ public RestorableState capture() { serviceOptions, BlobId.fromPb(rewriteResponse.rewriteRequest.source), rewriteResponse.rewriteRequest.sourceOptions, + rewriteResponse.rewriteRequest.overrideInfo, BlobInfo.fromPb(rewriteResponse.rewriteRequest.target), rewriteResponse.rewriteRequest.targetOptions) + .result(rewriteResponse.result != null ? BlobInfo.fromPb(rewriteResponse.result) : null) .blobSize(blobSize()) .isDone(isDone()) .megabytesCopiedPerChunk(rewriteResponse.rewriteRequest.megabytesRewrittenPerCall) @@ -132,11 +140,12 @@ public RestorableState capture() { static class StateImpl implements RestorableState, Serializable { - private static final long serialVersionUID = 8279287678903181701L; + private static final long serialVersionUID = 1693964441435822700L; private final StorageOptions serviceOptions; private final BlobId source; private final Map sourceOptions; + private final boolean overrideInfo; private final BlobInfo target; private final Map targetOptions; private final BlobInfo result; @@ -150,6 +159,7 @@ static class StateImpl implements RestorableState, Serializable { this.serviceOptions = builder.serviceOptions; this.source = builder.source; this.sourceOptions = builder.sourceOptions; + this.overrideInfo = builder.overrideInfo; this.target = builder.target; this.targetOptions = builder.targetOptions; this.result = builder.result; @@ -165,6 +175,7 @@ static class Builder { private final StorageOptions serviceOptions; private final BlobId source; private final Map sourceOptions; + private final boolean overrideInfo; private final BlobInfo target; private final Map targetOptions; private BlobInfo result; @@ -175,11 +186,12 @@ static class Builder { private Long megabytesCopiedPerChunk; private Builder(StorageOptions options, BlobId source, - Map sourceOptions, - BlobInfo target, Map targetOptions) { + Map sourceOptions, boolean overrideInfo, BlobInfo target, + Map targetOptions) { this.serviceOptions = options; this.source = source; this.sourceOptions = sourceOptions; + this.overrideInfo = overrideInfo; this.target = target; this.targetOptions = targetOptions; } @@ -220,15 +232,15 @@ RestorableState build() { } static Builder builder(StorageOptions options, BlobId source, - Map sourceOptions, BlobInfo target, + Map sourceOptions, boolean overrideInfo, BlobInfo target, Map targetOptions) { - return new Builder(options, source, sourceOptions, target, targetOptions); + return new Builder(options, source, sourceOptions, overrideInfo, target, targetOptions); } @Override public CopyWriter restore() { - RewriteRequest rewriteRequest = new RewriteRequest( - source.toPb(), sourceOptions, target.toPb(), targetOptions, megabytesCopiedPerChunk); + RewriteRequest rewriteRequest = new RewriteRequest(source.toPb(), sourceOptions, + overrideInfo, target.toPb(), targetOptions, 
megabytesCopiedPerChunk); RewriteResponse rewriteResponse = new RewriteResponse(rewriteRequest, result != null ? result.toPb() : null, blobSize, isDone, rewriteToken, totalBytesCopied); @@ -237,8 +249,9 @@ public CopyWriter restore() { @Override public int hashCode() { - return Objects.hash(serviceOptions, source, sourceOptions, target, targetOptions, result, - blobSize, isDone, megabytesCopiedPerChunk, rewriteToken, totalBytesCopied); + return Objects.hash(serviceOptions, source, sourceOptions, overrideInfo, target, + targetOptions, result, blobSize, isDone, megabytesCopiedPerChunk, rewriteToken, + totalBytesCopied); } @Override @@ -253,6 +266,7 @@ public boolean equals(Object obj) { return Objects.equals(this.serviceOptions, other.serviceOptions) && Objects.equals(this.source, other.source) && Objects.equals(this.sourceOptions, other.sourceOptions) + && Objects.equals(this.overrideInfo, other.overrideInfo) && Objects.equals(this.target, other.target) && Objects.equals(this.targetOptions, other.targetOptions) && Objects.equals(this.result, other.result) @@ -267,10 +281,14 @@ public boolean equals(Object obj) { public String toString() { return MoreObjects.toStringHelper(this) .add("source", source) + .add("overrideInfo", overrideInfo) .add("target", target) - .add("isDone", isDone) - .add("totalBytesRewritten", totalBytesCopied) + .add("result", result) .add("blobSize", blobSize) + .add("isDone", isDone) + .add("rewriteToken", rewriteToken) + .add("totalBytesCopied", totalBytesCopied) + .add("megabytesCopiedPerChunk", megabytesCopiedPerChunk) .toString(); } } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Option.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Option.java index 2ec8426bfa9f..65c55da7efc8 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Option.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Option.java @@ -19,7 +19,7 @@ import static com.google.common.base.Preconditions.checkNotNull; import com.google.common.base.MoreObjects; -import com.google.gcloud.spi.StorageRpc; +import com.google.gcloud.storage.spi.StorageRpc; import java.io.Serializable; import java.util.Objects; diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Storage.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Storage.java index 98f3450b7f10..78f421e94e52 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Storage.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Storage.java @@ -24,13 +24,14 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Sets; +import com.google.gcloud.AuthCredentials; import com.google.gcloud.AuthCredentials.ServiceAccountAuthCredentials; import com.google.gcloud.Page; import com.google.gcloud.ReadChannel; import com.google.gcloud.Service; import com.google.gcloud.WriteChannel; -import com.google.gcloud.spi.StorageRpc; -import com.google.gcloud.spi.StorageRpc.Tuple; +import com.google.gcloud.storage.spi.StorageRpc; +import com.google.gcloud.storage.spi.StorageRpc.Tuple; import java.io.InputStream; import java.io.Serializable; @@ -52,8 +53,6 @@ */ public interface Storage extends Service { - String DEFAULT_CONTENT_TYPE = "application/octet-stream"; - enum PredefinedAcl { AUTHENTICATED_READ("authenticatedRead"), ALL_AUTHENTICATED_USERS("allAuthenticatedUsers"), @@ -626,16 +625,16 @@ private BucketListOption(StorageRpc.Option option, Object value) { 
} /** - * Returns an option to specify the maximum number of buckets to be returned. + * Returns an option to specify the maximum number of buckets returned per page. */ - public static BucketListOption maxResults(long maxResults) { - return new BucketListOption(StorageRpc.Option.MAX_RESULTS, maxResults); + public static BucketListOption pageSize(long pageSize) { + return new BucketListOption(StorageRpc.Option.MAX_RESULTS, pageSize); } /** * Returns an option to specify the page token from which to start listing buckets. */ - public static BucketListOption startPageToken(String pageToken) { + public static BucketListOption pageToken(String pageToken) { return new BucketListOption(StorageRpc.Option.PAGE_TOKEN, pageToken); } @@ -672,16 +671,16 @@ private BlobListOption(StorageRpc.Option option, Object value) { } /** - * Returns an option to specify the maximum number of blobs to be returned. + * Returns an option to specify the maximum number of blobs returned per page. */ - public static BlobListOption maxResults(long maxResults) { - return new BlobListOption(StorageRpc.Option.MAX_RESULTS, maxResults); + public static BlobListOption pageSize(long pageSize) { + return new BlobListOption(StorageRpc.Option.MAX_RESULTS, pageSize); } /** * Returns an option to specify the page token from which to start listing blobs. */ - public static BlobListOption startPageToken(String pageToken) { + public static BlobListOption pageToken(String pageToken) { return new BlobListOption(StorageRpc.Option.PAGE_TOKEN, pageToken); } @@ -694,10 +693,17 @@ public static BlobListOption prefix(String prefix) { } /** - * Returns an option to specify whether blob listing should include subdirectories or not. + * If specified, results are returned in a directory-like mode. Blobs whose names, after a + * possible {@link #prefix(String)}, do not contain the '/' delimiter are returned as is. Blobs + * whose names, after a possible {@link #prefix(String)}, contain the '/' delimiter, will have + * their name truncated after the delimiter and will be returned as {@link Blob} objects where + * only {@link Blob#blobId()}, {@link Blob#size()} and {@link Blob#isDirectory()} are set. For + * such directory blobs, ({@link BlobId#generation()} returns {@code null}), {@link Blob#size()} + * returns {@code 0} while {@link Blob#isDirectory()} returns {@code true}. Duplicate directory + * blobs are omitted. 
*/ - public static BlobListOption recursive(boolean recursive) { - return new BlobListOption(StorageRpc.Option.DELIMITER, recursive); + public static BlobListOption currentDirectory() { + return new BlobListOption(StorageRpc.Option.DELIMITER, true); } /** @@ -956,6 +962,7 @@ class CopyRequest implements Serializable { private final BlobId source; private final List sourceOptions; + private final boolean overrideInfo; private final BlobInfo target; private final List targetOptions; private final Long megabytesCopiedPerChunk; @@ -965,6 +972,7 @@ public static class Builder { private final Set sourceOptions = new LinkedHashSet<>(); private final Set targetOptions = new LinkedHashSet<>(); private BlobId source; + private boolean overrideInfo; private BlobInfo target; private Long megabytesCopiedPerChunk; @@ -1013,39 +1021,38 @@ public Builder sourceOptions(Iterable options) { * * @return the builder */ - public Builder target(BlobId target) { - this.target = BlobInfo.builder(target).build(); + public Builder target(BlobId targetId) { + this.overrideInfo = false; + this.target = BlobInfo.builder(targetId).build(); return this; } /** * Sets the copy target and target options. {@code target} parameter is used to override - * source blob information (e.g. {@code contentType}, {@code contentLanguage}). {@code - * target.contentType} is a required field. + * source blob information (e.g. {@code contentType}, {@code contentLanguage}). Target blob + * information is set exactly to {@code target}, no information is inherited from the source + * blob. * * @return the builder - * @throws IllegalArgumentException if {@code target.contentType} is {@code null} */ - public Builder target(BlobInfo target, BlobTargetOption... options) - throws IllegalArgumentException { - checkContentType(target); - this.target = target; + public Builder target(BlobInfo target, BlobTargetOption... options) { + this.overrideInfo = true; + this.target = checkNotNull(target); Collections.addAll(targetOptions, options); return this; } /** * Sets the copy target and target options. {@code target} parameter is used to override - * source blob information (e.g. {@code contentType}, {@code contentLanguage}). {@code - * target.contentType} is a required field. + * source blob information (e.g. {@code contentType}, {@code contentLanguage}). Target blob + * information is set exactly to {@code target}, no information is inherited from the source + * blob. * * @return the builder - * @throws IllegalArgumentException if {@code target.contentType} is {@code null} */ - public Builder target(BlobInfo target, Iterable options) - throws IllegalArgumentException { - checkContentType(target); - this.target = target; + public Builder target(BlobInfo target, Iterable options) { + this.overrideInfo = true; + this.target = checkNotNull(target); Iterables.addAll(targetOptions, options); return this; } @@ -1066,8 +1073,6 @@ public Builder megabytesCopiedPerChunk(Long megabytesCopiedPerChunk) { * Creates a {@code CopyRequest} object. 
*/ public CopyRequest build() { - checkNotNull(source); - checkNotNull(target); return new CopyRequest(this); } } @@ -1075,6 +1080,7 @@ public CopyRequest build() { private CopyRequest(Builder builder) { source = checkNotNull(builder.source); sourceOptions = ImmutableList.copyOf(builder.sourceOptions); + overrideInfo = builder.overrideInfo; target = checkNotNull(builder.target); targetOptions = ImmutableList.copyOf(builder.targetOptions); megabytesCopiedPerChunk = builder.megabytesCopiedPerChunk; @@ -1101,6 +1107,17 @@ public BlobInfo target() { return target; } + /** + * Returns whether to override the target blob information with {@link #target()}. + * If {@code true}, the value of {@link #target()} is used to replace source blob information + * (e.g. {@code contentType}, {@code contentLanguage}). Target blob information is set exactly + * to this value, no information is inherited from the source blob. If {@code false}, target + * blob information is inherited from the source blob. + */ + public boolean overrideInfo() { + return overrideInfo; + } + /** * Returns blob's target options. */ @@ -1119,34 +1136,27 @@ public Long megabytesCopiedPerChunk() { /** * Creates a copy request. {@code target} parameter is used to override source blob information - * (e.g. {@code contentType}, {@code contentLanguage}). {@code target.contentType} is a required - * field. + * (e.g. {@code contentType}, {@code contentLanguage}). * * @param sourceBucket name of the bucket containing the source blob * @param sourceBlob name of the source blob * @param target a {@code BlobInfo} object for the target blob * @return a copy request - * @throws IllegalArgumentException if {@code target.contentType} is {@code null} */ - public static CopyRequest of(String sourceBucket, String sourceBlob, BlobInfo target) - throws IllegalArgumentException { - checkContentType(target); + public static CopyRequest of(String sourceBucket, String sourceBlob, BlobInfo target) { return builder().source(sourceBucket, sourceBlob).target(target).build(); } /** - * Creates a copy request. {@code target} parameter is used to override source blob information - * (e.g. {@code contentType}, {@code contentLanguage}). {@code target.contentType} is a required - * field. + * Creates a copy request. {@code target} parameter is used to replace source blob information + * (e.g. {@code contentType}, {@code contentLanguage}). Target blob information is set exactly + * to {@code target}, no information is inherited from the source blob. * * @param sourceBlobId a {@code BlobId} object for the source blob * @param target a {@code BlobInfo} object for the target blob * @return a copy request - * @throws IllegalArgumentException if {@code target.contentType} is {@code null} */ - public static CopyRequest of(BlobId sourceBlobId, BlobInfo target) - throws IllegalArgumentException { - checkContentType(target); + public static CopyRequest of(BlobId sourceBlobId, BlobInfo target) { return builder().source(sourceBlobId).target(target).build(); } @@ -1208,14 +1218,10 @@ public static CopyRequest of(BlobId sourceBlobId, BlobId targetBlobId) { public static Builder builder() { return new Builder(); } - - private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentException { - checkArgument(blobInfo.contentType() != null, "Blob content type can not be null"); - } } /** - * Create a new bucket. + * Creates a new bucket. 
* * @return a complete bucket * @throws StorageException upon failure @@ -1223,7 +1229,7 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx Bucket create(BucketInfo bucketInfo, BucketTargetOption... options); /** - * Create a new blob with no content. + * Creates a new blob with no content. * * @return a [@code Blob} with complete information * @throws StorageException upon failure @@ -1231,7 +1237,7 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx Blob create(BlobInfo blobInfo, BlobTargetOption... options); /** - * Create a new blob. Direct upload is used to upload {@code content}. For large content, + * Creates a new blob. Direct upload is used to upload {@code content}. For large content, * {@link #writer} is recommended as it uses resumable upload. MD5 and CRC32C hashes of * {@code content} are computed and used for validating transferred data. * @@ -1242,7 +1248,7 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx Blob create(BlobInfo blobInfo, byte[] content, BlobTargetOption... options); /** - * Create a new blob. Direct upload is used to upload {@code content}. For large content, + * Creates a new blob. Direct upload is used to upload {@code content}. For large content, * {@link #writer} is recommended as it uses resumable upload. By default any md5 and crc32c * values in the given {@code blobInfo} are ignored unless requested via the * {@code BlobWriteOption.md5Match} and {@code BlobWriteOption.crc32cMatch} options. The given @@ -1254,49 +1260,50 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx Blob create(BlobInfo blobInfo, InputStream content, BlobWriteOption... options); /** - * Return the requested bucket or {@code null} if not found. + * Returns the requested bucket or {@code null} if not found. * * @throws StorageException upon failure */ Bucket get(String bucket, BucketGetOption... options); /** - * Return the requested blob or {@code null} if not found. + * Returns the requested blob or {@code null} if not found. * * @throws StorageException upon failure */ Blob get(String bucket, String blob, BlobGetOption... options); /** - * Return the requested blob or {@code null} if not found. + * Returns the requested blob or {@code null} if not found. * * @throws StorageException upon failure */ Blob get(BlobId blob, BlobGetOption... options); /** - * Return the requested blob or {@code null} if not found. + * Returns the requested blob or {@code null} if not found. * * @throws StorageException upon failure */ Blob get(BlobId blob); /** - * List the project's buckets. + * Lists the project's buckets. * * @throws StorageException upon failure */ Page list(BucketListOption... options); /** - * List the bucket's blobs. + * Lists the bucket's blobs. If the {@link BlobListOption#currentDirectory()} option is provided, + * results are returned in a directory-like mode. * * @throws StorageException upon failure */ Page list(String bucket, BlobListOption... options); /** - * Update bucket information. + * Updates bucket information. * * @return the updated bucket * @throws StorageException upon failure @@ -1304,7 +1311,7 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx Bucket update(BucketInfo bucketInfo, BucketTargetOption... options); /** - * Update blob information. Original metadata are merged with metadata in the provided + * Updates blob information. 
Original metadata are merged with metadata in the provided * {@code blobInfo}. To replace metadata instead you first have to unset them. Unsetting metadata * can be done by setting the provided {@code blobInfo}'s metadata to {@code null}. * @@ -1321,7 +1328,7 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx Blob update(BlobInfo blobInfo, BlobTargetOption... options); /** - * Update blob information. Original metadata are merged with metadata in the provided + * Updates blob information. Original metadata are merged with metadata in the provided * {@code blobInfo}. To replace metadata instead you first have to unset them. Unsetting metadata * can be done by setting the provided {@code blobInfo}'s metadata to {@code null}. * @@ -1338,7 +1345,7 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx Blob update(BlobInfo blobInfo); /** - * Delete the requested bucket. + * Deletes the requested bucket. * * @return {@code true} if bucket was deleted, {@code false} if it was not found * @throws StorageException upon failure @@ -1346,7 +1353,7 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx boolean delete(String bucket, BucketSourceOption... options); /** - * Delete the requested blob. + * Deletes the requested blob. * * @return {@code true} if blob was deleted, {@code false} if it was not found * @throws StorageException upon failure @@ -1354,7 +1361,7 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx boolean delete(String bucket, String blob, BlobSourceOption... options); /** - * Delete the requested blob. + * Deletes the requested blob. * * @return {@code true} if blob was deleted, {@code false} if it was not found * @throws StorageException upon failure @@ -1362,7 +1369,7 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx boolean delete(BlobId blob, BlobSourceOption... options); /** - * Delete the requested blob. + * Deletes the requested blob. * * @return {@code true} if blob was deleted, {@code false} if it was not found * @throws StorageException upon failure @@ -1370,7 +1377,7 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx boolean delete(BlobId blob); /** - * Send a compose request. + * Sends a compose request. * * @return the composed blob * @throws StorageException upon failure @@ -1378,12 +1385,18 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx Blob compose(ComposeRequest composeRequest); /** - * Sends a copy request. Returns a {@link CopyWriter} object for the provided - * {@code CopyRequest}. If source and destination objects share the same location and storage - * class the source blob is copied with one request and {@link CopyWriter#result()} immediately - * returns, regardless of the {@link CopyRequest#megabytesCopiedPerChunk} parameter. - * If source and destination have different location or storage class {@link CopyWriter#result()} - * might issue multiple RPC calls depending on blob's size. + * Sends a copy request. This method copies both blob's data and information. To override source + * blob's information supply a {@code BlobInfo} to the + * {@code CopyRequest} using either + * {@link Storage.CopyRequest.Builder#target(BlobInfo, Storage.BlobTargetOption...)} or + * {@link Storage.CopyRequest.Builder#target(BlobInfo, Iterable)}. + * + *

+   * <p>This method returns a {@link CopyWriter} object for the provided {@code CopyRequest}. If
+   * source and destination objects share the same location and storage class the source blob is
+   * copied with one request and {@link CopyWriter#result()} immediately returns, regardless of the
+   * {@link CopyRequest#megabytesCopiedPerChunk} parameter. If source and destination have different
+   * location or storage class {@link CopyWriter#result()} might issue multiple RPC calls depending
+   * on blob's size.
    *
    * <p>Example usage of copy:
    * <pre> {@code BlobInfo blob = service.copy(copyRequest).result();}
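A slightly fuller sketch of the copy flow described in the new javadoc above, assuming an initialized `storage` service and placeholder bucket/blob names (illustrative only, not part of the patch):

```java
// Copy a blob and override the target's information via target(BlobInfo, BlobTargetOption...).
BlobId source = BlobId.of("my-bucket", "source-blob");
BlobInfo target = BlobInfo.builder(BlobId.of("my-bucket", "target-blob"))
    .contentType("text/plain")
    .build();
Storage.CopyRequest request = Storage.CopyRequest.builder()
    .source(source)
    .target(target, Storage.BlobTargetOption.predefinedAcl(Storage.PredefinedAcl.PUBLIC_READ))
    .build();
CopyWriter copyWriter = storage.copy(request);
while (!copyWriter.isDone()) {
  copyWriter.copyChunk(); // runs only if the copy did not complete in the first request
}
BlobInfo copied = copyWriter.result();
```

Calling `result()` by itself also completes any remaining chunks, so the explicit `copyChunk()` loop is only needed when per-chunk control is wanted.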
@@ -1422,7 +1435,7 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx
   byte[] readAllBytes(BlobId blob, BlobSourceOption... options);
 
   /**
-   * Send a batch request.
+   * Sends a batch request.
    *
    * @return the batch response
    * @throws StorageException upon failure
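A minimal batching sketch against the API above, assuming a `storage` instance and placeholder names (illustrative only, not part of the patch):

```java
// Submit several operations in a single RPC; each one succeeds or fails independently.
BatchRequest batch = BatchRequest.builder()
    .get("my-bucket", "blob-1")
    .update(BlobInfo.builder("my-bucket", "blob-2").contentType("text/plain").build())
    .delete("my-bucket", "blob-3")
    .build();
BatchResponse response = storage.submit(batch);
// Per-operation results come back in the order the operations were added.
List<BatchResponse.Result<Boolean>> deleteResults = response.deletes();
```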
@@ -1430,7 +1443,7 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx
   BatchResponse submit(BatchRequest batchRequest);
 
   /**
-   * Return a channel for reading the blob's content. The blob's latest generation is read. If the
+   * Returns a channel for reading the blob's content. The blob's latest generation is read. If the
    * blob changes while reading (i.e. {@link BlobInfo#etag()} changes), subsequent calls to
    * {@code blobReadChannel.read(ByteBuffer)} may throw {@link StorageException}.
    *
@@ -1443,7 +1456,7 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx
   ReadChannel reader(String bucket, String blob, BlobSourceOption... options);
 
   /**
-   * Return a channel for reading the blob's content. If {@code blob.generation()} is set
+   * Returns a channel for reading the blob's content. If {@code blob.generation()} is set
    * data corresponding to that generation is read. If {@code blob.generation()} is {@code null}
    * the blob's latest generation is read. If the blob changes while reading (i.e.
    * {@link BlobInfo#etag()} changes), subsequent calls to {@code blobReadChannel.read(ByteBuffer)}
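A usage sketch for the read channel described above, with placeholder names and an assumed `storage` instance (not part of the patch):

```java
// Read a blob's content through a ReadChannel.
BlobId blobId = BlobId.of("my-bucket", "my-blob"); // latest generation, since none is set
ByteArrayOutputStream content = new ByteArrayOutputStream();
try (ReadChannel reader = storage.reader(blobId)) {
  ByteBuffer buffer = ByteBuffer.allocate(64 * 1024);
  while (reader.read(buffer) > 0) {
    buffer.flip();
    content.write(buffer.array(), 0, buffer.limit());
    buffer.clear();
  }
}
```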
@@ -1459,7 +1472,7 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx
   ReadChannel reader(BlobId blob, BlobSourceOption... options);
 
   /**
-   * Create a blob and return a channel for writing its content. By default any md5 and crc32c
+   * Creates a blob and returns a channel for writing its content. By default any md5 and crc32c
    * values in the given {@code blobInfo} are ignored unless requested via the
    * {@code BlobWriteOption.md5Match} and {@code BlobWriteOption.crc32cMatch} options.
    *
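And the corresponding write-side sketch, again with placeholder names and an assumed `storage` instance (not part of the patch):

```java
// Create a blob by streaming its content through a WriteChannel.
BlobInfo blobInfo = BlobInfo.builder("my-bucket", "my-blob").contentType("text/plain").build();
byte[] content = "hello world".getBytes(StandardCharsets.UTF_8);
try (WriteChannel writer = storage.writer(blobInfo)) {
  writer.write(ByteBuffer.wrap(content));
}
```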
@@ -1468,23 +1481,48 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx
   WriteChannel writer(BlobInfo blobInfo, BlobWriteOption... options);
 
   /**
-   * Generates a signed URL for a blob.
-   * If you have a blob that you want to allow access to for a fixed
-   * amount of time, you can use this method to generate a URL that
-   * is only valid within a certain time period.
-   * This is particularly useful if you don't want publicly
-   * accessible blobs, but don't want to require users to explicitly log in.
+   * Generates a signed URL for a blob. If you have a blob that you want to allow access to for a
+   * fixed amount of time, you can use this method to generate a URL that is only valid within a
+   * certain time period. This is particularly useful if you don't want publicly accessible blobs,
+   * but also don't want to require users to explicitly log in. Signing a URL requires a service
+   * account and its associated private key. If a {@link ServiceAccountAuthCredentials} was passed
+   * to {@link StorageOptions.Builder#authCredentials(AuthCredentials)} or the default credentials
+   * are being used and the environment variable {@code GOOGLE_APPLICATION_CREDENTIALS} is set, then
+   * {@code signUrl} will use that service account and associated key to sign the URL. If the
+   * credentials passed to {@link StorageOptions} do not expose a private key (this is the case for
+   * App Engine credentials, Compute Engine credentials and Google Cloud SDK credentials) then
+   * {@code signUrl} will throw an {@link IllegalArgumentException} unless a service account with
+   * associated key is passed using the {@code SignUrlOption.serviceAccount()} option. The service
+   * account and private key passed with {@code SignUrlOption.serviceAccount()} have priority over
+   * any credentials set with {@link StorageOptions.Builder#authCredentials(AuthCredentials)}.
    *
-   * <p>Example usage of creating a signed URL that is valid for 2 weeks:
+   * <p>Example usage of creating a signed URL that is valid for 2 weeks, using the default
+   * credentials for signing the URL:
    * <pre> {@code
    * service.signUrl(BlobInfo.builder("bucket", "name").build(), 14, TimeUnit.DAYS);
    * }</pre>
    *
+   * <p>Example usage of creating a signed URL passing the {@code SignUrlOption.serviceAccount()}
+   * option, that will be used for signing the URL:
+   * <pre> {@code
+   * service.signUrl(BlobInfo.builder("bucket", "name").build(), 14, TimeUnit.DAYS,
+   *     SignUrlOption.serviceAccount(
+   *         AuthCredentials.createForJson(new FileInputStream("/path/to/key.json"))));
+   * }</pre>
+ * * @param blobInfo the blob associated with the signed URL * @param duration time until the signed URL expires, expressed in {@code unit}. The finest * granularity supported is 1 second, finer granularities will be truncated * @param unit time unit of the {@code duration} parameter * @param options optional URL signing options + * @throws IllegalArgumentException if {@code SignUrlOption.serviceAccount()} was not used and no + * service account was provided to {@link StorageOptions} + * @throws IllegalArgumentException if the key associated to the provided service account is + * invalid + * @throws IllegalArgumentException if {@code SignUrlOption.withMd5()} option is used and + * {@code blobInfo.md5()} is {@code null} + * @throws IllegalArgumentException if {@code SignUrlOption.withContentType()} option is used and + * {@code blobInfo.contentType()} is {@code null} * @see Signed-URLs */ URL signUrl(BlobInfo blobInfo, long duration, TimeUnit unit, SignUrlOption... options); diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageImpl.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageImpl.java index de77cba021a1..cf709ba5e293 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageImpl.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageImpl.java @@ -19,15 +19,15 @@ import static com.google.common.base.MoreObjects.firstNonNull; import static com.google.common.base.Preconditions.checkArgument; import static com.google.gcloud.RetryHelper.runWithRetries; -import static com.google.gcloud.spi.StorageRpc.Option.DELIMITER; -import static com.google.gcloud.spi.StorageRpc.Option.IF_GENERATION_MATCH; -import static com.google.gcloud.spi.StorageRpc.Option.IF_GENERATION_NOT_MATCH; -import static com.google.gcloud.spi.StorageRpc.Option.IF_METAGENERATION_MATCH; -import static com.google.gcloud.spi.StorageRpc.Option.IF_METAGENERATION_NOT_MATCH; -import static com.google.gcloud.spi.StorageRpc.Option.IF_SOURCE_GENERATION_MATCH; -import static com.google.gcloud.spi.StorageRpc.Option.IF_SOURCE_GENERATION_NOT_MATCH; -import static com.google.gcloud.spi.StorageRpc.Option.IF_SOURCE_METAGENERATION_MATCH; -import static com.google.gcloud.spi.StorageRpc.Option.IF_SOURCE_METAGENERATION_NOT_MATCH; +import static com.google.gcloud.storage.spi.StorageRpc.Option.DELIMITER; +import static com.google.gcloud.storage.spi.StorageRpc.Option.IF_GENERATION_MATCH; +import static com.google.gcloud.storage.spi.StorageRpc.Option.IF_GENERATION_NOT_MATCH; +import static com.google.gcloud.storage.spi.StorageRpc.Option.IF_METAGENERATION_MATCH; +import static com.google.gcloud.storage.spi.StorageRpc.Option.IF_METAGENERATION_NOT_MATCH; +import static com.google.gcloud.storage.spi.StorageRpc.Option.IF_SOURCE_GENERATION_MATCH; +import static com.google.gcloud.storage.spi.StorageRpc.Option.IF_SOURCE_GENERATION_NOT_MATCH; +import static com.google.gcloud.storage.spi.StorageRpc.Option.IF_SOURCE_METAGENERATION_MATCH; +import static com.google.gcloud.storage.spi.StorageRpc.Option.IF_SOURCE_METAGENERATION_NOT_MATCH; import static java.nio.charset.StandardCharsets.UTF_8; import com.google.api.services.storage.model.StorageObject; @@ -48,9 +48,9 @@ import com.google.gcloud.PageImpl.NextPageFetcher; import com.google.gcloud.ReadChannel; import com.google.gcloud.RetryHelper.RetryHelperException; -import com.google.gcloud.spi.StorageRpc; -import com.google.gcloud.spi.StorageRpc.RewriteResponse; -import com.google.gcloud.spi.StorageRpc.Tuple; +import 
com.google.gcloud.storage.spi.StorageRpc; +import com.google.gcloud.storage.spi.StorageRpc.RewriteResponse; +import com.google.gcloud.storage.spi.StorageRpc.Tuple; import java.io.ByteArrayInputStream; import java.io.InputStream; @@ -76,6 +76,7 @@ final class StorageImpl extends BaseService implements Storage { private static final byte[] EMPTY_BYTE_ARRAY = {}; private static final String EMPTY_BYTE_ARRAY_MD5 = "1B2M2Y8AsgTpgAmY7PhCfg=="; private static final String EMPTY_BYTE_ARRAY_CRC32C = "AAAAAA=="; + private static final String PATH_DELIMITER = "/"; private static final Function, Boolean> DELETE_FUNCTION = new Function, Boolean>() { @@ -412,15 +413,16 @@ public CopyWriter copy(final CopyRequest copyRequest) { final StorageObject source = copyRequest.source().toPb(); final Map sourceOptions = optionMap(copyRequest.source().generation(), null, copyRequest.sourceOptions(), true); - final StorageObject target = copyRequest.target().toPb(); + final StorageObject targetObject = copyRequest.target().toPb(); final Map targetOptions = optionMap(copyRequest.target().generation(), copyRequest.target().metageneration(), copyRequest.targetOptions()); try { RewriteResponse rewriteResponse = runWithRetries(new Callable() { @Override public RewriteResponse call() { - return storageRpc.openRewrite(new StorageRpc.RewriteRequest(source, sourceOptions, target, - targetOptions, copyRequest.megabytesCopiedPerChunk())); + return storageRpc.openRewrite(new StorageRpc.RewriteRequest(source, sourceOptions, + copyRequest.overrideInfo(), targetObject, targetOptions, + copyRequest.megabytesCopiedPerChunk())); } }, options().retryParams(), EXCEPTION_HANDLER); return new CopyWriter(options(), rewriteResponse); @@ -669,7 +671,7 @@ private static void addToOptionMap(StorageRpc.Option getOption, StorageRpc.O } Boolean value = (Boolean) temp.remove(DELIMITER); if (Boolean.TRUE.equals(value)) { - temp.put(DELIMITER, options().pathDelimiter()); + temp.put(DELIMITER, PATH_DELIMITER); } if (useAsSource) { addToOptionMap(IF_GENERATION_MATCH, IF_SOURCE_GENERATION_MATCH, generation, temp); diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageOptions.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageOptions.java index bd30cb173366..e7e1c2778fa9 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageOptions.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageOptions.java @@ -16,14 +16,12 @@ package com.google.gcloud.storage; -import com.google.common.base.MoreObjects; import com.google.common.collect.ImmutableSet; import com.google.gcloud.ServiceOptions; -import com.google.gcloud.spi.DefaultStorageRpc; -import com.google.gcloud.spi.StorageRpc; -import com.google.gcloud.spi.StorageRpcFactory; +import com.google.gcloud.storage.spi.DefaultStorageRpc; +import com.google.gcloud.storage.spi.StorageRpc; +import com.google.gcloud.storage.spi.StorageRpcFactory; -import java.util.Objects; import java.util.Set; public class StorageOptions extends ServiceOptions { @@ -31,9 +29,6 @@ public class StorageOptions extends ServiceOptions SCOPES = ImmutableSet.of(GCS_SCOPE); - private static final String DEFAULT_PATH_DELIMITER = "/"; - - private final String pathDelimiter; public static class DefaultStorageFactory implements StorageFactory { @@ -58,24 +53,10 @@ public StorageRpc create(StorageOptions options) { public static class Builder extends ServiceOptions.Builder { - private String pathDelimiter; - private Builder() {} private 
Builder(StorageOptions options) { super(options); - pathDelimiter = options.pathDelimiter; - } - - /** - * Sets the path delimiter for the storage service. - * - * @param pathDelimiter the path delimiter to set - * @return the builder - */ - public Builder pathDelimiter(String pathDelimiter) { - this.pathDelimiter = pathDelimiter; - return this; } @Override @@ -86,7 +67,6 @@ public StorageOptions build() { private StorageOptions(Builder builder) { super(StorageFactory.class, StorageRpcFactory.class, builder); - pathDelimiter = MoreObjects.firstNonNull(builder.pathDelimiter, DEFAULT_PATH_DELIMITER); } @SuppressWarnings("unchecked") @@ -106,13 +86,6 @@ protected Set scopes() { return SCOPES; } - /** - * Returns the storage service's path delimiter. - */ - public String pathDelimiter() { - return pathDelimiter; - } - /** * Returns a default {@code StorageOptions} instance. */ @@ -128,16 +101,12 @@ public Builder toBuilder() { @Override public int hashCode() { - return baseHashCode() ^ Objects.hash(pathDelimiter); + return baseHashCode(); } @Override public boolean equals(Object obj) { - if (!(obj instanceof StorageOptions)) { - return false; - } - StorageOptions other = (StorageOptions) obj; - return baseEquals(other) && Objects.equals(pathDelimiter, other.pathDelimiter); + return obj instanceof StorageOptions && baseEquals((StorageOptions) obj); } public static Builder builder() { diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/spi/DefaultStorageRpc.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/spi/DefaultStorageRpc.java similarity index 86% rename from gcloud-java-storage/src/main/java/com/google/gcloud/spi/DefaultStorageRpc.java rename to gcloud-java-storage/src/main/java/com/google/gcloud/storage/spi/DefaultStorageRpc.java index dc84a1de5559..8d06832534e2 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/spi/DefaultStorageRpc.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/spi/DefaultStorageRpc.java @@ -12,25 +12,27 @@ * the License. 
*/ -package com.google.gcloud.spi; - -import static com.google.gcloud.spi.StorageRpc.Option.DELIMITER; -import static com.google.gcloud.spi.StorageRpc.Option.FIELDS; -import static com.google.gcloud.spi.StorageRpc.Option.IF_GENERATION_MATCH; -import static com.google.gcloud.spi.StorageRpc.Option.IF_GENERATION_NOT_MATCH; -import static com.google.gcloud.spi.StorageRpc.Option.IF_METAGENERATION_MATCH; -import static com.google.gcloud.spi.StorageRpc.Option.IF_METAGENERATION_NOT_MATCH; -import static com.google.gcloud.spi.StorageRpc.Option.IF_SOURCE_GENERATION_MATCH; -import static com.google.gcloud.spi.StorageRpc.Option.IF_SOURCE_GENERATION_NOT_MATCH; -import static com.google.gcloud.spi.StorageRpc.Option.IF_SOURCE_METAGENERATION_MATCH; -import static com.google.gcloud.spi.StorageRpc.Option.IF_SOURCE_METAGENERATION_NOT_MATCH; -import static com.google.gcloud.spi.StorageRpc.Option.MAX_RESULTS; -import static com.google.gcloud.spi.StorageRpc.Option.PAGE_TOKEN; -import static com.google.gcloud.spi.StorageRpc.Option.PREDEFINED_ACL; -import static com.google.gcloud.spi.StorageRpc.Option.PREDEFINED_DEFAULT_OBJECT_ACL; -import static com.google.gcloud.spi.StorageRpc.Option.PREFIX; -import static com.google.gcloud.spi.StorageRpc.Option.VERSIONS; +package com.google.gcloud.storage.spi; + +import static com.google.common.base.MoreObjects.firstNonNull; +import static com.google.gcloud.storage.spi.StorageRpc.Option.DELIMITER; +import static com.google.gcloud.storage.spi.StorageRpc.Option.FIELDS; +import static com.google.gcloud.storage.spi.StorageRpc.Option.IF_GENERATION_MATCH; +import static com.google.gcloud.storage.spi.StorageRpc.Option.IF_GENERATION_NOT_MATCH; +import static com.google.gcloud.storage.spi.StorageRpc.Option.IF_METAGENERATION_MATCH; +import static com.google.gcloud.storage.spi.StorageRpc.Option.IF_METAGENERATION_NOT_MATCH; +import static com.google.gcloud.storage.spi.StorageRpc.Option.IF_SOURCE_GENERATION_MATCH; +import static com.google.gcloud.storage.spi.StorageRpc.Option.IF_SOURCE_GENERATION_NOT_MATCH; +import static com.google.gcloud.storage.spi.StorageRpc.Option.IF_SOURCE_METAGENERATION_MATCH; +import static com.google.gcloud.storage.spi.StorageRpc.Option.IF_SOURCE_METAGENERATION_NOT_MATCH; +import static com.google.gcloud.storage.spi.StorageRpc.Option.MAX_RESULTS; +import static com.google.gcloud.storage.spi.StorageRpc.Option.PAGE_TOKEN; +import static com.google.gcloud.storage.spi.StorageRpc.Option.PREDEFINED_ACL; +import static com.google.gcloud.storage.spi.StorageRpc.Option.PREDEFINED_DEFAULT_OBJECT_ACL; +import static com.google.gcloud.storage.spi.StorageRpc.Option.PREFIX; +import static com.google.gcloud.storage.spi.StorageRpc.Option.VERSIONS; import static java.net.HttpURLConnection.HTTP_NOT_FOUND; +import static javax.servlet.http.HttpServletResponse.SC_REQUESTED_RANGE_NOT_SATISFIABLE; import com.google.api.client.googleapis.batch.json.JsonBatchCallback; import com.google.api.client.googleapis.json.GoogleJsonError; @@ -56,8 +58,9 @@ import com.google.api.services.storage.model.ComposeRequest.SourceObjects.ObjectPreconditions; import com.google.api.services.storage.model.Objects; import com.google.api.services.storage.model.StorageObject; -import com.google.common.base.MoreObjects; +import com.google.common.base.Function; import com.google.common.collect.ImmutableList; +import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.gcloud.storage.StorageException; @@ -66,6 +69,7 @@ import 
java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; +import java.math.BigInteger; import java.util.ArrayList; import java.util.Iterator; import java.util.List; @@ -99,7 +103,7 @@ private static StorageException translate(GoogleJsonError exception) { } @Override - public Bucket create(Bucket bucket, Map options) throws StorageException { + public Bucket create(Bucket bucket, Map options) { try { return storage.buckets() .insert(this.options.projectId(), bucket) @@ -114,7 +118,7 @@ public Bucket create(Bucket bucket, Map options) throws StorageExcept @Override public StorageObject create(StorageObject storageObject, final InputStream content, - Map options) throws StorageException { + Map options) { try { Storage.Objects.Insert insert = storage.objects() .insert(storageObject.getBucket(), storageObject, @@ -150,7 +154,7 @@ public Tuple> list(Map options) { } @Override - public Tuple> list(String bucket, Map options) { + public Tuple> list(final String bucket, Map options) { try { Objects objects = storage.objects() .list(bucket) @@ -162,13 +166,30 @@ public Tuple> list(String bucket, Map .setPageToken(PAGE_TOKEN.getString(options)) .setFields(FIELDS.getString(options)) .execute(); - return Tuple.>of( - objects.getNextPageToken(), objects.getItems()); + Iterable storageObjects = Iterables.concat( + firstNonNull(objects.getItems(), ImmutableList.of()), + objects.getPrefixes() != null + ? Lists.transform(objects.getPrefixes(), objectFromPrefix(bucket)) + : ImmutableList.of()); + return Tuple.of(objects.getNextPageToken(), storageObjects); } catch (IOException ex) { throw translate(ex); } } + private static Function objectFromPrefix(final String bucket) { + return new Function() { + @Override + public StorageObject apply(String prefix) { + return new StorageObject() + .set("isDirectory", true) + .setBucket(bucket) + .setName(prefix) + .setSize(BigInteger.ZERO); + } + }; + } + @Override public Bucket get(Bucket bucket, Map options) { try { @@ -296,11 +317,8 @@ private Storage.Objects.Delete deleteRequest(StorageObject blob, Map @Override public StorageObject compose(Iterable sources, StorageObject target, - Map targetOptions) throws StorageException { + Map targetOptions) { ComposeRequest request = new ComposeRequest(); - if (target.getContentType() == null) { - target.setContentType("application/octet-stream"); - } request.setDestination(target); List sourceObjects = new ArrayList<>(); for (StorageObject source : sources) { @@ -327,8 +345,7 @@ public StorageObject compose(Iterable sources, StorageObject targ } @Override - public byte[] load(StorageObject from, Map options) - throws StorageException { + public byte[] load(StorageObject from, Map options) { try { Storage.Objects.Get getRequest = storage.objects() .get(from.getBucket(), from.getName()) @@ -347,7 +364,7 @@ public byte[] load(StorageObject from, Map options) } @Override - public BatchResponse batch(BatchRequest request) throws StorageException { + public BatchResponse batch(BatchRequest request) { List>>> partitionedToDelete = Lists.partition(request.toDelete, MAX_BATCH_DELETES); Iterator>>> iterator = partitionedToDelete.iterator(); @@ -437,7 +454,7 @@ public void onFailure(GoogleJsonError e, HttpHeaders responseHeaders) { @Override public Tuple read(StorageObject from, Map options, long position, - int bytes) throws StorageException { + int bytes) { try { Get req = storage.objects() .get(from.getBucket(), from.getName()) @@ -454,20 +471,32 @@ public Tuple read(StorageObject from, Map options, lo 
String etag = req.getLastResponseHeaders().getETag(); return Tuple.of(etag, output.toByteArray()); } catch (IOException ex) { - throw translate(ex); + StorageException serviceException = translate(ex); + if (serviceException.code() == SC_REQUESTED_RANGE_NOT_SATISFIABLE) { + return Tuple.of(null, new byte[0]); + } + throw serviceException; } } @Override public void write(String uploadId, byte[] toWrite, int toWriteOffset, long destOffset, int length, - boolean last) throws StorageException { + boolean last) { try { + if (length == 0 && !last) { + return; + } GenericUrl url = new GenericUrl(uploadId); HttpRequest httpRequest = storage.getRequestFactory().buildPutRequest(url, new ByteArrayContent(null, toWrite, toWriteOffset, length)); long limit = destOffset + length; StringBuilder range = new StringBuilder("bytes "); - range.append(destOffset).append('-').append(limit - 1).append('/'); + if (length == 0) { + range.append('*'); + } else { + range.append(destOffset).append('-').append(limit - 1); + } + range.append('/'); if (last) { range.append(limit); } else { @@ -501,8 +530,7 @@ public void write(String uploadId, byte[] toWrite, int toWriteOffset, long destO } @Override - public String open(StorageObject object, Map options) - throws StorageException { + public String open(StorageObject object, Map options) { try { Insert req = storage.objects().insert(object.getBucket(), object); GenericUrl url = req.buildHttpRequest().getUrl(); @@ -523,7 +551,7 @@ public String open(StorageObject object, Map options) HttpRequest httpRequest = requestFactory.buildPostRequest(url, new JsonHttpContent(jsonFactory, object)); httpRequest.getHeaders().set("X-Upload-Content-Type", - MoreObjects.firstNonNull(object.getContentType(), "application/octet-stream")); + firstNonNull(object.getContentType(), "application/octet-stream")); HttpResponse response = httpRequest.execute(); if (response.getStatusCode() != 200) { GoogleJsonError error = new GoogleJsonError(); @@ -538,22 +566,22 @@ public String open(StorageObject object, Map options) } @Override - public RewriteResponse openRewrite(RewriteRequest rewriteRequest) throws StorageException { + public RewriteResponse openRewrite(RewriteRequest rewriteRequest) { return rewrite(rewriteRequest, null); } @Override - public RewriteResponse continueRewrite(RewriteResponse previousResponse) throws StorageException { + public RewriteResponse continueRewrite(RewriteResponse previousResponse) { return rewrite(previousResponse.rewriteRequest, previousResponse.rewriteToken); } - private RewriteResponse rewrite(RewriteRequest req, String token) throws StorageException { + private RewriteResponse rewrite(RewriteRequest req, String token) { try { Long maxBytesRewrittenPerCall = req.megabytesRewrittenPerCall != null ? req.megabytesRewrittenPerCall * MEGABYTE : null; com.google.api.services.storage.model.RewriteResponse rewriteResponse = storage.objects() .rewrite(req.source.getBucket(), req.source.getName(), req.target.getBucket(), - req.target.getName(), req.target.getContentType() != null ? req.target : null) + req.target.getName(), req.overrideInfo ? 
req.target : null) .setSourceGeneration(req.source.getGeneration()) .setRewriteToken(token) .setMaxBytesRewrittenPerCall(maxBytesRewrittenPerCall) diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/spi/StorageRpc.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/spi/StorageRpc.java similarity index 72% rename from gcloud-java-storage/src/main/java/com/google/gcloud/spi/StorageRpc.java rename to gcloud-java-storage/src/main/java/com/google/gcloud/storage/spi/StorageRpc.java index e15a27114810..74f8171de87f 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/spi/StorageRpc.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/spi/StorageRpc.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package com.google.gcloud.spi; +package com.google.gcloud.storage.spi; import static com.google.common.base.MoreObjects.firstNonNull; @@ -138,15 +138,17 @@ class RewriteRequest { public final StorageObject source; public final Map sourceOptions; + public final boolean overrideInfo; public final StorageObject target; public final Map targetOptions; public final Long megabytesRewrittenPerCall; public RewriteRequest(StorageObject source, Map sourceOptions, - StorageObject target, Map targetOptions, + boolean overrideInfo, StorageObject target, Map targetOptions, Long megabytesRewrittenPerCall) { this.source = source; this.sourceOptions = sourceOptions; + this.overrideInfo = overrideInfo; this.target = target; this.targetOptions = targetOptions; this.megabytesRewrittenPerCall = megabytesRewrittenPerCall; @@ -163,6 +165,7 @@ public boolean equals(Object obj) { final RewriteRequest other = (RewriteRequest) obj; return Objects.equals(this.source, other.source) && Objects.equals(this.sourceOptions, other.sourceOptions) + && Objects.equals(this.overrideInfo, other.overrideInfo) && Objects.equals(this.target, other.target) && Objects.equals(this.targetOptions, other.targetOptions) && Objects.equals(this.megabytesRewrittenPerCall, other.megabytesRewrittenPerCall); @@ -170,7 +173,8 @@ public boolean equals(Object obj) { @Override public int hashCode() { - return Objects.hash(source, sourceOptions, target, targetOptions, megabytesRewrittenPerCall); + return Objects.hash(source, sourceOptions, overrideInfo, target, targetOptions, + megabytesRewrittenPerCall); } } @@ -217,57 +221,134 @@ public int hashCode() { } } - Bucket create(Bucket bucket, Map options) throws StorageException; + /** + * Creates a new bucket. + * + * @throws StorageException upon failure + */ + Bucket create(Bucket bucket, Map options); - StorageObject create(StorageObject object, InputStream content, Map options) - throws StorageException; + /** + * Creates a new storage object. + * + * @throws StorageException upon failure + */ + StorageObject create(StorageObject object, InputStream content, Map options); - Tuple> list(Map options) throws StorageException; + /** + * Lists the project's buckets. + * + * @throws StorageException upon failure + */ + Tuple> list(Map options); - Tuple> list(String bucket, Map options) - throws StorageException; + /** + * Lists the bucket's blobs. + * + * @throws StorageException upon failure + */ + Tuple> list(String bucket, Map options); /** * Returns the requested bucket or {@code null} if not found. * * @throws StorageException upon failure */ - Bucket get(Bucket bucket, Map options) throws StorageException; + Bucket get(Bucket bucket, Map options); /** * Returns the requested storage object or {@code null} if not found. 
* * @throws StorageException upon failure */ - StorageObject get(StorageObject object, Map options) - throws StorageException; + StorageObject get(StorageObject object, Map options); - Bucket patch(Bucket bucket, Map options) throws StorageException; + /** + * Updates bucket information. + * + * @throws StorageException upon failure + */ + Bucket patch(Bucket bucket, Map options); - StorageObject patch(StorageObject storageObject, Map options) - throws StorageException; + /** + * Updates the storage object's information. Original metadata are merged with metadata in the + * provided {@code storageObject}. + * + * @throws StorageException upon failure + */ + StorageObject patch(StorageObject storageObject, Map options); - boolean delete(Bucket bucket, Map options) throws StorageException; + /** + * Deletes the requested bucket. + * + * @return {@code true} if the bucket was deleted, {@code false} if it was not found + * @throws StorageException upon failure + */ + boolean delete(Bucket bucket, Map options); - boolean delete(StorageObject object, Map options) throws StorageException; + /** + * Deletes the requested storage object. + * + * @return {@code true} if the storage object was deleted, {@code false} if it was not found + * @throws StorageException upon failure + */ + boolean delete(StorageObject object, Map options); - BatchResponse batch(BatchRequest request) throws StorageException; + /** + * Sends a batch request. + * + * @throws StorageException upon failure + */ + BatchResponse batch(BatchRequest request); + /** + * Sends a compose request. + * + * @throws StorageException upon failure + */ StorageObject compose(Iterable sources, StorageObject target, - Map targetOptions) throws StorageException; + Map targetOptions); - byte[] load(StorageObject storageObject, Map options) - throws StorageException; + /** + * Reads all the bytes from a storage object. + * + * @throws StorageException upon failure + */ + byte[] load(StorageObject storageObject, Map options); - Tuple read(StorageObject from, Map options, long position, int bytes) - throws StorageException; + /** + * Reads the given amount of bytes from a storage object at the given position. + * + * @throws StorageException upon failure + */ + Tuple read(StorageObject from, Map options, long position, int bytes); - String open(StorageObject object, Map options) throws StorageException; + /** + * Opens a resumable upload channel for a given storage object. + * + * @throws StorageException upon failure + */ + String open(StorageObject object, Map options); + /** + * Writes the provided bytes to a storage object at the provided location. + * + * @throws StorageException upon failure + */ void write(String uploadId, byte[] toWrite, int toWriteOffset, long destOffset, int length, - boolean last) throws StorageException; + boolean last); - RewriteResponse openRewrite(RewriteRequest rewriteRequest) throws StorageException; + /** + * Sends a rewrite request to open a rewrite channel. + * + * @throws StorageException upon failure + */ + RewriteResponse openRewrite(RewriteRequest rewriteRequest); - RewriteResponse continueRewrite(RewriteResponse previousResponse) throws StorageException; + /** + * Continues rewriting on an already open rewrite channel. 
+ * + * @throws StorageException upon failure + */ + RewriteResponse continueRewrite(RewriteResponse previousResponse); } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/spi/StorageRpcFactory.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/spi/StorageRpcFactory.java similarity index 91% rename from gcloud-java-storage/src/main/java/com/google/gcloud/spi/StorageRpcFactory.java rename to gcloud-java-storage/src/main/java/com/google/gcloud/storage/spi/StorageRpcFactory.java index f4959d617d17..19b98e6273db 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/spi/StorageRpcFactory.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/spi/StorageRpcFactory.java @@ -14,8 +14,9 @@ * limitations under the License. */ -package com.google.gcloud.spi; +package com.google.gcloud.storage.spi; +import com.google.gcloud.spi.ServiceRpcFactory; import com.google.gcloud.storage.StorageOptions; /** diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobInfoTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobInfoTest.java index a1cc01f4287c..029181c6c07b 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobInfoTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobInfoTest.java @@ -20,7 +20,11 @@ import static com.google.gcloud.storage.Acl.Role.READER; import static com.google.gcloud.storage.Acl.Role.WRITER; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import com.google.api.services.storage.model.StorageObject; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.gcloud.storage.Acl.Project; @@ -28,6 +32,7 @@ import org.junit.Test; +import java.math.BigInteger; import java.util.List; import java.util.Map; @@ -76,6 +81,10 @@ public class BlobInfoTest { .size(SIZE) .updateTime(UPDATE_TIME) .build(); + private static final BlobInfo DIRECTORY_INFO = BlobInfo.builder("b", "n/") + .size(0L) + .isDirectory(true) + .build(); @Test public void testToBuilder() { @@ -118,6 +127,30 @@ public void testBuilder() { assertEquals(SELF_LINK, BLOB_INFO.selfLink()); assertEquals(SIZE, BLOB_INFO.size()); assertEquals(UPDATE_TIME, BLOB_INFO.updateTime()); + assertFalse(BLOB_INFO.isDirectory()); + assertEquals("b", DIRECTORY_INFO.bucket()); + assertEquals("n/", DIRECTORY_INFO.name()); + assertNull(DIRECTORY_INFO.acl()); + assertNull(DIRECTORY_INFO.componentCount()); + assertNull(DIRECTORY_INFO.contentType()); + assertNull(DIRECTORY_INFO.cacheControl()); + assertNull(DIRECTORY_INFO.contentDisposition()); + assertNull(DIRECTORY_INFO.contentEncoding()); + assertNull(DIRECTORY_INFO.contentLanguage()); + assertNull(DIRECTORY_INFO.crc32c()); + assertNull(DIRECTORY_INFO.deleteTime()); + assertNull(DIRECTORY_INFO.etag()); + assertNull(DIRECTORY_INFO.generation()); + assertNull(DIRECTORY_INFO.id()); + assertNull(DIRECTORY_INFO.md5()); + assertNull(DIRECTORY_INFO.mediaLink()); + assertNull(DIRECTORY_INFO.metadata()); + assertNull(DIRECTORY_INFO.metageneration()); + assertNull(DIRECTORY_INFO.owner()); + assertNull(DIRECTORY_INFO.selfLink()); + assertEquals(0L, (long) DIRECTORY_INFO.size()); + assertNull(DIRECTORY_INFO.updateTime()); + assertTrue(DIRECTORY_INFO.isDirectory()); } private void compareBlobs(BlobInfo expected, BlobInfo value) { @@ -151,6 +184,35 @@ public void testToPbAndFromPb() { 
compareBlobs(BLOB_INFO, BlobInfo.fromPb(BLOB_INFO.toPb())); BlobInfo blobInfo = BlobInfo.builder(BlobId.of("b", "n")).build(); compareBlobs(blobInfo, BlobInfo.fromPb(blobInfo.toPb())); + StorageObject object = new StorageObject() + .setName("n/") + .setBucket("b") + .setSize(BigInteger.ZERO) + .set("isDirectory", true); + blobInfo = BlobInfo.fromPb(object); + assertEquals("b", blobInfo.bucket()); + assertEquals("n/", blobInfo.name()); + assertNull(blobInfo.acl()); + assertNull(blobInfo.componentCount()); + assertNull(blobInfo.contentType()); + assertNull(blobInfo.cacheControl()); + assertNull(blobInfo.contentDisposition()); + assertNull(blobInfo.contentEncoding()); + assertNull(blobInfo.contentLanguage()); + assertNull(blobInfo.crc32c()); + assertNull(blobInfo.deleteTime()); + assertNull(blobInfo.etag()); + assertNull(blobInfo.generation()); + assertNull(blobInfo.id()); + assertNull(blobInfo.md5()); + assertNull(blobInfo.mediaLink()); + assertNull(blobInfo.metadata()); + assertNull(blobInfo.metageneration()); + assertNull(blobInfo.owner()); + assertNull(blobInfo.selfLink()); + assertEquals(0L, (long) blobInfo.size()); + assertNull(blobInfo.updateTime()); + assertTrue(blobInfo.isDirectory()); } @Test diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobReadChannelTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobReadChannelTest.java index 5dc947df51f8..1b0f36a864a2 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobReadChannelTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobReadChannelTest.java @@ -30,8 +30,8 @@ import com.google.gcloud.ReadChannel; import com.google.gcloud.RestorableState; import com.google.gcloud.RetryParams; -import com.google.gcloud.spi.StorageRpc; -import com.google.gcloud.spi.StorageRpcFactory; +import com.google.gcloud.storage.spi.StorageRpc; +import com.google.gcloud.storage.spi.StorageRpcFactory; import org.junit.After; import org.junit.Before; diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobTest.java index c7508593f8c9..d6c97ca9ca03 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobTest.java @@ -95,6 +95,10 @@ public class BlobTest { .updateTime(UPDATE_TIME) .build(); private static final BlobInfo BLOB_INFO = BlobInfo.builder("b", "n").metageneration(42L).build(); + private static final BlobInfo DIRECTORY_INFO = BlobInfo.builder("b", "n/") + .size(0L) + .isDirectory(true) + .build(); private Storage storage; private Blob blob; @@ -229,6 +233,7 @@ public void testCopyToBucket() throws Exception { assertEquals(copyWriter, returnedCopyWriter); assertEquals(capturedCopyRequest.getValue().source(), blob.blobId()); assertEquals(capturedCopyRequest.getValue().target(), target); + assertFalse(capturedCopyRequest.getValue().overrideInfo()); assertTrue(capturedCopyRequest.getValue().sourceOptions().isEmpty()); assertTrue(capturedCopyRequest.getValue().targetOptions().isEmpty()); } @@ -247,6 +252,7 @@ public void testCopyTo() throws Exception { assertEquals(copyWriter, returnedCopyWriter); assertEquals(capturedCopyRequest.getValue().source(), blob.blobId()); assertEquals(capturedCopyRequest.getValue().target(), target); + assertFalse(capturedCopyRequest.getValue().overrideInfo()); assertTrue(capturedCopyRequest.getValue().sourceOptions().isEmpty()); 
assertTrue(capturedCopyRequest.getValue().targetOptions().isEmpty()); } @@ -254,9 +260,9 @@ public void testCopyTo() throws Exception { @Test public void testCopyToBlobId() throws Exception { initializeExpectedBlob(2); + BlobInfo target = BlobInfo.builder(BlobId.of("bt", "nt")).build(); BlobId targetId = BlobId.of("bt", "nt"); CopyWriter copyWriter = createMock(CopyWriter.class); - BlobInfo target = BlobInfo.builder(targetId).build(); Capture capturedCopyRequest = Capture.newInstance(); expect(storage.options()).andReturn(mockOptions); expect(storage.copy(capture(capturedCopyRequest))).andReturn(copyWriter); @@ -266,6 +272,7 @@ public void testCopyToBlobId() throws Exception { assertEquals(copyWriter, returnedCopyWriter); assertEquals(capturedCopyRequest.getValue().source(), blob.blobId()); assertEquals(capturedCopyRequest.getValue().target(), target); + assertFalse(capturedCopyRequest.getValue().overrideInfo()); assertTrue(capturedCopyRequest.getValue().sourceOptions().isEmpty()); assertTrue(capturedCopyRequest.getValue().targetOptions().isEmpty()); } @@ -305,18 +312,20 @@ public void testSignUrl() throws Exception { @Test public void testToBuilder() { - expect(storage.options()).andReturn(mockOptions).times(4); + expect(storage.options()).andReturn(mockOptions).times(6); replay(storage); Blob fullBlob = new Blob(storage, new BlobInfo.BuilderImpl(FULL_BLOB_INFO)); assertEquals(fullBlob, fullBlob.toBuilder().build()); Blob simpleBlob = new Blob(storage, new BlobInfo.BuilderImpl(BLOB_INFO)); assertEquals(simpleBlob, simpleBlob.toBuilder().build()); + Blob directory = new Blob(storage, new BlobInfo.BuilderImpl(DIRECTORY_INFO)); + assertEquals(directory, directory.toBuilder().build()); } @Test public void testBuilder() { initializeExpectedBlob(4); - expect(storage.options()).andReturn(mockOptions).times(2); + expect(storage.options()).andReturn(mockOptions).times(4); replay(storage); Blob.Builder builder = new Blob.Builder(new Blob(storage, new BlobInfo.BuilderImpl(BLOB_INFO))); Blob blob = builder.acl(ACL) @@ -360,5 +369,33 @@ public void testBuilder() { assertEquals(SELF_LINK, blob.selfLink()); assertEquals(SIZE, blob.size()); assertEquals(UPDATE_TIME, blob.updateTime()); + assertFalse(blob.isDirectory()); + builder = new Blob.Builder(new Blob(storage, new BlobInfo.BuilderImpl(DIRECTORY_INFO))); + blob = builder.blobId(BlobId.of("b", "n/")) + .isDirectory(true) + .size(0L) + .build(); + assertEquals("b", blob.bucket()); + assertEquals("n/", blob.name()); + assertNull(blob.acl()); + assertNull(blob.componentCount()); + assertNull(blob.contentType()); + assertNull(blob.cacheControl()); + assertNull(blob.contentDisposition()); + assertNull(blob.contentEncoding()); + assertNull(blob.contentLanguage()); + assertNull(blob.crc32c()); + assertNull(blob.deleteTime()); + assertNull(blob.etag()); + assertNull(blob.id()); + assertNull(blob.md5()); + assertNull(blob.mediaLink()); + assertNull(blob.metadata()); + assertNull(blob.metageneration()); + assertNull(blob.owner()); + assertNull(blob.selfLink()); + assertEquals(0L, (long) blob.size()); + assertNull(blob.updateTime()); + assertTrue(blob.isDirectory()); } } diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobWriteChannelTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobWriteChannelTest.java index e499f6b9de52..18ec64a9575f 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobWriteChannelTest.java +++ 
b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobWriteChannelTest.java @@ -34,8 +34,8 @@ import com.google.gcloud.RestorableState; import com.google.gcloud.RetryParams; import com.google.gcloud.WriteChannel; -import com.google.gcloud.spi.StorageRpc; -import com.google.gcloud.spi.StorageRpcFactory; +import com.google.gcloud.storage.spi.StorageRpc; +import com.google.gcloud.storage.spi.StorageRpcFactory; import org.easymock.Capture; import org.easymock.CaptureType; diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BucketTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BucketTest.java index 236411e0c2d8..53056c39c0dc 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BucketTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BucketTest.java @@ -293,16 +293,16 @@ public void testCreate() throws Exception { } @Test - public void testCreateNullContentType() throws Exception { + public void testCreateNoContentType() throws Exception { initializeExpectedBucket(5); - BlobInfo info = BlobInfo.builder("b", "n").contentType(Storage.DEFAULT_CONTENT_TYPE).build(); + BlobInfo info = BlobInfo.builder("b", "n").build(); Blob expectedBlob = new Blob(serviceMockReturnsOptions, new BlobInfo.BuilderImpl(info)); byte[] content = {0xD, 0xE, 0xA, 0xD}; expect(storage.options()).andReturn(mockOptions); expect(storage.create(info, content)).andReturn(expectedBlob); replay(storage); initializeBucket(); - Blob blob = bucket.create("n", content, null); + Blob blob = bucket.create("n", content); assertEquals(expectedBlob, blob); } @@ -388,9 +388,9 @@ public void testCreateFromStream() throws Exception { } @Test - public void testCreateFromStreamNullContentType() throws Exception { + public void testCreateFromStreamNoContentType() throws Exception { initializeExpectedBucket(5); - BlobInfo info = BlobInfo.builder("b", "n").contentType(Storage.DEFAULT_CONTENT_TYPE).build(); + BlobInfo info = BlobInfo.builder("b", "n").build(); Blob expectedBlob = new Blob(serviceMockReturnsOptions, new BlobInfo.BuilderImpl(info)); byte[] content = {0xD, 0xE, 0xA, 0xD}; InputStream streamContent = new ByteArrayInputStream(content); @@ -398,7 +398,7 @@ public void testCreateFromStreamNullContentType() throws Exception { expect(storage.create(info, streamContent)).andReturn(expectedBlob); replay(storage); initializeBucket(); - Blob blob = bucket.create("n", streamContent, null); + Blob blob = bucket.create("n", streamContent); assertEquals(expectedBlob, blob); } diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/CopyRequestTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/CopyRequestTest.java index b7e8d14e53a1..9f8edfb84162 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/CopyRequestTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/CopyRequestTest.java @@ -18,6 +18,8 @@ import static com.google.gcloud.storage.Storage.PredefinedAcl.PUBLIC_READ; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; import com.google.common.collect.ImmutableList; import com.google.gcloud.storage.Storage.BlobSourceOption; @@ -53,6 +55,7 @@ public void testCopyRequest() { assertEquals(1, copyRequest1.sourceOptions().size()); assertEquals(BlobSourceOption.generationMatch(1), copyRequest1.sourceOptions().get(0)); assertEquals(TARGET_BLOB_INFO, copyRequest1.target()); + 
assertTrue(copyRequest1.overrideInfo()); assertEquals(1, copyRequest1.targetOptions().size()); assertEquals(BlobTargetOption.predefinedAcl(PUBLIC_READ), copyRequest1.targetOptions().get(0)); @@ -62,6 +65,7 @@ public void testCopyRequest() { .build(); assertEquals(SOURCE_BLOB_ID, copyRequest2.source()); assertEquals(BlobInfo.builder(TARGET_BLOB_ID).build(), copyRequest2.target()); + assertFalse(copyRequest2.overrideInfo()); Storage.CopyRequest copyRequest3 = Storage.CopyRequest.builder() .source(SOURCE_BLOB_ID) @@ -69,6 +73,7 @@ public void testCopyRequest() { .build(); assertEquals(SOURCE_BLOB_ID, copyRequest3.source()); assertEquals(TARGET_BLOB_INFO, copyRequest3.target()); + assertTrue(copyRequest3.overrideInfo()); assertEquals(ImmutableList.of(BlobTargetOption.predefinedAcl(PUBLIC_READ)), copyRequest3.targetOptions()); } @@ -78,52 +83,36 @@ public void testCopyRequestOf() { Storage.CopyRequest copyRequest1 = Storage.CopyRequest.of(SOURCE_BLOB_ID, TARGET_BLOB_INFO); assertEquals(SOURCE_BLOB_ID, copyRequest1.source()); assertEquals(TARGET_BLOB_INFO, copyRequest1.target()); + assertTrue(copyRequest1.overrideInfo()); Storage.CopyRequest copyRequest2 = Storage.CopyRequest.of(SOURCE_BLOB_ID, TARGET_BLOB_NAME); assertEquals(SOURCE_BLOB_ID, copyRequest2.source()); - assertEquals(BlobInfo.builder(SOURCE_BUCKET_NAME, TARGET_BLOB_NAME).build(), + assertEquals(BlobInfo.builder(BlobId.of(SOURCE_BUCKET_NAME, TARGET_BLOB_NAME)).build(), copyRequest2.target()); + assertFalse(copyRequest2.overrideInfo()); Storage.CopyRequest copyRequest3 = Storage.CopyRequest.of(SOURCE_BUCKET_NAME, SOURCE_BLOB_NAME, TARGET_BLOB_INFO); assertEquals(SOURCE_BLOB_ID, copyRequest3.source()); assertEquals(TARGET_BLOB_INFO, copyRequest3.target()); + assertTrue(copyRequest3.overrideInfo()); Storage.CopyRequest copyRequest4 = Storage.CopyRequest.of(SOURCE_BUCKET_NAME, SOURCE_BLOB_NAME, TARGET_BLOB_NAME); assertEquals(SOURCE_BLOB_ID, copyRequest4.source()); - assertEquals(BlobInfo.builder(SOURCE_BUCKET_NAME, TARGET_BLOB_NAME).build(), + assertEquals(BlobInfo.builder(BlobId.of(SOURCE_BUCKET_NAME, TARGET_BLOB_NAME)).build(), copyRequest4.target()); + assertFalse(copyRequest4.overrideInfo()); Storage.CopyRequest copyRequest5 = Storage.CopyRequest.of(SOURCE_BLOB_ID, TARGET_BLOB_ID); assertEquals(SOURCE_BLOB_ID, copyRequest5.source()); assertEquals(BlobInfo.builder(TARGET_BLOB_ID).build(), copyRequest5.target()); + assertFalse(copyRequest5.overrideInfo()); Storage.CopyRequest copyRequest6 = Storage.CopyRequest.of(SOURCE_BUCKET_NAME, SOURCE_BLOB_NAME, TARGET_BLOB_ID); assertEquals(SOURCE_BLOB_ID, copyRequest6.source()); assertEquals(BlobInfo.builder(TARGET_BLOB_ID).build(), copyRequest6.target()); - } - - @Test - public void testCopyRequestFail() { - thrown.expect(IllegalArgumentException.class); - Storage.CopyRequest.builder() - .source(SOURCE_BLOB_ID) - .target(BlobInfo.builder(TARGET_BLOB_ID).build()) - .build(); - } - - @Test - public void testCopyRequestOfBlobInfoFail() { - thrown.expect(IllegalArgumentException.class); - Storage.CopyRequest.of(SOURCE_BLOB_ID, BlobInfo.builder(TARGET_BLOB_ID).build()); - } - - @Test - public void testCopyRequestOfStringFail() { - thrown.expect(IllegalArgumentException.class); - Storage.CopyRequest.of( - SOURCE_BUCKET_NAME, SOURCE_BLOB_NAME, BlobInfo.builder(TARGET_BLOB_ID).build()); + assertFalse(copyRequest6.overrideInfo()); } } diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/CopyWriterTest.java 
b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/CopyWriterTest.java index 1b1ffd987de6..8ccb81688b65 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/CopyWriterTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/CopyWriterTest.java @@ -27,10 +27,10 @@ import com.google.common.collect.ImmutableMap; import com.google.gcloud.RestorableState; import com.google.gcloud.RetryParams; -import com.google.gcloud.spi.StorageRpc; -import com.google.gcloud.spi.StorageRpc.RewriteRequest; -import com.google.gcloud.spi.StorageRpc.RewriteResponse; -import com.google.gcloud.spi.StorageRpcFactory; +import com.google.gcloud.storage.spi.StorageRpc; +import com.google.gcloud.storage.spi.StorageRpc.RewriteRequest; +import com.google.gcloud.storage.spi.StorageRpc.RewriteResponse; +import com.google.gcloud.storage.spi.StorageRpcFactory; import org.easymock.EasyMock; import org.junit.After; @@ -48,20 +48,29 @@ public class CopyWriterTest { private static final BlobId BLOB_ID = BlobId.of(SOURCE_BUCKET_NAME, SOURCE_BLOB_NAME); private static final BlobInfo BLOB_INFO = BlobInfo.builder(DESTINATION_BUCKET_NAME, DESTINATION_BLOB_NAME).build(); - private static final BlobInfo RESULT = + private static final BlobInfo RESULT_INFO = BlobInfo.builder(DESTINATION_BUCKET_NAME, DESTINATION_BLOB_NAME).contentType("type").build(); private static final Map EMPTY_OPTIONS = ImmutableMap.of(); - private static final RewriteRequest REQUEST = new StorageRpc.RewriteRequest(BLOB_ID.toPb(), - EMPTY_OPTIONS, BLOB_INFO.toPb(), EMPTY_OPTIONS, null); - private static final RewriteResponse RESPONSE = new StorageRpc.RewriteResponse(REQUEST, - null, 42L, false, "token", 21L); - private static final RewriteResponse RESPONSE_DONE = new StorageRpc.RewriteResponse(REQUEST, - RESULT.toPb(), 42L, true, "token", 42L); + private static final RewriteRequest REQUEST_WITH_OBJECT = + new StorageRpc.RewriteRequest(BLOB_ID.toPb(), EMPTY_OPTIONS, true, BLOB_INFO.toPb(), + EMPTY_OPTIONS, null); + private static final RewriteRequest REQUEST_WITHOUT_OBJECT = + new StorageRpc.RewriteRequest(BLOB_ID.toPb(), EMPTY_OPTIONS, false, BLOB_INFO.toPb(), + EMPTY_OPTIONS, null); + private static final RewriteResponse RESPONSE_WITH_OBJECT = new RewriteResponse( + REQUEST_WITH_OBJECT, null, 42L, false, "token", 21L); + private static final RewriteResponse RESPONSE_WITHOUT_OBJECT = new RewriteResponse( + REQUEST_WITHOUT_OBJECT, null, 42L, false, "token", 21L); + private static final RewriteResponse RESPONSE_WITH_OBJECT_DONE = + new RewriteResponse(REQUEST_WITH_OBJECT, RESULT_INFO.toPb(), 42L, true, "token", 42L); + private static final RewriteResponse RESPONSE_WITHOUT_OBJECT_DONE = + new RewriteResponse(REQUEST_WITHOUT_OBJECT, RESULT_INFO.toPb(), 42L, true, "token", 42L); private StorageOptions options; private StorageRpcFactory rpcFactoryMock; private StorageRpc storageRpcMock; private CopyWriter copyWriter; + private Blob result; @Before public void setUp() { @@ -75,6 +84,7 @@ public void setUp() { .serviceRpcFactory(rpcFactoryMock) .retryParams(RetryParams.noRetries()) .build(); + result = new Blob(options.service(), new BlobInfo.BuilderImpl(RESULT_INFO)); } @After @@ -83,41 +93,111 @@ public void tearDown() throws Exception { } @Test - public void testRewrite() { - EasyMock.expect(storageRpcMock.continueRewrite(RESPONSE)).andReturn(RESPONSE_DONE); + public void testRewriteWithObject() { + EasyMock.expect(storageRpcMock.continueRewrite(RESPONSE_WITH_OBJECT)) + .andReturn(RESPONSE_WITH_OBJECT_DONE); 
EasyMock.replay(storageRpcMock); - copyWriter = new CopyWriter(options, RESPONSE); - assertEquals(RESULT, copyWriter.result()); + copyWriter = new CopyWriter(options, RESPONSE_WITH_OBJECT); + assertEquals(result, copyWriter.result()); assertTrue(copyWriter.isDone()); assertEquals(42L, copyWriter.totalBytesCopied()); assertEquals(42L, copyWriter.blobSize()); } @Test - public void testRewriteMultipleRequests() { - EasyMock.expect(storageRpcMock.continueRewrite(RESPONSE)).andReturn(RESPONSE); - EasyMock.expect(storageRpcMock.continueRewrite(RESPONSE)).andReturn(RESPONSE_DONE); + public void testRewriteWithoutObject() { + EasyMock.expect(storageRpcMock.continueRewrite(RESPONSE_WITHOUT_OBJECT)) + .andReturn(RESPONSE_WITHOUT_OBJECT_DONE); EasyMock.replay(storageRpcMock); - copyWriter = new CopyWriter(options, RESPONSE); - assertEquals(RESULT, copyWriter.result()); + copyWriter = new CopyWriter(options, RESPONSE_WITHOUT_OBJECT); + assertEquals(result, copyWriter.result()); assertTrue(copyWriter.isDone()); assertEquals(42L, copyWriter.totalBytesCopied()); assertEquals(42L, copyWriter.blobSize()); } @Test - public void testSaveAndRestore() { - EasyMock.expect(storageRpcMock.continueRewrite(RESPONSE)).andReturn(RESPONSE); - EasyMock.expect(storageRpcMock.continueRewrite(RESPONSE)).andReturn(RESPONSE_DONE); + public void testRewriteWithObjectMultipleRequests() { + EasyMock.expect(storageRpcMock.continueRewrite(RESPONSE_WITH_OBJECT)) + .andReturn(RESPONSE_WITH_OBJECT); + EasyMock.expect(storageRpcMock.continueRewrite(RESPONSE_WITH_OBJECT)) + .andReturn(RESPONSE_WITH_OBJECT_DONE); EasyMock.replay(storageRpcMock); - copyWriter = new CopyWriter(options, RESPONSE); + copyWriter = new CopyWriter(options, RESPONSE_WITH_OBJECT); + assertEquals(result, copyWriter.result()); + assertTrue(copyWriter.isDone()); + assertEquals(42L, copyWriter.totalBytesCopied()); + assertEquals(42L, copyWriter.blobSize()); + } + + @Test + public void testRewriteWithoutObjectMultipleRequests() { + EasyMock.expect(storageRpcMock.continueRewrite(RESPONSE_WITHOUT_OBJECT)) + .andReturn(RESPONSE_WITHOUT_OBJECT); + EasyMock.expect(storageRpcMock.continueRewrite(RESPONSE_WITHOUT_OBJECT)) + .andReturn(RESPONSE_WITHOUT_OBJECT_DONE); + EasyMock.replay(storageRpcMock); + copyWriter = new CopyWriter(options, RESPONSE_WITHOUT_OBJECT); + assertEquals(result, copyWriter.result()); + assertTrue(copyWriter.isDone()); + assertEquals(42L, copyWriter.totalBytesCopied()); + assertEquals(42L, copyWriter.blobSize()); + } + + @Test + public void testSaveAndRestoreWithObject() { + EasyMock.expect(storageRpcMock.continueRewrite(RESPONSE_WITH_OBJECT)) + .andReturn(RESPONSE_WITH_OBJECT); + EasyMock.expect(storageRpcMock.continueRewrite(RESPONSE_WITH_OBJECT)) + .andReturn(RESPONSE_WITH_OBJECT_DONE); + EasyMock.replay(storageRpcMock); + copyWriter = new CopyWriter(options, RESPONSE_WITH_OBJECT); + copyWriter.copyChunk(); + assertTrue(!copyWriter.isDone()); + assertEquals(21L, copyWriter.totalBytesCopied()); + assertEquals(42L, copyWriter.blobSize()); + RestorableState rewriterState = copyWriter.capture(); + CopyWriter restoredRewriter = rewriterState.restore(); + assertEquals(result, restoredRewriter.result()); + assertTrue(restoredRewriter.isDone()); + assertEquals(42L, restoredRewriter.totalBytesCopied()); + assertEquals(42L, restoredRewriter.blobSize()); + } + + @Test + public void testSaveAndRestoreWithoutObject() { + EasyMock.expect(storageRpcMock.continueRewrite(RESPONSE_WITHOUT_OBJECT)) + .andReturn(RESPONSE_WITHOUT_OBJECT); + 
EasyMock.expect(storageRpcMock.continueRewrite(RESPONSE_WITHOUT_OBJECT)) + .andReturn(RESPONSE_WITHOUT_OBJECT_DONE); + EasyMock.replay(storageRpcMock); + copyWriter = new CopyWriter(options, RESPONSE_WITHOUT_OBJECT); copyWriter.copyChunk(); assertTrue(!copyWriter.isDone()); assertEquals(21L, copyWriter.totalBytesCopied()); assertEquals(42L, copyWriter.blobSize()); RestorableState rewriterState = copyWriter.capture(); CopyWriter restoredRewriter = rewriterState.restore(); - assertEquals(RESULT, restoredRewriter.result()); + assertEquals(result, restoredRewriter.result()); + assertTrue(restoredRewriter.isDone()); + assertEquals(42L, restoredRewriter.totalBytesCopied()); + assertEquals(42L, restoredRewriter.blobSize()); + } + + @Test + public void testSaveAndRestoreWithResult() { + EasyMock.expect(storageRpcMock.continueRewrite(RESPONSE_WITH_OBJECT)) + .andReturn(RESPONSE_WITH_OBJECT_DONE); + EasyMock.replay(storageRpcMock); + copyWriter = new CopyWriter(options, RESPONSE_WITH_OBJECT); + copyWriter.copyChunk(); + assertEquals(result, copyWriter.result()); + assertTrue(copyWriter.isDone()); + assertEquals(42L, copyWriter.totalBytesCopied()); + assertEquals(42L, copyWriter.blobSize()); + RestorableState rewriterState = copyWriter.capture(); + CopyWriter restoredRewriter = rewriterState.restore(); + assertEquals(result, restoredRewriter.result()); assertTrue(restoredRewriter.isDone()); assertEquals(42L, restoredRewriter.totalBytesCopied()); assertEquals(42L, restoredRewriter.blobSize()); diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/OptionTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/OptionTest.java index 2703ddb401c5..5924174ab138 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/OptionTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/OptionTest.java @@ -18,7 +18,7 @@ import static org.junit.Assert.assertEquals; -import com.google.gcloud.spi.StorageRpc; +import com.google.gcloud.storage.spi.StorageRpc; import org.junit.Test; diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/RemoteGcsHelperTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/RemoteGcsHelperTest.java index 154554a029fe..146922a9dae9 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/RemoteGcsHelperTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/RemoteGcsHelperTest.java @@ -132,7 +132,8 @@ public void testForceDelete() throws InterruptedException, ExecutionException { @Test public void testForceDeleteTimeout() throws InterruptedException, ExecutionException { Storage storageMock = EasyMock.createMock(Storage.class); - EasyMock.expect(storageMock.list(BUCKET_NAME)).andReturn(blobPage).anyTimes(); + EasyMock.expect(storageMock.list(BUCKET_NAME, BlobListOption.versions(true))) + .andReturn(blobPage).anyTimes(); for (BlobInfo info : blobList) { EasyMock.expect(storageMock.delete(info.blobId())).andReturn(true).anyTimes(); } diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/SerializationTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/SerializationTest.java index c9b957bb936a..613cb81c3549 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/SerializationTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/SerializationTest.java @@ -16,31 +16,20 @@ package com.google.gcloud.storage; -import static org.junit.Assert.assertEquals; -import static 
diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/OptionTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/OptionTest.java
index 2703ddb401c5..5924174ab138 100644
--- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/OptionTest.java
+++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/OptionTest.java
@@ -18,7 +18,7 @@
 import static org.junit.Assert.assertEquals;
 
-import com.google.gcloud.spi.StorageRpc;
+import com.google.gcloud.storage.spi.StorageRpc;
 
 import org.junit.Test;
 
diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/RemoteGcsHelperTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/RemoteGcsHelperTest.java
index 154554a029fe..146922a9dae9 100644
--- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/RemoteGcsHelperTest.java
+++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/RemoteGcsHelperTest.java
@@ -132,7 +132,8 @@ public void testForceDelete() throws InterruptedException, ExecutionException {
   @Test
   public void testForceDeleteTimeout() throws InterruptedException, ExecutionException {
     Storage storageMock = EasyMock.createMock(Storage.class);
-    EasyMock.expect(storageMock.list(BUCKET_NAME)).andReturn(blobPage).anyTimes();
+    EasyMock.expect(storageMock.list(BUCKET_NAME, BlobListOption.versions(true)))
+        .andReturn(blobPage).anyTimes();
     for (BlobInfo info : blobList) {
       EasyMock.expect(storageMock.delete(info.blobId())).andReturn(true).anyTimes();
     }
diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/SerializationTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/SerializationTest.java
index c9b957bb936a..613cb81c3549 100644
--- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/SerializationTest.java
+++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/SerializationTest.java
@@ -16,31 +16,20 @@
 package com.google.gcloud.storage;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotSame;
-
 import com.google.common.collect.ImmutableMap;
 import com.google.gcloud.AuthCredentials;
+import com.google.gcloud.BaseSerializationTest;
 import com.google.gcloud.PageImpl;
 import com.google.gcloud.ReadChannel;
-import com.google.gcloud.RestorableState;
-import com.google.gcloud.RetryParams;
-import com.google.gcloud.WriteChannel;
-import com.google.gcloud.spi.StorageRpc;
+import com.google.gcloud.Restorable;
 import com.google.gcloud.storage.Acl.Project.ProjectRole;
+import com.google.gcloud.storage.spi.StorageRpc;
 
-import org.junit.Test;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.ObjectInputStream;
-import java.io.ObjectOutputStream;
 import java.io.Serializable;
 import java.util.Collections;
 import java.util.Map;
 
-public class SerializationTest {
+public class SerializationTest extends BaseSerializationTest {
 
   private static final Storage STORAGE = StorageOptions.builder().projectId("p").build().service();
   private static final Acl.Domain ACL_DOMAIN = new Acl.Domain("domain");
@@ -63,8 +52,9 @@ public class SerializationTest {
       Collections.>emptyList());
   private static final PageImpl PAGE_RESULT = new PageImpl<>(null, "c", Collections.singletonList(BLOB));
+  private static final StorageException STORAGE_EXCEPTION = new StorageException(42, "message");
   private static final Storage.BlobListOption BLOB_LIST_OPTIONS =
-      Storage.BlobListOption.maxResults(100);
+      Storage.BlobListOption.pageSize(100);
   private static final Storage.BlobSourceOption BLOB_SOURCE_OPTIONS =
       Storage.BlobSourceOption.generationMatch(1);
   private static final Storage.BlobTargetOption BLOB_TARGET_OPTIONS =
@@ -77,83 +67,32 @@ public class SerializationTest {
       Storage.BucketTargetOption.metagenerationNotMatch();
   private static final Map EMPTY_RPC_OPTIONS = ImmutableMap.of();
 
-  @Test
-  public void testServiceOptions() throws Exception {
+  @Override
+  protected Serializable[] serializableObjects() {
     StorageOptions options = StorageOptions.builder()
         .projectId("p1")
         .authCredentials(AuthCredentials.createForAppEngine())
         .build();
-    StorageOptions serializedCopy = serializeAndDeserialize(options);
-    assertEquals(options, serializedCopy);
-
-    options = options.toBuilder()
+    StorageOptions otherOptions = options.toBuilder()
         .projectId("p2")
-        .retryParams(RetryParams.defaultInstance())
         .authCredentials(null)
-        .pathDelimiter(":")
         .build();
-    serializedCopy = serializeAndDeserialize(options);
-    assertEquals(options, serializedCopy);
-  }
-
-  @Test
-  public void testModelAndRequests() throws Exception {
-    Serializable[] objects = {ACL_DOMAIN, ACL_GROUP, ACL_PROJECT_, ACL_USER, ACL_RAW, ACL,
+    return new Serializable[]{ACL_DOMAIN, ACL_GROUP, ACL_PROJECT_, ACL_USER, ACL_RAW, ACL,
         BLOB_INFO, BLOB, BUCKET_INFO, BUCKET, ORIGIN, CORS, BATCH_REQUEST, BATCH_RESPONSE,
         PAGE_RESULT, BLOB_LIST_OPTIONS, BLOB_SOURCE_OPTIONS, BLOB_TARGET_OPTIONS,
-        BUCKET_LIST_OPTIONS, BUCKET_SOURCE_OPTIONS, BUCKET_TARGET_OPTIONS};
-    for (Serializable obj : objects) {
-      Object copy = serializeAndDeserialize(obj);
-      assertEquals(obj, obj);
-      assertEquals(obj, copy);
-      assertNotSame(obj, copy);
-      assertEquals(copy, copy);
-    }
+        BUCKET_LIST_OPTIONS, BUCKET_SOURCE_OPTIONS, BUCKET_TARGET_OPTIONS, STORAGE_EXCEPTION,
+        options, otherOptions};
   }
 
-  @Test
-  public void testReadChannelState() throws IOException, ClassNotFoundException {
-    StorageOptions options = StorageOptions.builder()
-        .projectId("p2")
-        .retryParams(RetryParams.defaultInstance())
-        .build();
+  @Override
+  protected Restorable[] restorableObjects() {
+    StorageOptions options = StorageOptions.builder().projectId("p2").build();
     ReadChannel reader = new BlobReadChannel(options, BlobId.of("b", "n"), EMPTY_RPC_OPTIONS);
-    RestorableState state = reader.capture();
-    RestorableState deserializedState = serializeAndDeserialize(state);
-    assertEquals(state, deserializedState);
-    assertEquals(state.hashCode(), deserializedState.hashCode());
-    assertEquals(state.toString(), deserializedState.toString());
-    reader.close();
-  }
-
-  @Test
-  public void testWriteChannelState() throws IOException, ClassNotFoundException {
-    StorageOptions options = StorageOptions.builder()
-        .projectId("p2")
-        .retryParams(RetryParams.defaultInstance())
-        .build();
     // avoid closing when you don't want partial writes to GCS upon failure
     @SuppressWarnings("resource")
     BlobWriteChannel writer =
         new BlobWriteChannel(options, BlobInfo.builder(BlobId.of("b", "n")).build(), "upload-id");
-    RestorableState state = writer.capture();
-    RestorableState deserializedState = serializeAndDeserialize(state);
-    assertEquals(state, deserializedState);
-    assertEquals(state.hashCode(), deserializedState.hashCode());
-    assertEquals(state.toString(), deserializedState.toString());
-  }
-
-  @SuppressWarnings("unchecked")
-  private T serializeAndDeserialize(T obj)
-      throws IOException, ClassNotFoundException {
-    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
-    try (ObjectOutputStream output = new ObjectOutputStream(bytes)) {
-      output.writeObject(obj);
-    }
-    try (ObjectInputStream input =
-        new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray()))) {
-      return (T) input.readObject();
-    }
+    return new Restorable[]{reader, writer};
   }
 }
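The restorable objects returned above use the same capture/restore pattern available to applications. A minimal sketch, assuming an initialized `storage` service, a placeholder blob, and that the caller serializes the captured state itself:

```java
ReadChannel reader = storage.reader(BlobId.of("my-bucket", "my-blob"));
// ... read part of the blob ...
RestorableState<ReadChannel> state = reader.capture();
reader.close();
// The captured state is Serializable; it can be persisted and restored later.
ReadChannel restoredReader = state.restore();
```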
diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageImplTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageImplTest.java
index 612664de14ae..3cc99e3bf884 100644
--- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageImplTest.java
+++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageImplTest.java
@@ -37,10 +37,10 @@
 import com.google.gcloud.RetryParams;
 import com.google.gcloud.ServiceOptions;
 import com.google.gcloud.WriteChannel;
-import com.google.gcloud.spi.StorageRpc;
-import com.google.gcloud.spi.StorageRpc.Tuple;
-import com.google.gcloud.spi.StorageRpcFactory;
 import com.google.gcloud.storage.Storage.CopyRequest;
+import com.google.gcloud.storage.spi.StorageRpc;
+import com.google.gcloud.storage.spi.StorageRpc.Tuple;
+import com.google.gcloud.storage.spi.StorageRpcFactory;
 
 import org.easymock.Capture;
 import org.easymock.EasyMock;
@@ -181,8 +181,8 @@ public class StorageImplTest {
       StorageRpc.Option.IF_SOURCE_GENERATION_MATCH, BLOB_SOURCE_GENERATION.value());
 
   // Bucket list options
-  private static final Storage.BucketListOption BUCKET_LIST_MAX_RESULT =
-      Storage.BucketListOption.maxResults(42L);
+  private static final Storage.BucketListOption BUCKET_LIST_PAGE_SIZE =
+      Storage.BucketListOption.pageSize(42L);
   private static final Storage.BucketListOption BUCKET_LIST_PREFIX =
       Storage.BucketListOption.prefix("prefix");
   private static final Storage.BucketListOption BUCKET_LIST_FIELDS =
@@ -190,12 +190,12 @@ public class StorageImplTest {
   private static final Storage.BucketListOption BUCKET_LIST_EMPTY_FIELDS =
       Storage.BucketListOption.fields();
   private static final Map BUCKET_LIST_OPTIONS = ImmutableMap.of(
-      StorageRpc.Option.MAX_RESULTS, BUCKET_LIST_MAX_RESULT.value(),
+      StorageRpc.Option.MAX_RESULTS, BUCKET_LIST_PAGE_SIZE.value(),
       StorageRpc.Option.PREFIX, BUCKET_LIST_PREFIX.value());
 
   // Blob list options
-  private static final Storage.BlobListOption BLOB_LIST_MAX_RESULT =
-      Storage.BlobListOption.maxResults(42L);
+  private static final Storage.BlobListOption BLOB_LIST_PAGE_SIZE =
+      Storage.BlobListOption.pageSize(42L);
   private static final Storage.BlobListOption BLOB_LIST_PREFIX =
       Storage.BlobListOption.prefix("prefix");
   private static final Storage.BlobListOption BLOB_LIST_FIELDS =
@@ -205,7 +205,7 @@ public class StorageImplTest {
   private static final Storage.BlobListOption BLOB_LIST_EMPTY_FIELDS =
       Storage.BlobListOption.fields();
   private static final Map BLOB_LIST_OPTIONS = ImmutableMap.of(
-      StorageRpc.Option.MAX_RESULTS, BLOB_LIST_MAX_RESULT.value(),
+      StorageRpc.Option.MAX_RESULTS, BLOB_LIST_PAGE_SIZE.value(),
       StorageRpc.Option.PREFIX, BLOB_LIST_PREFIX.value(),
       StorageRpc.Option.VERSIONS, BLOB_LIST_VERSIONS.value());
 
@@ -567,7 +567,7 @@ public void testListBucketsWithOptions() {
     EasyMock.replay(storageRpcMock);
     initializeService();
     ImmutableList bucketList = ImmutableList.of(expectedBucket1, expectedBucket2);
-    Page page = storage.list(BUCKET_LIST_MAX_RESULT, BUCKET_LIST_PREFIX);
+    Page page = storage.list(BUCKET_LIST_PAGE_SIZE, BUCKET_LIST_PREFIX);
     assertEquals(cursor, page.nextPageCursor());
     assertArrayEquals(bucketList.toArray(), Iterables.toArray(page.values(), Bucket.class));
   }
@@ -654,7 +654,7 @@ public void testListBlobsWithOptions() {
     initializeService();
     ImmutableList blobList = ImmutableList.of(expectedBlob1, expectedBlob2);
     Page page =
-        storage.list(BUCKET_NAME1, BLOB_LIST_MAX_RESULT, BLOB_LIST_PREFIX, BLOB_LIST_VERSIONS);
+        storage.list(BUCKET_NAME1, BLOB_LIST_PAGE_SIZE, BLOB_LIST_PREFIX, BLOB_LIST_VERSIONS);
     assertEquals(cursor, page.nextPageCursor());
     assertArrayEquals(blobList.toArray(), Iterables.toArray(page.values(), Blob.class));
   }
@@ -673,9 +673,9 @@ public void testListBlobsWithSelectedFields() {
     initializeService();
     ImmutableList blobList = ImmutableList.of(expectedBlob1, expectedBlob2);
     Page page =
-        storage.list(BUCKET_NAME1, BLOB_LIST_MAX_RESULT, BLOB_LIST_PREFIX, BLOB_LIST_FIELDS);
-    assertEquals(BLOB_LIST_MAX_RESULT.value(),
-        capturedOptions.getValue().get(BLOB_LIST_MAX_RESULT.rpcOption()));
+        storage.list(BUCKET_NAME1, BLOB_LIST_PAGE_SIZE, BLOB_LIST_PREFIX, BLOB_LIST_FIELDS);
+    assertEquals(BLOB_LIST_PAGE_SIZE.value(),
+        capturedOptions.getValue().get(BLOB_LIST_PAGE_SIZE.rpcOption()));
     assertEquals(BLOB_LIST_PREFIX.value(),
         capturedOptions.getValue().get(BLOB_LIST_PREFIX.rpcOption()));
     String selector = (String) capturedOptions.getValue().get(BLOB_LIST_FIELDS.rpcOption());
@@ -704,9 +704,9 @@ public void testListBlobsWithEmptyFields() {
     initializeService();
     ImmutableList blobList = ImmutableList.of(expectedBlob1, expectedBlob2);
     Page page =
-        storage.list(BUCKET_NAME1, BLOB_LIST_MAX_RESULT, BLOB_LIST_PREFIX, BLOB_LIST_EMPTY_FIELDS);
-    assertEquals(BLOB_LIST_MAX_RESULT.value(),
-        capturedOptions.getValue().get(BLOB_LIST_MAX_RESULT.rpcOption()));
+        storage.list(BUCKET_NAME1, BLOB_LIST_PAGE_SIZE, BLOB_LIST_PREFIX, BLOB_LIST_EMPTY_FIELDS);
+    assertEquals(BLOB_LIST_PAGE_SIZE.value(),
+        capturedOptions.getValue().get(BLOB_LIST_PAGE_SIZE.rpcOption()));
     assertEquals(BLOB_LIST_PREFIX.value(),
         capturedOptions.getValue().get(BLOB_LIST_PREFIX.rpcOption()));
     String selector = (String) capturedOptions.getValue().get(BLOB_LIST_EMPTY_FIELDS.rpcOption());
@@ -719,6 +719,22 @@ public void testListBlobsWithEmptyFields() {
     assertArrayEquals(blobList.toArray(), Iterables.toArray(page.values(), Blob.class));
   }
 
+  @Test
+  public void testListBlobsCurrentDirectory() {
+    String cursor = "cursor";
+    Map options = ImmutableMap.of(StorageRpc.Option.DELIMITER, "/");
+    ImmutableList blobInfoList = ImmutableList.of(BLOB_INFO1, BLOB_INFO2);
+    Tuple> result =
+        Tuple.of(cursor, Iterables.transform(blobInfoList, BlobInfo.INFO_TO_PB_FUNCTION));
+    EasyMock.expect(storageRpcMock.list(BUCKET_NAME1, options)).andReturn(result);
+    EasyMock.replay(storageRpcMock);
+    initializeService();
+    ImmutableList blobList = ImmutableList.of(expectedBlob1, expectedBlob2);
+    Page page = storage.list(BUCKET_NAME1, Storage.BlobListOption.currentDirectory());
+    assertEquals(cursor, page.nextPageCursor());
+    assertArrayEquals(blobList.toArray(), Iterables.toArray(page.values(), Blob.class));
+  }
+
   @Test
   public void testUpdateBucket() {
     BucketInfo updatedBucketInfo = BUCKET_INFO1.toBuilder().indexPage("some-page").build();
@@ -850,7 +866,7 @@ public void testComposeWithOptions() {
   public void testCopy() {
     CopyRequest request = Storage.CopyRequest.of(BLOB_INFO1.blobId(), BLOB_INFO2.blobId());
     StorageRpc.RewriteRequest rpcRequest = new StorageRpc.RewriteRequest(request.source().toPb(),
-        EMPTY_RPC_OPTIONS, request.target().toPb(), EMPTY_RPC_OPTIONS, null);
+        EMPTY_RPC_OPTIONS, false, BLOB_INFO2.toPb(), EMPTY_RPC_OPTIONS, null);
     StorageRpc.RewriteResponse rpcResponse = new StorageRpc.RewriteResponse(rpcRequest, null, 42L,
         false, "token", 21L);
     EasyMock.expect(storageRpcMock.openRewrite(rpcRequest)).andReturn(rpcResponse);
@@ -870,7 +886,7 @@ public void testCopyWithOptions() {
         .target(BLOB_INFO1, BLOB_TARGET_GENERATION, BLOB_TARGET_METAGENERATION)
         .build();
     StorageRpc.RewriteRequest rpcRequest = new StorageRpc.RewriteRequest(request.source().toPb(),
-        BLOB_SOURCE_OPTIONS_COPY, request.target().toPb(), BLOB_TARGET_OPTIONS_COMPOSE, null);
+        BLOB_SOURCE_OPTIONS_COPY, true, request.target().toPb(), BLOB_TARGET_OPTIONS_COMPOSE, null);
     StorageRpc.RewriteResponse rpcResponse = new StorageRpc.RewriteResponse(rpcRequest, null, 42L,
         false, "token", 21L);
     EasyMock.expect(storageRpcMock.openRewrite(rpcRequest)).andReturn(rpcResponse);
@@ -890,7 +906,7 @@ public void testCopyWithOptionsFromBlobId() {
         .target(BLOB_INFO1, BLOB_TARGET_GENERATION, BLOB_TARGET_METAGENERATION)
         .build();
     StorageRpc.RewriteRequest rpcRequest = new StorageRpc.RewriteRequest(request.source().toPb(),
-        BLOB_SOURCE_OPTIONS_COPY, request.target().toPb(), BLOB_TARGET_OPTIONS_COMPOSE, null);
+        BLOB_SOURCE_OPTIONS_COPY, true, request.target().toPb(), BLOB_TARGET_OPTIONS_COMPOSE, null);
     StorageRpc.RewriteResponse rpcResponse = new StorageRpc.RewriteResponse(rpcRequest, null, 42L,
         false, "token", 21L);
     EasyMock.expect(storageRpcMock.openRewrite(rpcRequest)).andReturn(rpcResponse);
@@ -906,7 +922,7 @@ public void testCopyWithOptionsFromBlobId() {
   public void testCopyMultipleRequests() {
     CopyRequest request = Storage.CopyRequest.of(BLOB_INFO1.blobId(), BLOB_INFO2.blobId());
     StorageRpc.RewriteRequest rpcRequest = new StorageRpc.RewriteRequest(request.source().toPb(),
-        EMPTY_RPC_OPTIONS, request.target().toPb(), EMPTY_RPC_OPTIONS, null);
+        EMPTY_RPC_OPTIONS, false, BLOB_INFO2.toPb(), EMPTY_RPC_OPTIONS, null);
     StorageRpc.RewriteResponse rpcResponse1 = new StorageRpc.RewriteResponse(rpcRequest, null, 42L,
         false, "token", 21L);
     StorageRpc.RewriteResponse rpcResponse2 = new StorageRpc.RewriteResponse(rpcRequest,
@@ -919,7 +935,7 @@ public void testCopyMultipleRequests() {
     assertEquals(42L, writer.blobSize());
     assertEquals(21L, writer.totalBytesCopied());
     assertTrue(!writer.isDone());
-    assertEquals(BLOB_INFO1, writer.result());
+    assertEquals(expectedBlob1, writer.result());
     assertTrue(writer.isDone());
     assertEquals(42L, writer.totalBytesCopied());
     assertEquals(42L, writer.blobSize());
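The maxResults-to-pageSize rename exercised in the tests above surfaces in user code as a listing option. A minimal sketch, assuming an initialized `storage` service and placeholder bucket name and prefix:

```java
Page<Blob> page = storage.list("my-bucket",
    Storage.BlobListOption.pageSize(42L),
    Storage.BlobListOption.prefix("prefix"));
Iterator<Blob> blobs = page.iterateAll();
while (blobs.hasNext()) {
  Blob blob = blobs.next();
  // ... use the blob ...
}
```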
diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/it/ITStorageTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/it/ITStorageTest.java
index 8e954de57e68..13d768442c34 100644
--- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/it/ITStorageTest.java
+++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/it/ITStorageTest.java
@@ -28,6 +28,7 @@
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.Iterators;
 import com.google.common.collect.Lists;
 import com.google.gcloud.Page;
 import com.google.gcloud.ReadChannel;
@@ -53,6 +54,7 @@
 import org.junit.Test;
 
 import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.net.URL;
@@ -100,13 +102,12 @@ public static void afterClass() throws ExecutionException, InterruptedException
   @Test(timeout = 5000)
   public void testListBuckets() throws InterruptedException {
-    Iterator bucketIterator =
-        storage.list(Storage.BucketListOption.prefix(BUCKET),
-            Storage.BucketListOption.fields()).values().iterator();
+    Iterator bucketIterator = storage.list(Storage.BucketListOption.prefix(BUCKET),
+        Storage.BucketListOption.fields()).iterateAll();
     while (!bucketIterator.hasNext()) {
       Thread.sleep(500);
       bucketIterator = storage.list(Storage.BucketListOption.prefix(BUCKET),
-          Storage.BucketListOption.fields()).values().iterator();
+          Storage.BucketListOption.fields()).iterateAll();
     }
     while (bucketIterator.hasNext()) {
       Bucket remoteBucket = bucketIterator.next();
@@ -287,8 +288,8 @@ public void testGetBlobFailNonExistingGeneration() {
     assertTrue(remoteBlob.delete());
   }
 
-  @Test
-  public void testListBlobsSelectedFields() {
+  @Test(timeout = 5000)
+  public void testListBlobsSelectedFields() throws InterruptedException {
     String[] blobNames = {"test-list-blobs-selected-fields-blob1",
         "test-list-blobs-selected-fields-blob2"};
     ImmutableMap metadata = ImmutableMap.of("k", "v");
@@ -307,10 +308,20 @@ public void testListBlobsSelectedFields() {
     Page page = storage.list(BUCKET,
         Storage.BlobListOption.prefix("test-list-blobs-selected-fields-blob"),
         Storage.BlobListOption.fields(BlobField.METADATA));
-    int index = 0;
-    for (Blob remoteBlob : page.values()) {
+    // Listing blobs is eventually consistent, we loop until the list is of the expected size. The
+    // test fails if timeout is reached.
+    while (Iterators.size(page.iterateAll()) != 2) {
+      Thread.sleep(500);
+      page = storage.list(BUCKET,
+          Storage.BlobListOption.prefix("test-list-blobs-selected-fields-blob"),
+          Storage.BlobListOption.fields(BlobField.METADATA));
+    }
+    Set blobSet = ImmutableSet.of(blobNames[0], blobNames[1]);
+    Iterator iterator = page.iterateAll();
+    while (iterator.hasNext()) {
+      Blob remoteBlob = iterator.next();
       assertEquals(BUCKET, remoteBlob.bucket());
-      assertEquals(blobNames[index++], remoteBlob.name());
+      assertTrue(blobSet.contains(remoteBlob.name()));
       assertEquals(metadata, remoteBlob.metadata());
       assertNull(remoteBlob.contentType());
     }
@@ -318,8 +329,8 @@ public void testListBlobsSelectedFields() {
     assertTrue(remoteBlob2.delete());
   }
 
-  @Test
-  public void testListBlobsEmptySelectedFields() {
+  @Test(timeout = 5000)
+  public void testListBlobsEmptySelectedFields() throws InterruptedException {
     String[] blobNames = {"test-list-blobs-empty-selected-fields-blob1",
         "test-list-blobs-empty-selected-fields-blob2"};
     BlobInfo blob1 = BlobInfo.builder(BUCKET, blobNames[0])
@@ -335,17 +346,27 @@ public void testListBlobsEmptySelectedFields() {
     Page page = storage.list(BUCKET,
         Storage.BlobListOption.prefix("test-list-blobs-empty-selected-fields-blob"),
         Storage.BlobListOption.fields());
-    int index = 0;
-    for (Blob remoteBlob : page.values()) {
+    // Listing blobs is eventually consistent, we loop until the list is of the expected size. The
+    // test fails if timeout is reached.
+    while (Iterators.size(page.iterateAll()) != 2) {
+      Thread.sleep(500);
+      page = storage.list(BUCKET,
+          Storage.BlobListOption.prefix("test-list-blobs-empty-selected-fields-blob"),
+          Storage.BlobListOption.fields());
+    }
+    Set blobSet = ImmutableSet.of(blobNames[0], blobNames[1]);
+    Iterator iterator = page.iterateAll();
+    while (iterator.hasNext()) {
+      Blob remoteBlob = iterator.next();
       assertEquals(BUCKET, remoteBlob.bucket());
-      assertEquals(blobNames[index++], remoteBlob.name());
+      assertTrue(blobSet.contains(remoteBlob.name()));
       assertNull(remoteBlob.contentType());
     }
     assertTrue(remoteBlob1.delete());
     assertTrue(remoteBlob2.delete());
   }
 
-  @Test
+  @Test(timeout = 15000)
   public void testListBlobsVersioned() throws ExecutionException, InterruptedException {
     String bucketName = RemoteGcsHelper.generateBucketName();
     Bucket bucket = storage.create(BucketInfo.builder(bucketName).versioningEnabled(true).build());
@@ -366,8 +387,18 @@ public void testListBlobsVersioned() throws ExecutionException, InterruptedExcep
     Page page = storage.list(bucketName,
         Storage.BlobListOption.prefix("test-list-blobs-versioned-blob"),
         Storage.BlobListOption.versions(true));
+    // Listing blobs is eventually consistent, we loop until the list is of the expected size. The
+    // test fails if timeout is reached.
+    while (Iterators.size(page.iterateAll()) != 3) {
+      Thread.sleep(500);
+      page = storage.list(bucketName,
+          Storage.BlobListOption.prefix("test-list-blobs-versioned-blob"),
+          Storage.BlobListOption.versions(true));
+    }
     Set blobSet = ImmutableSet.of(blobNames[0], blobNames[1]);
-    for (Blob remoteBlob : page.values()) {
+    Iterator iterator = page.iterateAll();
+    while (iterator.hasNext()) {
+      Blob remoteBlob = iterator.next();
       assertEquals(bucketName, remoteBlob.bucket());
       assertTrue(blobSet.contains(remoteBlob.name()));
       assertNotNull(remoteBlob.generation());
@@ -380,6 +411,52 @@ public void testListBlobsVersioned() throws ExecutionException, InterruptedExcep
     }
   }
 
+  @Test(timeout = 5000)
+  public void testListBlobsCurrentDirectory() throws InterruptedException {
+    String directoryName = "test-list-blobs-current-directory/";
+    String subdirectoryName = "subdirectory/";
+    String[] blobNames = {directoryName + subdirectoryName + "blob1",
+        directoryName + "blob2"};
+    BlobInfo blob1 = BlobInfo.builder(BUCKET, blobNames[0])
+        .contentType(CONTENT_TYPE)
+        .build();
+    BlobInfo blob2 = BlobInfo.builder(BUCKET, blobNames[1])
+        .contentType(CONTENT_TYPE)
+        .build();
+    Blob remoteBlob1 = storage.create(blob1, BLOB_BYTE_CONTENT);
+    Blob remoteBlob2 = storage.create(blob2, BLOB_BYTE_CONTENT);
+    assertNotNull(remoteBlob1);
+    assertNotNull(remoteBlob2);
+    Page page = storage.list(BUCKET,
+        Storage.BlobListOption.prefix("test-list-blobs-current-directory/"),
+        Storage.BlobListOption.currentDirectory());
+    // Listing blobs is eventually consistent, we loop until the list is of the expected size. The
+    // test fails if timeout is reached.
+    while (Iterators.size(page.iterateAll()) != 2) {
+      Thread.sleep(500);
+      page = storage.list(BUCKET,
+          Storage.BlobListOption.prefix("test-list-blobs-current-directory/"),
+          Storage.BlobListOption.currentDirectory());
+    }
+    Iterator iterator = page.iterateAll();
+    while (iterator.hasNext()) {
+      Blob remoteBlob = iterator.next();
+      assertEquals(BUCKET, remoteBlob.bucket());
+      if (remoteBlob.name().equals(blobNames[1])) {
+        assertEquals(CONTENT_TYPE, remoteBlob.contentType());
+        assertEquals(BLOB_BYTE_CONTENT.length, (long) remoteBlob.size());
+        assertFalse(remoteBlob.isDirectory());
+      } else if (remoteBlob.name().equals(directoryName + subdirectoryName)) {
+        assertEquals(0L, (long) remoteBlob.size());
+        assertTrue(remoteBlob.isDirectory());
+      } else {
+        fail("Unexpected blob with name " + remoteBlob.name());
+      }
+    }
+    assertTrue(remoteBlob1.delete());
+    assertTrue(remoteBlob2.delete());
+  }
+
   @Test
   public void testUpdateBlob() {
     String blobName = "test-update-blob";
@@ -522,6 +599,37 @@ public void testComposeBlob() {
     assertNotNull(remoteTargetBlob);
     assertEquals(targetBlob.name(), remoteTargetBlob.name());
     assertEquals(targetBlob.bucket(), remoteTargetBlob.bucket());
+    assertNull(remoteTargetBlob.contentType());
+    byte[] readBytes = storage.readAllBytes(BUCKET, targetBlobName);
+    byte[] composedBytes = Arrays.copyOf(BLOB_BYTE_CONTENT, BLOB_BYTE_CONTENT.length * 2);
+    System.arraycopy(BLOB_BYTE_CONTENT, 0, composedBytes, BLOB_BYTE_CONTENT.length,
+        BLOB_BYTE_CONTENT.length);
+    assertArrayEquals(composedBytes, readBytes);
+    assertTrue(remoteSourceBlob1.delete());
+    assertTrue(remoteSourceBlob2.delete());
+    assertTrue(remoteTargetBlob.delete());
+  }
+
+  @Test
+  public void testComposeBlobWithContentType() {
+    String sourceBlobName1 = "test-compose-blob-with-content-type-source-1";
+    String sourceBlobName2 = "test-compose-blob-with-content-type-source-2";
+    BlobInfo sourceBlob1 = BlobInfo.builder(BUCKET, sourceBlobName1).build();
+    BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build();
+    Blob remoteSourceBlob1 = storage.create(sourceBlob1, BLOB_BYTE_CONTENT);
+    Blob remoteSourceBlob2 = storage.create(sourceBlob2, BLOB_BYTE_CONTENT);
+    assertNotNull(remoteSourceBlob1);
+    assertNotNull(remoteSourceBlob2);
+    String targetBlobName = "test-compose-blob-with-content-type-target";
+    BlobInfo targetBlob =
+        BlobInfo.builder(BUCKET, targetBlobName).contentType(CONTENT_TYPE).build();
+    Storage.ComposeRequest req =
+        Storage.ComposeRequest.of(ImmutableList.of(sourceBlobName1, sourceBlobName2), targetBlob);
+    Blob remoteTargetBlob = storage.compose(req);
+    assertNotNull(remoteTargetBlob);
+    assertEquals(targetBlob.name(), remoteTargetBlob.name());
+    assertEquals(targetBlob.bucket(), remoteTargetBlob.bucket());
+    assertEquals(CONTENT_TYPE, remoteTargetBlob.contentType());
     byte[] readBytes = storage.readAllBytes(BUCKET, targetBlobName);
     byte[] composedBytes = Arrays.copyOf(BLOB_BYTE_CONTENT, BLOB_BYTE_CONTENT.length * 2);
     System.arraycopy(BLOB_BYTE_CONTENT, 0, composedBytes, BLOB_BYTE_CONTENT.length,
@@ -605,6 +713,26 @@ public void testCopyBlobUpdateMetadata() {
     assertTrue(storage.delete(BUCKET, targetBlobName));
   }
 
+  @Test
+  public void testCopyBlobNoContentType() {
+    String sourceBlobName = "test-copy-blob-no-content-type-source";
+    BlobId source = BlobId.of(BUCKET, sourceBlobName);
+    Blob remoteSourceBlob = storage.create(BlobInfo.builder(source).build(), BLOB_BYTE_CONTENT);
+    assertNotNull(remoteSourceBlob);
+    String targetBlobName = "test-copy-blob-no-content-type-target";
+    ImmutableMap metadata = ImmutableMap.of("k", "v");
+    BlobInfo target = BlobInfo.builder(BUCKET, targetBlobName).metadata(metadata).build();
+    Storage.CopyRequest req = Storage.CopyRequest.of(source, target);
+    CopyWriter copyWriter = storage.copy(req);
+    assertEquals(BUCKET, copyWriter.result().bucket());
+    assertEquals(targetBlobName, copyWriter.result().name());
+    assertNull(copyWriter.result().contentType());
+    assertEquals(metadata, copyWriter.result().metadata());
+    assertTrue(copyWriter.isDone());
+    assertTrue(remoteSourceBlob.delete());
+    assertTrue(storage.delete(BUCKET, targetBlobName));
+  }
+
   @Test
   public void testCopyBlobFail() {
     String sourceBlobName = "test-copy-blob-source-fail";
@@ -793,6 +921,33 @@ public void testReadAndWriteChannels() throws IOException {
     assertTrue(storage.delete(BUCKET, blobName));
   }
 
+  @Test
+  public void testReadAndWriteChannelsWithDifferentFileSize() throws IOException {
+    String blobNamePrefix = "test-read-and-write-channels-blob-";
+    int[] blobSizes = {0, 700, 1024 * 256, 2 * 1024 * 1024, 4 * 1024 * 1024, 4 * 1024 * 1024 + 1};
+    Random rnd = new Random();
+    for (int blobSize : blobSizes) {
+      String blobName = blobNamePrefix + blobSize;
+      BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build();
+      byte[] bytes = new byte[blobSize];
+      rnd.nextBytes(bytes);
+      try (WriteChannel writer = storage.writer(blob)) {
+        writer.write(ByteBuffer.wrap(bytes));
+      }
+      ByteArrayOutputStream output = new ByteArrayOutputStream();
+      try (ReadChannel reader = storage.reader(blob.blobId())) {
+        ByteBuffer buffer = ByteBuffer.allocate(64 * 1024);
+        while (reader.read(buffer) > 0) {
+          buffer.flip();
+          output.write(buffer.array(), 0, buffer.limit());
+          buffer.clear();
+        }
+      }
+      assertArrayEquals(bytes, output.toByteArray());
+      assertTrue(storage.delete(BUCKET, blobName));
+    }
+  }
+
   @Test
   public void testReadAndWriteCaptureChannels() throws IOException {
     String blobName = "test-read-and-write-capture-channels-blob";
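The new currentDirectory() option covered by the integration tests above limits a listing to the first level under the given prefix. A minimal usage sketch, assuming an initialized `storage` service and placeholder names:

```java
Page<Blob> page = storage.list("my-bucket",
    Storage.BlobListOption.prefix("music/"),
    Storage.BlobListOption.currentDirectory());
Iterator<Blob> blobs = page.iterateAll();
while (blobs.hasNext()) {
  Blob blob = blobs.next();
  if (blob.isDirectory()) {
    // Sub-"directories" are reported as zero-size placeholder blobs.
  }
}
```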
blobName = "test-read-and-write-capture-channels-blob"; diff --git a/gcloud-java/README.md b/gcloud-java/README.md index c51b4e8fe7bc..e296d0c0c565 100644 --- a/gcloud-java/README.md +++ b/gcloud-java/README.md @@ -27,16 +27,16 @@ If you are using Maven, add this to your pom.xml file com.google.gcloud gcloud-java - 0.1.4 + 0.1.5 ``` If you are using Gradle, add this to your dependencies ```Groovy -compile 'com.google.gcloud:gcloud-java:0.1.4' +compile 'com.google.gcloud:gcloud-java:0.1.5' ``` If you are using SBT, add this to your dependencies ```Scala -libraryDependencies += "com.google.gcloud" % "gcloud-java" % "0.1.4" +libraryDependencies += "com.google.gcloud" % "gcloud-java" % "0.1.5" ``` Troubleshooting diff --git a/gcloud-java/pom.xml b/gcloud-java/pom.xml index 03d2b6600ba3..654b34f92056 100644 --- a/gcloud-java/pom.xml +++ b/gcloud-java/pom.xml @@ -10,7 +10,7 @@ com.google.gcloud gcloud-java-pom - 0.1.5-SNAPSHOT + 0.1.6-SNAPSHOT diff --git a/pom.xml b/pom.xml index 880e2530be60..d73956f506ee 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ com.google.gcloud gcloud-java-pom pom - 0.1.5-SNAPSHOT + 0.1.6-SNAPSHOT GCloud Java https://github.com/GoogleCloudPlatform/gcloud-java @@ -216,6 +216,13 @@ + + + + test-jar + + + maven-compiler-plugin @@ -390,6 +397,24 @@ protected true ${project.build.directory}/javadoc + + + API packages + com.google.gcloud* + + + Test helpers packages + com.google.gcloud.bigquery.testing:com.google.gcloud.datastore.testing:com.google.gcloud.resourcemanager.testing:com.google.gcloud.storage.testing + + + Example packages + com.google.gcloud.examples* + + + SPI packages + com.google.gcloud.spi:com.google.gcloud.bigquery.spi:com.google.gcloud.datastore.spi:com.google.gcloud.resourcemanager.spi:com.google.gcloud.storage.spi + +