Add samples for managing tables. #3361

Merged · 3 commits · Jun 8, 2018
Changes from 1 commit
@@ -560,22 +560,24 @@ public int hashCode() {
* String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.json";
* TableId tableId = TableId.of(datasetName, tableName);
* // Table field definition
- * Field[] fields = new Field[] {
- *     Field.of("name", LegacySQLTypeName.STRING),
- *     Field.of("post_abbr", LegacySQLTypeName.STRING)
- * };
+ * Field[] fields =
+ *     new Field[] {
+ *       Field.of("name", LegacySQLTypeName.STRING),
+ *       Field.of("post_abbr", LegacySQLTypeName.STRING)
+ *     };
* // Table schema definition
* Schema schema = Schema.of(fields);
- * LoadJobConfiguration configuration = LoadJobConfiguration.builder(tableId, sourceUri)
- *     .setFormatOptions(FormatOptions.json())
- *     .setCreateDisposition(CreateDisposition.CREATE_IF_NEEDED)
- *     .setSchema(schema)
- *     .build();
+ * LoadJobConfiguration configuration =
+ *     LoadJobConfiguration.builder(tableId, sourceUri)
+ *         .setFormatOptions(FormatOptions.json())
+ *         .setCreateDisposition(CreateDisposition.CREATE_IF_NEEDED)
+ *         .setSchema(schema)
+ *         .build();
* // Load the table
- * Job remoteLoadJob = bigquery.create(JobInfo.of(configuration));
- * remoteLoadJob = remoteLoadJob.waitFor();
+ * Job loadJob = bigquery.create(JobInfo.of(configuration));
+ * loadJob = loadJob.waitFor();
* // Check the table
- * System.out.println("State: " + remoteLoadJob.getStatus().getState());
+ * System.out.println("State: " + loadJob.getStatus().getState());
* return ((StandardTableDefinition) bigquery.getTable(tableId).getDefinition()).getNumRows();
* }</pre>
*
@@ -771,9 +773,25 @@ public int hashCode() {
* String datasetName = "my_dataset_name";
* String tableName = "my_table_name";
* String newDescription = "new_description";
- * Table oldTable = bigquery.getTable(datasetName, tableName);
- * TableInfo tableInfo = oldTable.toBuilder().setDescription(newDescription).build();
- * Table newTable = bigquery.update(tableInfo);
+ * Table beforeTable = bigquery.getTable(datasetName, tableName);
+ * TableInfo tableInfo = beforeTable.toBuilder()
+ *     .setDescription(newDescription)
+ *     .build();
+ * Table afterTable = bigquery.update(tableInfo);
* }</pre>
*
+ * <p>Example of updating a table by changing its expiration.
+ * <pre> {@code
+ * String datasetName = "my_dataset_name";
+ * String tableName = "my_table_name";
+ * Table beforeTable = bigquery.getTable(datasetName, tableName);
+ *
+ * // Set table to expire 5 days from now.
+ * long expirationMillis = DateTime.now().plusDays(5).getMillis();
+ * TableInfo tableInfo = beforeTable.toBuilder()
+ *     .setExpirationTime(expirationMillis)
+ *     .build();
+ * Table afterTable = bigquery.update(tableInfo);
+ * }</pre>
*
* @throws BigQueryException upon failure
@@ -869,10 +887,12 @@ public int hashCode() {
* Map<String, Object> recordsContent = new HashMap<>();
* recordsContent.put("stringField", "Hello, World!");
* rowContent.put("recordField", recordsContent);
- * InsertAllResponse response = bigquery.insertAll(InsertAllRequest.newBuilder(tableId)
- *     .addRow("rowId", rowContent)
- *     // More rows can be added in the same RPC by invoking .addRow() on the builder
- *     .build());
+ * InsertAllResponse response =
+ *     bigquery.insertAll(
+ *         InsertAllRequest.newBuilder(tableId)
+ *             .addRow("rowId", rowContent)
+ *             // More rows can be added in the same RPC by invoking .addRow() on the builder
+ *             .build());
* if (response.hasErrors()) {
* // If any of the insertions failed, this lets you inspect the errors
* for (Entry<Long, List<BigQueryError>> entry : response.getInsertErrors().entrySet()) {
@@ -936,8 +956,7 @@ public int hashCode() {
* String tableName = "my_table_name";
* Schema schema = ...;
* String field = "field";
- * TableResult tableData =
- *     bigquery.listTableData(datasetName, tableName, schema);
+ * TableResult tableData = bigquery.listTableData(datasetName, tableName, schema);
* for (FieldValueList row : tableData.iterateAll()) {
* row.get(field);
* }
@@ -1083,10 +1102,8 @@ TableResult listTableData(
* <p>Example of running a query.
* <pre> {@code
* // BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
- * String query =
- *     "SELECT corpus FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus;";
- * QueryJobConfiguration queryConfig =
- *     QueryJobConfiguration.newBuilder(query).build();
+ * String query = "SELECT corpus FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus;";
+ * QueryJobConfiguration queryConfig = QueryJobConfiguration.newBuilder(query).build();
*
* // Print the results.
* for (FieldValueList row : bigquery.query(queryConfig).iterateAll()) {
@@ -1145,9 +1162,7 @@ TableResult query(QueryJobConfiguration configuration, JobId jobId, JobOption...
* String csvData = "StringValue1\nStringValue2\n";
* TableId tableId = TableId.of(datasetName, tableName);
* WriteChannelConfiguration writeChannelConfiguration =
- *     WriteChannelConfiguration.newBuilder(tableId)
- *         .setFormatOptions(FormatOptions.csv())
- *         .build();
+ *     WriteChannelConfiguration.newBuilder(tableId).setFormatOptions(FormatOptions.csv()).build();
* TableDataWriteChannel writer = bigquery.writer(writeChannelConfiguration);
* // Write data to writer
* try {
@@ -1170,9 +1185,7 @@ TableResult query(QueryJobConfiguration configuration, JobId jobId, JobOption...
* String location = "us";
* TableId tableId = TableId.of(datasetName, tableName);
* WriteChannelConfiguration writeChannelConfiguration =
- *     WriteChannelConfiguration.newBuilder(tableId)
- *         .setFormatOptions(FormatOptions.csv())
- *         .build();
+ *     WriteChannelConfiguration.newBuilder(tableId).setFormatOptions(FormatOptions.csv()).build();
* // The location must be specified; other fields can be auto-detected.
* JobId jobId = JobId.newBuilder().setLocation(location).build();
* TableDataWriteChannel writer = bigquery.writer(jobId, writeChannelConfiguration);
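Note on the query example reformatted above: it embeds the SQL as a plain literal. For readers adapting the sample, a named-parameter variant is sketched below. This sketch is not part of the diff; the @corpus parameter name and the "hamlet" filter value are illustrative only.

import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.FieldValueList;
import com.google.cloud.bigquery.QueryJobConfiguration;
import com.google.cloud.bigquery.QueryParameterValue;

public class ParameterizedQuerySketch {
  public static void main(String[] args) throws InterruptedException {
    BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
    // Named parameters require standard SQL, which QueryJobConfiguration uses by default.
    String query =
        "SELECT word FROM `bigquery-public-data.samples.shakespeare` "
            + "WHERE corpus = @corpus LIMIT 10";
    QueryJobConfiguration queryConfig =
        QueryJobConfiguration.newBuilder(query)
            .addNamedParameter("corpus", QueryParameterValue.string("hamlet"))
            .build();
    // Run the query and print the matching words.
    for (FieldValueList row : bigquery.query(queryConfig).iterateAll()) {
      System.out.println(row.get(0).getStringValue());
    }
  }
}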
@@ -59,6 +59,8 @@
import com.google.cloud.bigquery.TableInfo;
import com.google.cloud.bigquery.TableResult;
import com.google.cloud.bigquery.WriteChannelConfiguration;
+import org.joda.time.DateTime;
+
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
@@ -123,13 +125,35 @@ public Dataset updateDataset(String datasetName, String newDescription) {
// [VARIABLE "my_dataset_name"]
// [VARIABLE "my_table_name"]
// [VARIABLE "new_description"]
-  public Table updateTable(String datasetName, String tableName, String newDescription) {
+  public Table updateTableDescription(String datasetName, String tableName, String newDescription) {
// [START bigquery_update_table_description]
-    Table oldTable = bigquery.getTable(datasetName, tableName);
-    TableInfo tableInfo = oldTable.toBuilder().setDescription(newDescription).build();
-    Table newTable = bigquery.update(tableInfo);
+    Table beforeTable = bigquery.getTable(datasetName, tableName);
+    TableInfo tableInfo = beforeTable.toBuilder()
+        .setDescription(newDescription)
+        .build();
+    Table afterTable = bigquery.update(tableInfo);
// [END bigquery_update_table_description]
-    return newTable;
+    return afterTable;
}

+  /**
+   * Example of updating a table by changing its expiration.
+   */
+  // [TARGET update(TableInfo, TableOption...)]
+  // [VARIABLE "my_dataset_name"]
+  // [VARIABLE "my_table_name"]
+  public Table updateTableExpiration(String datasetName, String tableName) {
+    // [START bigquery_update_table_expiration]
+    Table beforeTable = bigquery.getTable(datasetName, tableName);
+
+    // Set table to expire 5 days from now.
+    long expirationMillis = DateTime.now().plusDays(5).getMillis();
+    TableInfo tableInfo = beforeTable.toBuilder()
+        .setExpirationTime(expirationMillis)
+        .build();
+    Table afterTable = bigquery.update(tableInfo);
+    // [END bigquery_update_table_expiration]
+    return afterTable;
+  }

/**
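Note on the new expiration snippet: it pulls in org.joda.time.DateTime only to compute a timestamp five days out. A plain-JDK sketch of the same calculation with java.util.concurrent.TimeUnit is shown below; it is an alternative, not what this commit does, and it assumes the bigquery and beforeTable variables from the snippet above.

import java.util.concurrent.TimeUnit;

// Set the table to expire 5 days from now, without a Joda-Time dependency.
long expirationMillis = System.currentTimeMillis() + TimeUnit.DAYS.toMillis(5);
TableInfo tableInfo =
    beforeTable.toBuilder().setExpirationTime(expirationMillis).build();
Table afterTable = bigquery.update(tableInfo);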
@@ -17,6 +17,7 @@
package com.google.cloud.examples.bigquery.snippets;

import com.google.cloud.bigquery.BigQuery;
+import com.google.cloud.bigquery.CopyJobConfiguration;
import com.google.cloud.bigquery.FieldValue;
import com.google.cloud.bigquery.FieldValueList;
import com.google.cloud.bigquery.FormatOptions;
@@ -27,9 +28,15 @@
import com.google.cloud.bigquery.QueryParameterValue;
import com.google.cloud.bigquery.StandardTableDefinition;
import com.google.cloud.bigquery.TableId;

+import java.util.Arrays;
+import java.util.concurrent.TimeoutException;
+
import com.google.cloud.bigquery.TableResult;
+import com.google.datastore.v1.Query;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
+import org.joda.time.Instant;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;

@@ -309,4 +316,84 @@ public void loadTableGcsParquet(String datasetName) throws InterruptedException
System.out.printf("Loaded %d rows.\n", destinationTable.getNumRows());
// [END bigquery_load_table_gcs_parquet]
}

+  private void generateTableWithDdl(String datasetId, String tableId) throws InterruptedException {
+    String sql = String.format(
+        "CREATE TABLE %s.%s " +
+        "AS " +
+        "SELECT " +
+        "2000 + CAST(18 * RAND() as INT64) AS year, " +
+        "IF(RAND() > 0.5,\"foo\",\"bar\") AS token " +
+        "FROM " +
+        "UNNEST(GENERATE_ARRAY(0,5,1)) AS r", datasetId, tableId);
+    Job job = bigquery.create(JobInfo.of(QueryJobConfiguration.newBuilder(sql).build()));
+    job.waitFor();
+  }
+
+  /**
+   * Example of copying multiple tables to a destination.
+   */
+  public void copyTables(String datasetId, String destinationTableId) throws InterruptedException {
+    generateTableWithDdl(datasetId, "table1");
+    generateTableWithDdl(datasetId, "table2");
+
+    // [START bigquery_copy_table_multiple_source]
+    TableId destinationTable = TableId.of(datasetId, destinationTableId);
+    CopyJobConfiguration configuration =
+        CopyJobConfiguration.newBuilder(
+                destinationTable,
+                Arrays.asList(
+                    TableId.of(datasetId, "table1"),
+                    TableId.of(datasetId, "table2")))
+            .build();
+
+    // Copy the tables.
+    Job job = bigquery.create(JobInfo.of(configuration));
+    job = job.waitFor();
+
+    // Check the table
+    StandardTableDefinition table = bigquery.getTable(destinationTable).getDefinition();
+    System.out.println("State: " + job.getStatus().getState());
+    System.out.printf("Copied %d rows.\n", table.getNumRows());
+    // [END bigquery_copy_table_multiple_source]
+  }
+
+  /**
+   * Example of undeleting a table.
+   */
+  public void undeleteTable(String datasetId) throws InterruptedException {
+    generateTableWithDdl(datasetId, "oops_undelete_me");
+
+    // [START bigquery_undelete_table]
+    // String datasetId = "my_dataset";
+    String tableId = "oops_undelete_me";
+
+    // Record the current time. We'll use this as the snapshot time
+    // for recovering the table.
+    long snapTime = Instant.now().getMillis();
+
+    // "Accidentally" delete the table.
+    bigquery.delete(TableId.of(datasetId, tableId));
+
+    // Construct the restore-from tableID using a snapshot decorator.
+    String snapshotTableId = String.format("%s@%d", tableId, snapTime);
+    // Choose a new table ID for the recovered table data.
+    String recoverTableId = String.format("%s_recovered", tableId);
+
+    // Construct and run a copy job.
+    CopyJobConfiguration configuration =
+        CopyJobConfiguration.newBuilder(
+                TableId.of(datasetId, recoverTableId),
+                TableId.of(datasetId, snapshotTableId))
+            .build();
+    Job job = bigquery.create(JobInfo.of(configuration));
+    job = job.waitFor();
+
+    // Check the table
+    StandardTableDefinition table = bigquery.getTable(
+        TableId.of(datasetId, recoverTableId)).getDefinition();
+    System.out.println("State: " + job.getStatus().getState());
+    System.out.printf("Recovered %d rows.\n", table.getNumRows());
+    // [END bigquery_undelete_table]
+  }
}
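Note on undeleteTable: it restores from an absolute snapshot decorator, tableId@epochMillis. Legacy table decorators also accept a relative form, tableId@-offsetMillis, meaning the table as it existed that many milliseconds ago. The sketch below copies a one-hour-old snapshot; it is not part of this PR, the offset and the _one_hour_ago destination name are illustrative, it assumes the bigquery, datasetId, and tableId variables from the snippet above plus java.util.concurrent.TimeUnit, and the snapshot must still fall within BigQuery's recovery window (about two days at the time of this PR).

// Copy the table as it existed one hour ago to a new table.
String snapshotTableId = String.format("%s@-%d", tableId, TimeUnit.HOURS.toMillis(1));
CopyJobConfiguration configuration =
    CopyJobConfiguration.of(
        TableId.of(datasetId, tableId + "_one_hour_ago"),
        TableId.of(datasetId, snapshotTableId));
Job job = bigquery.create(JobInfo.of(configuration));
job = job.waitFor();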
@@ -123,7 +123,9 @@ public void testCreateGetAndDeleteTable() throws InterruptedException {
TableId tableId = TableId.of(bigquery.getOptions().getProjectId(), DATASET, tableName);
assertEquals(
tableId, bigquerySnippets.getTable(tableId.getDataset(), tableId.getTable()).getTableId());
-    assertNotNull(bigquerySnippets.updateTable(DATASET, tableName, "new description"));
+    assertNotNull(bigquerySnippets.updateTableDescription(DATASET, tableName, "new description"));
+    table = bigquerySnippets.updateTableExpiration(DATASET, tableName);
+    assertNotNull(table.getExpirationTime());
assertEquals(
"new description",
bigquerySnippets
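Note on the new assertions: they only verify that some expiration time was set. A tighter, hypothetical check (assuming the five-day offset hard-coded in updateTableExpiration, java.util.concurrent.TimeUnit on the classpath, and a generous tolerance for clock skew and test latency) could bound the value:

// Hypothetical tightening of the assertion above; not part of this PR.
long now = System.currentTimeMillis();
long fiveDays = TimeUnit.DAYS.toMillis(5);
long tolerance = TimeUnit.MINUTES.toMillis(10);
long expiration = table.getExpirationTime();
assertTrue(expiration >= now + fiveDays - tolerance);
assertTrue(expiration <= now + fiveDays + tolerance);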
@@ -127,4 +127,18 @@ public void testLoadTableGcsParquet() throws InterruptedException {
assertTrue(got.contains("DONE"));
assertTrue(got.contains("Loaded 50 rows."));
}

+  @Test
+  public void testCopyTables() throws InterruptedException {
+    cloudSnippets.copyTables(DATASET, "copytablesdestination");
+    String got = bout.toString();
+    assertTrue(got.contains("DONE"));
+  }
+
+  @Test
+  public void testUndeleteTable() throws InterruptedException {
+    cloudSnippets.undeleteTable(DATASET);
+    String got = bout.toString();
+    assertTrue(got.contains("DONE"));
+  }
}