Consolidate BigQuery handling into one place

I'm writing a follow-up CL that will send integrity-checking data to
BigQuery, and that is made a lot easier by centralizing the BigQuery
connection logic.
-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=119375766
mcilwain 2016-04-08 08:44:04 -07:00 committed by Justine Tunney
parent c880a042a7
commit 755fce9e52
12 changed files with 329 additions and 347 deletions
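
For context, a minimal sketch of what a call site looks like once this is in, mirroring the MetricsTaskServlet change below. The wrapper class, project/dataset constants, and row contents are illustrative only, not part of this CL:

import com.google.api.services.bigquery.Bigquery;
import com.google.api.services.bigquery.model.TableDataInsertAllRequest;
import com.google.api.services.bigquery.model.TableDataInsertAllResponse;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.domain.registry.bigquery.BigqueryFactory;
import java.io.IOException;

/** Hypothetical caller; only the BigqueryFactory usage reflects this CL. */
class MetricsSender {
  private static final String PROJECT_ID = "my-project";  // illustrative
  private static final String DATASET_ID = "metrics";     // illustrative
  private static final BigqueryFactory bigqueryFactory = new BigqueryFactory();

  static TableDataInsertAllResponse sendRow(String tableId, ImmutableMap<String, Object> fields)
      throws IOException {
    // One factory call builds the client and ensures the dataset and table exist (and caches both).
    Bigquery bigquery = bigqueryFactory.create(PROJECT_ID, DATASET_ID, tableId);
    return bigquery.tabledata()
        .insertAll(PROJECT_ID, DATASET_ID, tableId,
            new TableDataInsertAllRequest().setRows(ImmutableList.of(
                new TableDataInsertAllRequest.Rows().setJson(fields))))
        .execute();
  }
}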

View file

@ -16,6 +16,7 @@ java_library(
"//java/com/google/api/client/http", "//java/com/google/api/client/http",
"//java/com/google/api/client/json", "//java/com/google/api/client/json",
"//java/com/google/api/client/json/jackson2", "//java/com/google/api/client/json/jackson2",
"//java/com/google/common/annotations",
"//java/com/google/common/base", "//java/com/google/common/base",
"//java/com/google/common/collect", "//java/com/google/common/collect",
"//java/com/google/common/io", "//java/com/google/common/io",

View file

@ -14,13 +14,52 @@
package com.google.domain.registry.bigquery;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.domain.registry.bigquery.BigquerySchemas.knownTableSchemas;
import com.google.api.client.extensions.appengine.http.UrlFetchTransport;
import com.google.api.client.googleapis.extensions.appengine.auth.oauth2.AppIdentityCredential;
import com.google.api.client.http.HttpRequestInitializer;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.jackson2.JacksonFactory;
import com.google.api.services.bigquery.Bigquery;
import com.google.api.services.bigquery.BigqueryScopes;
import com.google.api.services.bigquery.model.Dataset;
import com.google.api.services.bigquery.model.DatasetReference;
import com.google.api.services.bigquery.model.Table;
import com.google.api.services.bigquery.model.TableFieldSchema;
import com.google.api.services.bigquery.model.TableReference;
import com.google.api.services.bigquery.model.TableSchema;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Sets;
import com.google.domain.registry.util.FormattingLogger;
import com.google.domain.registry.util.NonFinalForTesting;
import java.io.IOException;
/** Factory for returning {@link Bigquery} instances. */
import java.util.Set;
/** Factory for creating {@link Bigquery} connections. */
public class BigqueryFactory {
private static final FormattingLogger logger = FormattingLogger.getLoggerForCallerClass();
// Cross-request caches to avoid unnecessary RPCs.
@NonFinalForTesting
private Set<String> knownTables = Sets.newConcurrentHashSet();
@NonFinalForTesting
private Set<String> datasets = Sets.newConcurrentHashSet();
@NonFinalForTesting
@VisibleForTesting
Subfactory subfactory = new Subfactory();
/** This class is broken out solely so that it can be mocked inside of tests. */
static class Subfactory {
public Bigquery create(
String applicationName,
HttpTransport transport,
@ -30,4 +69,99 @@ public class BigqueryFactory {
.setApplicationName(applicationName)
.build();
}
}
/** Returns a new connection to BigQuery. */
public Bigquery create(
String applicationName,
HttpTransport transport,
JsonFactory jsonFactory,
HttpRequestInitializer httpRequestInitializer) {
return subfactory.create(applicationName, transport, jsonFactory, httpRequestInitializer);
}
/**
* Returns a new connection to Bigquery, first ensuring that the given dataset exists in the
* project with the given id, creating it if required.
*/
public Bigquery create(String projectId, String datasetId) throws IOException {
Bigquery bigquery = create(
getClass().getSimpleName(),
new UrlFetchTransport(),
new JacksonFactory(),
new AppIdentityCredential(BigqueryScopes.all()));
// Note: it's safe for multiple threads to call this as the dataset will only be created once.
if (!datasets.contains(datasetId)) {
ensureDataset(bigquery, projectId, datasetId);
datasets.add(datasetId);
}
return bigquery;
}
/**
* Returns a new connection to Bigquery, first ensuring that the given dataset and table exist in
* project with the given id, creating them if required.
*/
public Bigquery create(String projectId, String datasetId, String tableId)
throws IOException {
Bigquery bigquery = create(projectId, datasetId);
checkArgument(knownTableSchemas.containsKey(tableId), "Unknown table ID: %s", tableId);
if (!knownTables.contains(tableId)) {
ensureTable(
bigquery,
new TableReference()
.setDatasetId(datasetId)
.setProjectId(projectId)
.setTableId(tableId),
knownTableSchemas.get(tableId));
knownTables.add(tableId);
}
return bigquery;
}
/**
* Ensures the dataset exists by trying to create it. Note that it's not appreciably cheaper
* to check for dataset existence than it is to try to create it and check for exceptions.
*/
// Note that these are not static so they can be mocked for testing.
private void ensureDataset(Bigquery bigquery, String projectId, String datasetId)
throws IOException {
try {
bigquery.datasets()
.insert(projectId,
new Dataset().setDatasetReference(
new DatasetReference()
.setProjectId(projectId)
.setDatasetId(datasetId)))
.execute();
} catch (IOException e) {
// Swallow errors about a duplicate dataset, and throw any other ones.
if (!BigqueryJobFailureException.create(e).getReason().equals("duplicate")) {
throw e;
}
}
}
/** Ensures the table exists in Bigquery. */
private void ensureTable(
Bigquery bigquery, TableReference table, ImmutableList<TableFieldSchema> schema)
throws IOException {
try {
bigquery.tables().insert(table.getProjectId(), table.getDatasetId(), new Table()
.setSchema(new TableSchema().setFields(schema))
.setTableReference(table))
.execute();
logger.infofmt("Created BigQuery table %s:%s.%s", table.getProjectId(), table.getDatasetId(),
table.getTableId());
} catch (IOException e) {
// Swallow errors about a table that exists, and throw any other ones.
if (!BigqueryJobFailureException.create(e).getReason().equals("duplicate")) {
throw e;
}
}
}
}
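
One behavioral detail of the three-argument create() worth noting: the table ID must already be registered in BigquerySchemas.knownTableSchemas, or the checkArgument above rejects the call. A hedged sketch, assuming a BigqueryFactory instance named bigqueryFactory and a made-up table ID:

try {
  bigqueryFactory.create("my-project", "metrics", "somethingElse");  // "somethingElse" is not registered
} catch (IllegalArgumentException expected) {
  // Thrown by the checkArgument: "Unknown table ID: somethingElse".
} catch (IOException e) {
  // create() also declares IOException for the underlying dataset/table RPCs.
}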

View file

@ -1,75 +0,0 @@
// Copyright 2016 The Domain Registry Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.domain.registry.bigquery;
import com.google.api.services.bigquery.Bigquery;
import com.google.api.services.bigquery.model.Dataset;
import com.google.api.services.bigquery.model.DatasetReference;
import com.google.api.services.bigquery.model.Table;
import com.google.api.services.bigquery.model.TableFieldSchema;
import com.google.api.services.bigquery.model.TableReference;
import com.google.api.services.bigquery.model.TableSchema;
import com.google.common.collect.ImmutableList;
import com.google.domain.registry.util.FormattingLogger;
import java.io.IOException;
/** Helpers for Bigquery. */
public class BigqueryHelper {
private static final FormattingLogger logger = FormattingLogger.getLoggerForCallerClass();
/**
* Ensures the dataset exists by trying to create it. Note that it's not appreciably cheaper
* to check for dataset existence than it is to try to create it and check for exceptions.
*/
// Note that these are not static so they can be mocked for testing.
public void ensureDataset(Bigquery bigquery, String projectId, String datasetId)
throws IOException {
try {
bigquery.datasets()
.insert(projectId,
new Dataset().setDatasetReference(
new DatasetReference()
.setProjectId(projectId)
.setDatasetId(datasetId)))
.execute();
} catch (IOException e) {
// Swallow errors about a duplicate dataset, and throw any other ones.
if (!BigqueryJobFailureException.create(e).getReason().equals("duplicate")) {
throw e;
}
}
}
/** Ensures the table exists in Bigquery. */
public void ensureTable(Bigquery bigquery, TableReference table,
ImmutableList<TableFieldSchema> schema) throws IOException {
try {
bigquery.tables().insert(table.getProjectId(), table.getDatasetId(), new Table()
.setSchema(new TableSchema().setFields(schema))
.setTableReference(table))
.execute();
logger.infofmt("Created BigQuery table %s:%s.%s", table.getProjectId(), table.getDatasetId(),
table.getTableId());
} catch (IOException e) {
// Swallow errors about a table that exists, and throw any other ones.
if (!BigqueryJobFailureException.create(e).getReason().equals("duplicate")) {
throw e;
}
}
}
}

View file

@ -0,0 +1,47 @@
// Copyright 2016 The Domain Registry Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.domain.registry.bigquery;
import com.google.api.services.bigquery.model.TableFieldSchema;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.domain.registry.bigquery.BigqueryUtils.FieldType;
import com.google.domain.registry.util.NonFinalForTesting;
import java.util.Map;
/** Schemas for BigQuery tables. */
public final class BigquerySchemas {
static final ImmutableList<TableFieldSchema> EPPMETRICS_SCHEMA_FIELDS =
ImmutableList.<TableFieldSchema>of(
new TableFieldSchema().setName("requestId").setType(FieldType.STRING.name()),
new TableFieldSchema().setName("startTime").setType(FieldType.TIMESTAMP.name()),
new TableFieldSchema().setName("endTime").setType(FieldType.TIMESTAMP.name()),
new TableFieldSchema().setName("commandName").setType(FieldType.STRING.name()),
new TableFieldSchema().setName("clientId").setType(FieldType.STRING.name()),
new TableFieldSchema().setName("privilegeLevel").setType(FieldType.STRING.name()),
new TableFieldSchema().setName("eppTarget").setType(FieldType.STRING.name()),
new TableFieldSchema().setName("eppStatus").setType(FieldType.INTEGER.name()),
new TableFieldSchema().setName("attempts").setType(FieldType.INTEGER.name()));
public static final String EPPMETRICS_TABLE_ID = "eppMetrics";
@NonFinalForTesting
static Map<String, ImmutableList<TableFieldSchema>> knownTableSchemas =
ImmutableMap.of(EPPMETRICS_TABLE_ID, EPPMETRICS_SCHEMA_FIELDS);
private BigquerySchemas() {}
}
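
The follow-up CL mentioned in the description would presumably register its table the same way the EPP metrics table is registered above. A rough sketch of what adding a second schema inside BigquerySchemas could look like; the integrity table ID and its columns are hypothetical:

static final String INTEGRITY_TABLE_ID = "integrityChecks";  // hypothetical

static final ImmutableList<TableFieldSchema> INTEGRITY_SCHEMA_FIELDS =
    ImmutableList.of(
        new TableFieldSchema().setName("scanTime").setType(FieldType.TIMESTAMP.name()),
        new TableFieldSchema().setName("source").setType(FieldType.STRING.name()),
        new TableFieldSchema().setName("numErrors").setType(FieldType.INTEGER.name()));

// knownTableSchemas would then map both table IDs to their schemas.
static Map<String, ImmutableList<TableFieldSchema>> knownTableSchemas =
    ImmutableMap.of(
        EPPMETRICS_TABLE_ID, EPPMETRICS_SCHEMA_FIELDS,
        INTEGRITY_TABLE_ID, INTEGRITY_SCHEMA_FIELDS);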

View file

@ -1,148 +0,0 @@
// Copyright 2016 The Domain Registry Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.domain.registry.export;
import static com.google.common.base.CaseFormat.LOWER_UNDERSCORE;
import static com.google.common.base.CaseFormat.UPPER_UNDERSCORE;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.api.services.bigquery.model.TableFieldSchema;
import com.google.common.base.Function;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import com.google.domain.registry.bigquery.BigqueryUtils.FieldMode;
import com.google.domain.registry.bigquery.BigqueryUtils.FieldType;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
/** Helper class which acts as a container for Bigquery table schemas. */
class BigquerySchemas {
private BigquerySchemas() {}
/** The fields in the "Logs" table. */
enum LogsTableField {
// These fields appear in nested APP_LOG_LINES records.
LOG_LEVEL(FieldType.STRING, FieldMode.NULLABLE),
LOG_MESSAGE(FieldType.STRING, FieldMode.NULLABLE),
TIME(FieldType.TIMESTAMP, FieldMode.NULLABLE),
// These fields appear in records at top level of the table.
START_TIME(FieldType.TIMESTAMP, FieldMode.NULLABLE),
END_TIME(FieldType.TIMESTAMP, FieldMode.NULLABLE),
RELEASE(FieldType.STRING, FieldMode.NULLABLE),
APPID(FieldType.STRING, FieldMode.NULLABLE),
COST(FieldType.FLOAT, FieldMode.NULLABLE),
EPP_CLIENT_ID(FieldType.STRING, FieldMode.NULLABLE),
EPP_COMMAND(FieldType.STRING, FieldMode.NULLABLE),
EPP_RESULT(FieldType.BOOLEAN, FieldMode.NULLABLE),
EPP_TARGET(FieldType.STRING, FieldMode.REPEATED),
EPP_TLD(FieldType.STRING, FieldMode.NULLABLE),
HOST(FieldType.STRING, FieldMode.NULLABLE),
HTTP_VERSION(FieldType.STRING, FieldMode.NULLABLE),
INSTANCE_KEY(FieldType.STRING, FieldMode.NULLABLE),
IP(FieldType.STRING, FieldMode.NULLABLE),
LATENCY_USEC(FieldType.INTEGER, FieldMode.NULLABLE),
MCYCLES(FieldType.INTEGER, FieldMode.NULLABLE),
METHOD(FieldType.STRING, FieldMode.NULLABLE),
MODULE_ID(FieldType.STRING, FieldMode.NULLABLE),
NICKNAME(FieldType.STRING, FieldMode.NULLABLE),
OFFSET(FieldType.STRING, FieldMode.NULLABLE),
PENDING_TIME_USEC(FieldType.INTEGER, FieldMode.NULLABLE),
REFERRER(FieldType.STRING, FieldMode.NULLABLE),
REPLICA_INDEX(FieldType.INTEGER, FieldMode.NULLABLE),
REQUEST_ID(FieldType.STRING, FieldMode.NULLABLE),
RESOURCE(FieldType.STRING, FieldMode.NULLABLE),
RESPONSE_SIZE(FieldType.INTEGER, FieldMode.NULLABLE),
STATUS(FieldType.INTEGER, FieldMode.NULLABLE),
TASK_NAME(FieldType.STRING, FieldMode.NULLABLE),
TASK_QUEUE_NAME(FieldType.STRING, FieldMode.NULLABLE),
URL_MAP_ENTRY(FieldType.STRING, FieldMode.NULLABLE),
USER_AGENT(FieldType.STRING, FieldMode.NULLABLE),
VERSION_ID(FieldType.STRING, FieldMode.NULLABLE),
APP_LOG_LINES(FieldType.RECORD, FieldMode.REPEATED,
ImmutableList.of(LOG_LEVEL, LOG_MESSAGE, TIME));
private final FieldType fieldType;
private final FieldMode fieldMode;
private final ImmutableList<LogsTableField> childFields;
LogsTableField(FieldType fieldType, FieldMode fieldMode) {
this(fieldType, fieldMode, ImmutableList.<LogsTableField>of());
}
LogsTableField(
FieldType fieldType, FieldMode fieldMode, ImmutableList<LogsTableField> childFields) {
this.fieldType = checkNotNull(fieldType);
this.fieldMode = checkNotNull(fieldMode);
this.childFields = checkNotNull(childFields);
}
/** Return the name of the field as it should appear in the Bigquery schema. */
String schemaName() {
return UPPER_UNDERSCORE.to(LOWER_UNDERSCORE, name());
}
/** Return the {@link TableFieldSchema} of this field for use in a Bigquery table. */
private TableFieldSchema getTableFieldSchema() {
TableFieldSchema tableFieldSchema = new TableFieldSchema()
.setName(schemaName())
.setType(fieldType.schemaName())
.setMode(fieldMode.schemaName());
if (!childFields.isEmpty()) {
tableFieldSchema.setFields(getSchema(childFields));
}
return tableFieldSchema;
}
/**
* Return the schema of a list of {@link TableFieldSchema} objects for use in a Bigquery table.
*/
private static List<TableFieldSchema> getSchema(Iterable<LogsTableField> fields) {
return FluentIterable.from(fields)
.transform(new Function<LogsTableField, TableFieldSchema>() {
@Override
public TableFieldSchema apply(LogsTableField field) {
return field.getTableFieldSchema();
}})
.toList();
}
/**
* Return the schema of this table for use in a Bigquery table.
*/
static List<TableFieldSchema> getTableSchema() {
List<LogsTableField> allFields = Arrays.asList(LogsTableField.values());
// Collect the list of all child fields so we can exclude them from the list of fields at the
// top level of the schema.
Set<LogsTableField> childFields = FluentIterable.from(allFields)
.transformAndConcat(new Function<LogsTableField, List<LogsTableField>>() {
@Override
public List<LogsTableField> apply(LogsTableField field) {
return field.childFields;
}})
.toSet();
Set<LogsTableField> topLevelFields =
Sets.difference(ImmutableSet.copyOf(allFields), childFields);
return getSchema(topLevelFields);
}
}
}

View file

@ -21,12 +21,8 @@ import static javax.servlet.http.HttpServletResponse.SC_BAD_REQUEST;
import static javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR;
import static javax.servlet.http.HttpServletResponse.SC_OK;
import com.google.api.client.extensions.appengine.http.UrlFetchTransport;
import com.google.api.client.googleapis.extensions.appengine.auth.oauth2.AppIdentityCredential;
import com.google.api.client.googleapis.json.GoogleJsonResponseException;
import com.google.api.client.json.jackson2.JacksonFactory;
import com.google.api.services.bigquery.Bigquery;
import com.google.api.services.bigquery.BigqueryScopes;
import com.google.api.services.bigquery.model.Table;
import com.google.api.services.bigquery.model.TableReference;
import com.google.api.services.bigquery.model.ViewDefinition;
@ -34,7 +30,6 @@ import com.google.appengine.api.taskqueue.TaskOptions;
import com.google.appengine.api.taskqueue.TaskOptions.Method;
import com.google.common.net.MediaType;
import com.google.domain.registry.bigquery.BigqueryFactory;
import com.google.domain.registry.bigquery.BigqueryHelper;
import com.google.domain.registry.config.RegistryEnvironment;
import com.google.domain.registry.util.FormattingLogger;
import com.google.domain.registry.util.NonFinalForTesting;
@ -62,8 +57,6 @@ public class UpdateSnapshotViewServlet extends HttpServlet {
private static final FormattingLogger logger = FormattingLogger.getLoggerForCallerClass();
private static final BigqueryHelper bigqueryHelper = new BigqueryHelper();
@NonFinalForTesting
private static BigqueryFactory bigqueryFactory = new BigqueryFactory();
@ -99,15 +92,9 @@ public class UpdateSnapshotViewServlet extends HttpServlet {
private String updateSnapshotView(String datasetId, String tableId, String kindName)
throws IOException {
Bigquery bigquery = bigqueryFactory.create(
getClass().getSimpleName(),
new UrlFetchTransport(),
new JacksonFactory(),
new AppIdentityCredential(BigqueryScopes.all()));
String projectId = ENVIRONMENT.config().getProjectId();
Bigquery bigquery =
bigqueryFactory.create(projectId, ENVIRONMENT.config().getLatestSnapshotDataset());
bigqueryHelper.ensureDataset(
bigquery, projectId, ENVIRONMENT.config().getLatestSnapshotDataset());
updateTable(bigquery, new Table()
.setTableReference(new TableReference()

View file

@ -14,30 +14,14 @@
package com.google.domain.registry.monitoring.whitebox;
import com.google.api.services.bigquery.model.TableFieldSchema;
import com.google.domain.registry.bigquery.BigquerySchemas;
import com.google.common.collect.ImmutableList;
import com.google.domain.registry.bigquery.BigqueryUtils.FieldType;
import com.google.domain.registry.model.eppoutput.Result.Code;
/** The EPP Metrics collector. See {@link Metrics}. */
public class EppMetrics extends Metrics {
public static final ImmutableList<TableFieldSchema> SCHEMA_FIELDS =
ImmutableList.<TableFieldSchema>of(
new TableFieldSchema().setName("requestId").setType(FieldType.STRING.name()),
new TableFieldSchema().setName("startTime").setType(FieldType.TIMESTAMP.name()),
new TableFieldSchema().setName("endTime").setType(FieldType.TIMESTAMP.name()),
new TableFieldSchema().setName("commandName").setType(FieldType.STRING.name()),
new TableFieldSchema().setName("clientId").setType(FieldType.STRING.name()),
new TableFieldSchema().setName("privilegeLevel").setType(FieldType.STRING.name()),
new TableFieldSchema().setName("eppTarget").setType(FieldType.STRING.name()),
new TableFieldSchema().setName("eppStatus").setType(FieldType.INTEGER.name()),
new TableFieldSchema().setName("attempts").setType(FieldType.INTEGER.name()));
public static final String TABLE_ID = "eppMetrics";
public EppMetrics() {
setTableId(TABLE_ID);
setTableId(BigquerySchemas.EPPMETRICS_TABLE_ID);
fields.put("attempts", 0);
}

View file

@ -14,19 +14,12 @@
package com.google.domain.registry.monitoring.whitebox;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.domain.registry.util.HttpServletUtils.getRequiredParameterValue;
import com.google.api.client.extensions.appengine.http.UrlFetchTransport;
import com.google.api.client.googleapis.extensions.appengine.auth.oauth2.AppIdentityCredential;
import com.google.api.client.json.jackson2.JacksonFactory;
import com.google.api.services.bigquery.Bigquery;
import com.google.api.services.bigquery.BigqueryScopes;
import com.google.api.services.bigquery.model.TableDataInsertAllRequest;
import com.google.api.services.bigquery.model.TableDataInsertAllResponse;
import com.google.api.services.bigquery.model.TableDataInsertAllResponse.InsertErrors;
import com.google.api.services.bigquery.model.TableFieldSchema;
import com.google.api.services.bigquery.model.TableReference;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.collect.FluentIterable;
@ -35,13 +28,11 @@ import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import com.google.domain.registry.bigquery.BigqueryFactory;
import com.google.domain.registry.bigquery.BigqueryHelper;
import com.google.domain.registry.config.RegistryEnvironment;
import com.google.domain.registry.util.FormattingLogger;
import com.google.domain.registry.util.NonFinalForTesting;
import java.io.IOException;
import java.util.Map;
import java.util.Set;
import javax.servlet.http.HttpServlet;
@ -59,25 +50,11 @@ public class MetricsTaskServlet extends HttpServlet {
private static final Set<String> SPECIAL_PARAMS = ImmutableSet.of("tableId", "insertId");
// Add any concrete Metric classes to this map or doPost() will throw IllegalArgumentException.
private static final Map<String, ImmutableList<TableFieldSchema>> KNOWN_TABLE_SCHEMAS =
ImmutableMap.of(EppMetrics.TABLE_ID, EppMetrics.SCHEMA_FIELDS);
// servlet level cross-request caches to avoid unnecessary RPCs.
@NonFinalForTesting
private static Set<String> knownTables = Sets.newConcurrentHashSet();
@NonFinalForTesting
private static Set<String> datasets = Sets.newConcurrentHashSet();
@NonFinalForTesting
private static BigqueryFactory bigqueryFactory = new BigqueryFactory();
@NonFinalForTesting
private static BigqueryHelper bigqueryHelper = new BigqueryHelper();
/** Returns a filtered {@link ImmutableMap} from an {@link HttpServletRequest} */
private static ImmutableMap<String, Object> getFiteredMapFromRequest(
private static ImmutableMap<String, Object> getFilteredMapFromRequest(
HttpServletRequest req,
Set<String> filter) {
ImmutableMap.Builder<String, Object> b = new ImmutableMap.Builder<>();
@ -97,35 +74,8 @@ public class MetricsTaskServlet extends HttpServlet {
public void doPost(HttpServletRequest req, HttpServletResponse rsp) throws IOException {
try {
final String tableId = getRequiredParameterValue(req, "tableId");
ImmutableMap<String, Object> fields = getFiteredMapFromRequest(req, SPECIAL_PARAMS);
ImmutableMap<String, Object> fields = getFilteredMapFromRequest(req, SPECIAL_PARAMS);
Bigquery bigquery = bigqueryFactory.create(PROJECT_ID, DATASET_ID, tableId);
final Bigquery bigquery = bigqueryFactory.create(
getClass().getSimpleName(),
new UrlFetchTransport(),
new JacksonFactory(),
new AppIdentityCredential(BigqueryScopes.all()));
// Note: it's safe for multiple threads to call this as the dataset will
// only be created once.
if (!datasets.contains(DATASET_ID)) {
bigqueryHelper.ensureDataset(bigquery, PROJECT_ID, DATASET_ID);
datasets.add(DATASET_ID);
}
checkArgument(KNOWN_TABLE_SCHEMAS.containsKey(tableId), "Unknown table ID: %s", tableId);
if (!knownTables.contains(tableId)) {
bigqueryHelper.ensureTable(
bigquery,
new TableReference()
.setDatasetId(DATASET_ID)
.setProjectId(PROJECT_ID)
.setTableId(tableId),
KNOWN_TABLE_SCHEMAS.get(tableId));
knownTables.add(tableId);
}
TableDataInsertAllResponse response = bigquery.tabledata()
.insertAll(

View file

@ -11,12 +11,16 @@ java_library(
resources = glob(["testdata/*"]),
deps = [
"//apiserving/discoverydata/bigquery:bigqueryv2",
"//java/com/google/api/client/http",
"//java/com/google/api/client/json",
"//java/com/google/common/collect",
"//java/com/google/domain/registry/bigquery",
"//java/com/google/domain/registry/util",
"//javatests/com/google/domain/registry/testing",
"//third_party/java/joda_time",
"//third_party/java/jsr305_annotations",
"//third_party/java/junit",
"//third_party/java/mockito",
"//third_party/java/truth",
],
)

View file

@ -0,0 +1,129 @@
// Copyright 2016 The Domain Registry Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.domain.registry.bigquery;
import static com.google.common.truth.Truth.assertThat;
import static com.google.domain.registry.bigquery.BigqueryUtils.FieldType.STRING;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import com.google.api.client.http.HttpRequestInitializer;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.json.JsonFactory;
import com.google.api.services.bigquery.Bigquery;
import com.google.api.services.bigquery.model.Dataset;
import com.google.api.services.bigquery.model.Table;
import com.google.api.services.bigquery.model.TableFieldSchema;
import com.google.api.services.bigquery.model.TableReference;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.domain.registry.testing.InjectRule;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
/** Unit tests for {@link BigqueryFactory}. */
@RunWith(MockitoJUnitRunner.class)
public class BigqueryFactoryTest {
@Rule
public final InjectRule inject = new InjectRule();
@Mock
private BigqueryFactory.Subfactory subfactory;
@Mock
private Bigquery bigquery;
@Mock
private Bigquery.Datasets bigqueryDatasets;
@Mock
private Bigquery.Datasets.Insert bigqueryDatasetsInsert;
@Mock
private Bigquery.Tables bigqueryTables;
@Mock
private Bigquery.Tables.Insert bigqueryTablesInsert;
@Before
public void before() throws Exception {
when(subfactory.create(
anyString(),
any(HttpTransport.class),
any(JsonFactory.class),
any(HttpRequestInitializer.class)))
.thenReturn(bigquery);
when(bigquery.datasets()).thenReturn(bigqueryDatasets);
when(bigqueryDatasets.insert(eq("Project-Id"), any(Dataset.class)))
.thenReturn(bigqueryDatasetsInsert);
when(bigquery.tables()).thenReturn(bigqueryTables);
when(bigqueryTables.insert(eq("Project-Id"), any(String.class), any(Table.class)))
.thenReturn(bigqueryTablesInsert);
BigquerySchemas.knownTableSchemas =
ImmutableMap.of(
"Table-Id",
ImmutableList.of(new TableFieldSchema().setName("column1").setType(STRING.name())));
}
@Test
public void testSuccess_datastoreCreation() throws Exception {
BigqueryFactory factory = new BigqueryFactory();
factory.subfactory = subfactory;
factory.create("Project-Id", "Dataset-Id");
ArgumentCaptor<Dataset> datasetArg = ArgumentCaptor.forClass(Dataset.class);
verify(bigqueryDatasets).insert(eq("Project-Id"), datasetArg.capture());
assertThat(datasetArg.getValue().getDatasetReference().getProjectId())
.isEqualTo("Project-Id");
assertThat(datasetArg.getValue().getDatasetReference().getDatasetId())
.isEqualTo("Dataset-Id");
verify(bigqueryDatasetsInsert).execute();
}
@Test
public void testSuccess_datastoreAndTableCreation() throws Exception {
BigqueryFactory factory = new BigqueryFactory();
factory.subfactory = subfactory;
factory.create("Project-Id", "Dataset-Id", "Table-Id");
ArgumentCaptor<Dataset> datasetArg = ArgumentCaptor.forClass(Dataset.class);
verify(bigqueryDatasets).insert(eq("Project-Id"), datasetArg.capture());
assertThat(datasetArg.getValue().getDatasetReference().getProjectId())
.isEqualTo("Project-Id");
assertThat(datasetArg.getValue().getDatasetReference().getDatasetId())
.isEqualTo("Dataset-Id");
verify(bigqueryDatasetsInsert).execute();
ArgumentCaptor<Table> tableArg = ArgumentCaptor.forClass(Table.class);
verify(bigqueryTables).insert(eq("Project-Id"), eq("Dataset-Id"), tableArg.capture());
TableReference ref = tableArg.getValue().getTableReference();
assertThat(ref.getProjectId()).isEqualTo("Project-Id");
assertThat(ref.getDatasetId()).isEqualTo("Dataset-Id");
assertThat(ref.getTableId()).isEqualTo("Table-Id");
assertThat(tableArg.getValue().getSchema().getFields())
.containsExactly(new TableFieldSchema().setName("column1").setType(STRING.name()));
verify(bigqueryTablesInsert).execute();
}
}

View file

@ -25,9 +25,6 @@ import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import com.google.api.client.http.HttpRequestInitializer;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.json.JsonFactory;
import com.google.api.services.bigquery.Bigquery;
import com.google.api.services.bigquery.model.Dataset;
import com.google.api.services.bigquery.model.Table;
@ -107,17 +104,11 @@ public class UpdateSnapshotViewServletTest {
@Before
public void before() throws Exception {
inject.setStaticField(UpdateSnapshotViewServlet.class, "bigqueryFactory", bigqueryFactory);
when(bigqueryFactory.create(anyString(), anyString())).thenReturn(bigquery);
when(req.getMethod()).thenReturn("POST");
when(rsp.getWriter()).thenReturn(new PrintWriter(httpOutput));
when(bigqueryFactory.create(
anyString(),
any(HttpTransport.class),
any(JsonFactory.class),
any(HttpRequestInitializer.class)))
.thenReturn(bigquery);
when(bigquery.datasets()).thenReturn(bigqueryDatasets);
when(bigqueryDatasets.insert(eq("Project-Id"), any(Dataset.class)))
.thenReturn(bigqueryDatasetsInsert);
@ -154,15 +145,6 @@ public class UpdateSnapshotViewServletTest {
servlet.service(req, rsp);
// Check that we attempted to create the latest_snapshot dataset.
ArgumentCaptor<Dataset> datasetArgument = ArgumentCaptor.forClass(Dataset.class);
verify(bigqueryDatasets).insert(eq("Project-Id"), datasetArgument.capture());
assertThat(datasetArgument.getValue().getDatasetReference().getProjectId())
.isEqualTo("Project-Id");
assertThat(datasetArgument.getValue().getDatasetReference().getDatasetId())
.isEqualTo("testdataset");
verify(bigqueryDatasetsInsert).execute();
// Check that we updated the view.
ArgumentCaptor<Table> tableArg = ArgumentCaptor.forClass(Table.class);
verify(bigqueryTables).update(

View file

@ -15,7 +15,6 @@
package com.google.domain.registry.monitoring.whitebox;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@ -28,12 +27,9 @@ import com.google.api.services.bigquery.Bigquery.Tabledata.InsertAll;
import com.google.api.services.bigquery.model.TableDataInsertAllRequest;
import com.google.api.services.bigquery.model.TableDataInsertAllResponse;
import com.google.api.services.bigquery.model.TableDataInsertAllResponse.InsertErrors;
import com.google.api.services.bigquery.model.TableFieldSchema;
import com.google.api.services.bigquery.model.TableReference;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.domain.registry.bigquery.BigqueryFactory;
import com.google.domain.registry.bigquery.BigqueryHelper;
import com.google.domain.registry.testing.AppEngineRule;
import com.google.domain.registry.testing.InjectRule;
@ -73,9 +69,6 @@ public class MetricsTaskServletTest {
@Mock
BigqueryFactory bigqueryFactory;
@Mock
BigqueryHelper bigqueryHelper;
@Mock
Bigquery bigquery;
@ -119,6 +112,8 @@ public class MetricsTaskServletTest {
when(rsp.getWriter()).thenReturn(new PrintWriter(httpOutput));
inject.setStaticField(MetricsTaskServlet.class, "bigqueryFactory", bigqueryFactory);
when(bigqueryFactory.create(anyString(), anyString(), anyString()))
.thenReturn(bigquery);
when(bigqueryFactory.create(
anyString(),
Matchers.any(HttpTransport.class),
@ -126,14 +121,6 @@ public class MetricsTaskServletTest {
Matchers.any(HttpRequestInitializer.class)))
.thenReturn(bigquery);
inject.setStaticField(MetricsTaskServlet.class, "bigqueryHelper", bigqueryHelper);
doNothing().when(bigqueryHelper).ensureDataset(Matchers.any(Bigquery.class), anyString(),
anyString());
doNothing().when(bigqueryHelper).ensureTable(
Matchers.any(Bigquery.class),
Matchers.any(TableReference.class),
Matchers.<ImmutableList<TableFieldSchema>>any());
when(bigquery.tabledata()).thenReturn(tabledata);
when(tabledata.insertAll(
anyString(),