Mirror of https://github.com/google/nomulus.git (synced 2025-05-15 00:47:11 +02:00)
Use @DefaultCredential for Cloud API access in GAE

This change completes the switch to @DefaultCredential for all use cases in GAE.
Impacted modules:
- IcannReporting
- CreateCdnsTld command
- LoadSnapshot command

Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=213511730

This commit is contained in:
parent 9bcd5579ef
commit 961e5cc7c7

20 changed files with 184 additions and 226 deletions
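As a quick orientation before the per-file hunks: the commit replaces hand-built credentials, transports, and JSON factories with a single injected application-default credential. The following is a condensed before/after sketch, assembled from the BigqueryModule and IcannReportingModule hunks below rather than quoted verbatim from any one file; @DefaultCredential and @Config are Nomulus's own Dagger qualifier annotations.

// Before: each call site wired up its own transport, JSON factory, and scoped credential.
GoogleCredential credential =
    GoogleCredential.getApplicationDefault(new NetHttpTransport(), new JacksonFactory())
        .createScoped(BigqueryScopes.all());
Bigquery bigquery =
    new Bigquery.Builder(credential.getTransport(), credential.getJsonFactory(), credential)
        .setApplicationName(projectId)
        .build();

// After: Dagger injects the shared application-default credential once, and providers reuse
// its transport and JSON factory.
@Provides
static Bigquery provideBigquery(
    @DefaultCredential GoogleCredential credential, @Config("projectId") String projectId) {
  return new Bigquery.Builder(credential.getTransport(), credential.getJsonFactory(), credential)
      .setApplicationName(projectId)
      .build();
}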
BigqueryConnection.java (google.registry.bigquery):

@@ -26,12 +26,8 @@ import static google.registry.bigquery.BigqueryUtils.toJobReferenceString;
 import static google.registry.config.RegistryConfig.getProjectId;
 import static org.joda.time.DateTimeZone.UTC;
 
-import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
 import com.google.api.client.googleapis.json.GoogleJsonResponseException;
 import com.google.api.client.http.AbstractInputStreamContent;
-import com.google.api.client.http.HttpRequestInitializer;
-import com.google.api.client.http.HttpTransport;
-import com.google.api.client.json.JsonFactory;
 import com.google.api.services.bigquery.Bigquery;
 import com.google.api.services.bigquery.model.Dataset;
 import com.google.api.services.bigquery.model.DatasetReference;
@@ -51,6 +47,7 @@ import com.google.api.services.bigquery.model.TableFieldSchema;
 import com.google.api.services.bigquery.model.TableReference;
 import com.google.api.services.bigquery.model.TableRow;
 import com.google.api.services.bigquery.model.ViewDefinition;
+import com.google.common.base.Throwables;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableTable;
 import com.google.common.flogger.FluentLogger;
@@ -73,6 +70,7 @@ import java.util.List;
 import java.util.Random;
 import java.util.concurrent.ExecutorService;
 import javax.annotation.Nullable;
+import javax.inject.Inject;
 import org.joda.time.DateTime;
 import org.joda.time.Duration;
 
@@ -96,20 +94,11 @@ public class BigqueryConnection implements AutoCloseable {
   private static final Duration TEMP_TABLE_TTL = Duration.standardHours(24);
 
   /** Bigquery client instance wrapped by this class. */
-  private Bigquery bigquery;
+  private final Bigquery bigquery;
 
   /** Executor service for bigquery jobs. */
   private ListeningExecutorService service;
 
-  /** Credential object to use for initializing HTTP requests to the bigquery API. */
-  private HttpRequestInitializer credential;
-
-  /** HTTP transport object to use for accessing bigquery API. */
-  private HttpTransport httpTransport;
-
-  /** JSON factory object to use for accessing bigquery API. */
-  private JsonFactory jsonFactory;
-
   /** Pseudo-randomness source to use for creating random table names. */
   private Random random = new Random();
 
@@ -122,12 +111,17 @@ public class BigqueryConnection implements AutoCloseable {
   /** Duration to wait between polls for job status. */
   private Duration pollInterval = Duration.millis(1000);
 
+  BigqueryConnection(Bigquery bigquery) {
+    this.bigquery = bigquery;
+  }
+
   /** Builder for a {@link BigqueryConnection}, since the latter is immutable once created. */
   public static class Builder {
     private BigqueryConnection instance;
 
-    public Builder() {
-      instance = new BigqueryConnection();
+    @Inject
+    Builder(Bigquery bigquery) {
+      instance = new BigqueryConnection(bigquery);
     }
 
     /**
@@ -139,13 +133,6 @@ public class BigqueryConnection implements AutoCloseable {
       return this;
     }
 
-    public Builder setCredential(GoogleCredential credential) {
-      instance.credential = checkNotNull(credential);
-      instance.httpTransport = credential.getTransport();
-      instance.jsonFactory = credential.getJsonFactory();
-      return this;
-    }
-
     public Builder setDatasetId(String datasetId) {
       instance.datasetId = checkNotNull(datasetId);
       return this;
@@ -167,7 +154,11 @@ public class BigqueryConnection implements AutoCloseable {
     public BigqueryConnection build() {
       try {
         checkNotNull(instance.service, "Must provide executor service");
+        instance.initialize();
         return instance;
+      } catch (Throwable e) {
+        Throwables.throwIfUnchecked(e);
+        throw new RuntimeException("Cannot initialize BigqueryConnection", e);
       } finally {
         // Clear the internal instance so you can't accidentally mutate it through this builder.
         instance = null;
@@ -306,13 +297,10 @@ public class BigqueryConnection implements AutoCloseable {
   }
 
   /**
-   * Initializes the BigqueryConnection object by setting up the API client and creating the
-   * default dataset if it doesn't exist.
+   * Initializes the BigqueryConnection object by setting up the API client and creating the default
+   * dataset if it doesn't exist.
    */
-  public BigqueryConnection initialize() throws Exception {
-    bigquery = new Bigquery.Builder(httpTransport, jsonFactory, credential)
-        .setApplicationName(getClass().getSimpleName())
-        .build();
+  private BigqueryConnection initialize() throws Exception {
    createDatasetIfNeeded(datasetId);
    createDatasetIfNeeded(TEMP_DATASET_NAME);
    return this;
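Net effect for callers: a BigqueryConnection can no longer be wired up by hand with a credential, transport, and JSON factory. Its Builder is constructor-injected with the Dagger-provided Bigquery client, and build() now performs the initialization that used to be a separate initialize() call. A minimal, hedged usage sketch (the dataset name is illustrative, not taken from the commit):

// `builder` is an injected BigqueryConnection.Builder; see the Dagger changes later in this diff.
BigqueryConnection connection =
    builder
        .setExecutorService(MoreExecutors.newDirectExecutorService())  // must be set before build()
        .setDatasetId("icann_reporting")  // illustrative; the real module uses its own constant
        .setOverwrite(true)
        .setPollInterval(Duration.standardSeconds(1))
        .build();  // build() now calls initialize() and creates missing datasets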
BigqueryModule.java (google.registry.bigquery):

@@ -14,20 +14,16 @@
 
 package google.registry.bigquery;
 
-import com.google.api.client.http.HttpRequestInitializer;
-import com.google.api.client.http.HttpTransport;
-import com.google.api.client.json.JsonFactory;
+import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
 import com.google.api.services.bigquery.Bigquery;
-import com.google.api.services.bigquery.BigqueryScopes;
 import com.google.api.services.bigquery.model.TableFieldSchema;
 import com.google.common.collect.ImmutableList;
 import dagger.Module;
 import dagger.Provides;
 import dagger.multibindings.Multibinds;
+import google.registry.config.CredentialModule.DefaultCredential;
 import google.registry.config.RegistryConfig.Config;
 import java.util.Map;
-import java.util.Set;
-import java.util.function.Function;
 
 /** Dagger module for Google {@link Bigquery} connection objects. */
 @Module
@@ -39,11 +35,8 @@ public abstract class BigqueryModule {
 
   @Provides
   static Bigquery provideBigquery(
-      HttpTransport transport,
-      JsonFactory jsonFactory,
-      Function<Set<String>, ? extends HttpRequestInitializer> credential,
-      @Config("projectId") String projectId) {
-    return new Bigquery.Builder(transport, jsonFactory, credential.apply(BigqueryScopes.all()))
+      @DefaultCredential GoogleCredential credential, @Config("projectId") String projectId) {
+    return new Bigquery.Builder(credential.getTransport(), credential.getJsonFactory(), credential)
        .setApplicationName(projectId)
        .build();
   }
BigqueryFactory.java, renamed to CheckedBigquery.java (google.registry.bigquery):

@@ -1,4 +1,4 @@
-// Copyright 2017 The Nomulus Authors. All Rights Reserved.
+// Copyright 2018 The Nomulus Authors. All Rights Reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -17,14 +17,7 @@ package google.registry.bigquery;
 import static com.google.common.base.Preconditions.checkArgument;
 import static com.google.common.collect.Sets.newConcurrentHashSet;
 
-import com.google.api.client.extensions.appengine.http.UrlFetchTransport;
-import com.google.api.client.googleapis.extensions.appengine.auth.oauth2.AppIdentityCredential;
-import com.google.api.client.http.HttpRequestInitializer;
-import com.google.api.client.http.HttpTransport;
-import com.google.api.client.json.JsonFactory;
-import com.google.api.client.json.jackson2.JacksonFactory;
 import com.google.api.services.bigquery.Bigquery;
-import com.google.api.services.bigquery.BigqueryScopes;
 import com.google.api.services.bigquery.model.Dataset;
 import com.google.api.services.bigquery.model.DatasetReference;
 import com.google.api.services.bigquery.model.Table;
@@ -39,8 +32,8 @@ import java.util.Map;
 import java.util.Set;
 import javax.inject.Inject;
 
-/** Factory for creating {@link Bigquery} connections. */
-public class BigqueryFactory {
+/** Wrapper of {@link Bigquery} with validation helpers. */
+public class CheckedBigquery {
 
   private static final FluentLogger logger = FluentLogger.forEnclosingClass();
 
@@ -49,45 +42,16 @@ public class BigqueryFactory {
   private static Set<String> knownExistingTables = newConcurrentHashSet();
 
   @Inject Map<String, ImmutableList<TableFieldSchema>> bigquerySchemas;
-  @Inject Subfactory subfactory;
-  @Inject BigqueryFactory() {}
-
-  /** This class is broken out solely so that it can be mocked inside of tests. */
-  static class Subfactory {
-
-    @Inject Subfactory() {}
-
-    public Bigquery create(
-        String applicationName,
-        HttpTransport transport,
-        JsonFactory jsonFactory,
-        HttpRequestInitializer httpRequestInitializer) {
-      return new Bigquery.Builder(transport, jsonFactory, httpRequestInitializer)
-          .setApplicationName(applicationName)
-          .build();
-    }
-  }
-
-  /** Returns a new connection to BigQuery. */
-  public Bigquery create(
-      String applicationName,
-      HttpTransport transport,
-      JsonFactory jsonFactory,
-      HttpRequestInitializer httpRequestInitializer) {
-    return subfactory.create(applicationName, transport, jsonFactory, httpRequestInitializer);
-  }
+  @Inject Bigquery bigquery;
+
+  @Inject
+  CheckedBigquery() {}
 
   /**
    * Returns a new connection to Bigquery, first ensuring that the given dataset exists in the
    * project with the given id, creating it if required.
    */
-  public Bigquery create(String projectId, String datasetId) throws IOException {
-    Bigquery bigquery = create(
-        getClass().getSimpleName(),
-        new UrlFetchTransport(),
-        new JacksonFactory(),
-        new AppIdentityCredential(BigqueryScopes.all()));
-
+  public Bigquery ensureDataSetExists(String projectId, String datasetId) throws IOException {
     // Note: it's safe for multiple threads to call this as the dataset will only be created once.
     if (!knownExistingDatasets.contains(datasetId)) {
       ensureDataset(bigquery, projectId, datasetId);
@@ -101,9 +65,9 @@ public class BigqueryFactory {
   * Returns a new connection to Bigquery, first ensuring that the given dataset and table exist in
   * project with the given id, creating them if required.
   */
-  public Bigquery create(String projectId, String datasetId, String tableId)
+  public Bigquery ensureDataSetAndTableExist(String projectId, String datasetId, String tableId)
      throws IOException {
-    Bigquery bigquery = create(projectId, datasetId);
+    ensureDataSetExists(projectId, datasetId);
    checkArgument(bigquerySchemas.containsKey(tableId), "Unknown table ID: %s", tableId);
 
    if (!knownExistingTables.contains(tableId)) {
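The renamed class is consumed by injection rather than by constructing clients. A hedged consumer sketch, modeled on the MetricsExportAction and LoadSnapshotAction hunks further down (the class, dataset, and table names are placeholders, not from the commit):

public class SomeExportAction implements Runnable {  // hypothetical action class
  @Inject CheckedBigquery checkedBigquery;
  @Inject @Config("projectId") String projectId;

  @Override
  public void run() {
    try {
      // Ensures the dataset and table exist, then returns the shared injected Bigquery client.
      Bigquery bigquery =
          checkedBigquery.ensureDataSetAndTableExist(projectId, "some_dataset", "some_table");
      // ... issue load jobs or queries against `bigquery` ...
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }
}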
Config YAML (default configuration):

@@ -274,6 +274,11 @@ misc:
   # hosts from being used on domains.
   asyncDeleteDelaySeconds: 90
 
+cloudDns:
+  # CloudDns testing config. Set both properties to null in Production.
+  rootUrl: https://staging-www.sandbox.googleapis.com
+  servicePath: dns/v2beta1_staging/projects/
+
 beam:
   # The default zone to run Apache Beam (Cloud Dataflow) jobs in.
   defaultJobZone: us-east1-c
Config YAML (configuration with placeholder values):

@@ -57,5 +57,9 @@ registrarConsole:
 misc:
   sheetExportId: placeholder
 
+cloudDns:
+  rootUrl: null
+  servicePath: null
+
 kms:
   projectId: placeholder
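The new CloudDnsConfigTest near the end of this diff reads these YAML keys back through ConfigModule.getCloudDnsRootUrl and getCloudDnsServicePath, expecting Optional.empty() for Production and Sandbox (where the values are null) and the staging values elsewhere. A sketch of what such config getters could look like; the RegistryConfigSettings field layout shown here is an assumption, not taken from this commit:

// Assumed shape: config.cloudDns.rootUrl / config.cloudDns.servicePath mirror the YAML keys.
static Optional<String> getCloudDnsRootUrl(RegistryConfigSettings config) {
  return Optional.ofNullable(config.cloudDns.rootUrl);  // null in Production/Sandbox -> empty()
}

static Optional<String> getCloudDnsServicePath(RegistryConfigSettings config) {
  return Optional.ofNullable(config.cloudDns.servicePath);
}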
LoadSnapshotAction.java:

@@ -34,9 +34,9 @@ import com.google.common.base.Splitter;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.flogger.FluentLogger;
-import google.registry.bigquery.BigqueryFactory;
 import google.registry.bigquery.BigqueryUtils.SourceFormat;
 import google.registry.bigquery.BigqueryUtils.WriteDisposition;
+import google.registry.bigquery.CheckedBigquery;
 import google.registry.config.RegistryConfig.Config;
 import google.registry.export.BigqueryPollJobAction.BigqueryPollJobEnqueuer;
 import google.registry.request.Action;
@@ -70,7 +70,7 @@ public class LoadSnapshotAction implements Runnable {
 
   private static final FluentLogger logger = FluentLogger.forEnclosingClass();
 
-  @Inject BigqueryFactory bigqueryFactory;
+  @Inject CheckedBigquery checkedBigquery;
   @Inject BigqueryPollJobEnqueuer bigqueryPollEnqueuer;
   @Inject Clock clock;
   @Inject @Config("projectId") String projectId;
@@ -109,7 +109,7 @@ public class LoadSnapshotAction implements Runnable {
 
   private String loadSnapshot(String snapshotId, String gcsFilename, Iterable<String> kinds)
      throws IOException {
-    Bigquery bigquery = bigqueryFactory.create(projectId, SNAPSHOTS_DATASET);
+    Bigquery bigquery = checkedBigquery.ensureDataSetExists(projectId, SNAPSHOTS_DATASET);
    DateTime now = clock.nowUtc();
    String loadMessage =
        String.format("Loading Datastore snapshot %s from %s...", snapshotId, gcsFilename);
UpdateSnapshotViewAction.java:

@@ -24,7 +24,7 @@ import com.google.api.services.bigquery.model.ViewDefinition;
 import com.google.appengine.api.taskqueue.TaskOptions;
 import com.google.appengine.api.taskqueue.TaskOptions.Method;
 import com.google.common.flogger.FluentLogger;
-import google.registry.bigquery.BigqueryFactory;
+import google.registry.bigquery.CheckedBigquery;
 import google.registry.config.RegistryConfig.Config;
 import google.registry.request.Action;
 import google.registry.request.HttpException.InternalServerErrorException;
@@ -69,7 +69,7 @@ public class UpdateSnapshotViewAction implements Runnable {
   @Config("projectId")
   String projectId;
 
-  @Inject BigqueryFactory bigqueryFactory;
+  @Inject CheckedBigquery checkedBigquery;
 
   @Inject
   UpdateSnapshotViewAction() {}
@@ -106,7 +106,7 @@ public class UpdateSnapshotViewAction implements Runnable {
      SqlTemplate viewQueryTemplate)
      throws IOException {
 
-    Bigquery bigquery = bigqueryFactory.create(projectId, viewDataset);
+    Bigquery bigquery = checkedBigquery.ensureDataSetExists(projectId, viewDataset);
    updateTable(
        bigquery,
        new Table()
MetricsExportAction.java:

@@ -28,7 +28,7 @@ import com.google.common.collect.ImmutableListMultimap;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.flogger.FluentLogger;
-import google.registry.bigquery.BigqueryFactory;
+import google.registry.bigquery.CheckedBigquery;
 import google.registry.config.RegistryConfig.Config;
 import google.registry.request.Action;
 import google.registry.request.Parameter;
@@ -54,7 +54,8 @@ public class MetricsExportAction implements Runnable {
   @Inject @Parameter("tableId") String tableId;
   @Inject @Parameter("insertId") String insertId;
   @Inject @Config("projectId") String projectId;
-  @Inject BigqueryFactory bigqueryFactory;
+  @Inject CheckedBigquery checkedBigquery;
   @Inject @ParameterMap ImmutableListMultimap<String, String> parameters;
   @Inject MetricsExportAction() {}
 
@@ -62,7 +63,8 @@ public class MetricsExportAction implements Runnable {
   @Override
   public void run() {
     try {
-      Bigquery bigquery = bigqueryFactory.create(projectId, DATASET_ID, tableId);
+      Bigquery bigquery =
+          checkedBigquery.ensureDataSetAndTableExist(projectId, DATASET_ID, tableId);
      // Filter out the special parameters that the Action is called with. Everything that's left
      // is returned in a Map that is suitable to pass to Bigquery as row data.
      Map<String, Object> jsonRows =
IcannReportingModule.java:

@@ -18,9 +18,6 @@ import static google.registry.request.RequestParameters.extractOptionalEnumParam
 import static google.registry.request.RequestParameters.extractOptionalParameter;
 import static java.lang.annotation.RetentionPolicy.RUNTIME;
 
-import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
-import com.google.api.client.http.HttpTransport;
-import com.google.api.client.json.jackson2.JacksonFactory;
 import com.google.common.collect.ImmutableList;
 import com.google.common.util.concurrent.MoreExecutors;
 import dagger.Module;
@@ -54,7 +51,6 @@ public final class IcannReportingModule {
   static final String DATASTORE_EXPORT_DATA_SET = "latest_datastore_export";
   static final String MANIFEST_FILE_NAME = "MANIFEST.txt";
   private static final String DEFAULT_SUBDIR = "icann/monthly";
-  private static final String BIGQUERY_SCOPE = "https://www.googleapis.com/auth/cloud-platform";
 
   /** Provides an optional subdirectory to store/upload reports to, extracted from the request. */
   @Provides
@@ -103,19 +99,16 @@ public final class IcannReportingModule {
   * @see google.registry.tools.BigqueryParameters for justifications of defaults.
   */
  @Provides
-  static BigqueryConnection provideBigqueryConnection(HttpTransport transport) {
+  static BigqueryConnection provideBigqueryConnection(
+      BigqueryConnection.Builder bigQueryConnectionBuilder) {
    try {
-      GoogleCredential credential = GoogleCredential
-          .getApplicationDefault(transport, new JacksonFactory());
      BigqueryConnection connection =
-          new BigqueryConnection.Builder()
+          bigQueryConnectionBuilder
              .setExecutorService(MoreExecutors.newDirectExecutorService())
-              .setCredential(credential.createScoped(ImmutableList.of(BIGQUERY_SCOPE)))
              .setDatasetId(ICANN_REPORTING_DATA_SET)
              .setOverwrite(true)
              .setPollInterval(Duration.standardSeconds(1))
              .build();
-      connection.initialize();
      return connection;
    } catch (Throwable e) {
      throw new RuntimeException("Could not initialize BigqueryConnection!", e);
BigqueryCommand.java (google.registry.tools):

@@ -16,6 +16,8 @@ package google.registry.tools;
 
 import com.beust.jcommander.ParametersDelegate;
 import google.registry.bigquery.BigqueryConnection;
+import javax.inject.Inject;
+import javax.inject.Provider;
 
 /** A {@link Command} that uses the bigquery client API. */
 abstract class BigqueryCommand implements Command {
@@ -28,9 +30,12 @@ abstract class BigqueryCommand implements Command {
   /** Connection object for interacting with the Bigquery API. */
   private BigqueryConnection bigquery;
 
+  @Inject Provider<BigqueryConnection.Builder> bigQueryConnectionBuilderProvider;
+
   @Override
   public void run() throws Exception {
-    try (BigqueryConnection autoClosingBigquery = bigqueryParameters.newConnection()) {
+    try (BigqueryConnection autoClosingBigquery =
+        bigqueryParameters.newConnection(bigQueryConnectionBuilderProvider.get())) {
      bigquery = autoClosingBigquery;
      runWithBigquery();
    }
BigqueryParameters.java (google.registry.tools):

@@ -16,13 +16,7 @@ package google.registry.tools;
 
 import com.beust.jcommander.Parameter;
 import com.beust.jcommander.Parameters;
-import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
-import com.google.api.client.http.HttpTransport;
-import com.google.api.client.http.javanet.NetHttpTransport;
-import com.google.api.client.json.JsonFactory;
-import com.google.api.client.json.jackson2.JacksonFactory;
 import google.registry.bigquery.BigqueryConnection;
-import java.io.IOException;
 import java.util.concurrent.Executors;
 import org.joda.time.Duration;
 
@@ -57,31 +51,15 @@ final class BigqueryParameters {
      description = "Number of threads for running simultaneous BigQuery operations.")
   private int bigqueryNumThreads = DEFAULT_NUM_THREADS;
 
-  private static final HttpTransport HTTP_TRANSPORT = new NetHttpTransport();
-  private static final JsonFactory JSON_FACTORY = new JacksonFactory();
-
   /** Returns a new BigqueryConnection constructed according to the delegate's flag settings. */
-  BigqueryConnection newConnection() throws Exception {
-    BigqueryConnection connection = new BigqueryConnection.Builder()
-        .setExecutorService(Executors.newFixedThreadPool(bigqueryNumThreads))
-        .setCredential(newCredential())
+  BigqueryConnection newConnection(BigqueryConnection.Builder connectionBuilder) throws Exception {
+    BigqueryConnection connection =
+        connectionBuilder
+            .setExecutorService(Executors.newFixedThreadPool(bigqueryNumThreads))
        .setDatasetId(bigqueryDataset)
        .setOverwrite(bigqueryOverwrite)
        .setPollInterval(bigqueryPollInterval)
        .build();
-    connection.initialize();
    return connection;
  }
-
-  /** Creates a credential object for the Bigquery client using application default credentials. */
-  private GoogleCredential newCredential() {
-    try {
-      return GoogleCredential.getApplicationDefault(HTTP_TRANSPORT, JSON_FACTORY);
-    } catch (IOException e) {
-      throw new RuntimeException(
-          "Could not obtain application default credentials - "
-              + "did you remember to run 'gcloud auth application-default login'?",
-          e);
-    }
-  }
 }
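Putting the two tool-side changes together: the command gets a Dagger Provider for the connection builder and hands it to the flag-driven parameters object. A condensed sketch of the resulting flow, paraphrased from the BigqueryCommand and BigqueryParameters hunks above rather than quoted verbatim:

// Inside a BigqueryCommand subclass run(); names match the hunks above.
try (BigqueryConnection connection =
    bigqueryParameters.newConnection(bigQueryConnectionBuilderProvider.get())) {
  // The builder already carries the injected Bigquery client built from the application
  // default credential, so the tool no longer calls GoogleCredential.getApplicationDefault()
  // itself; running locally presumably still needs `gcloud auth application-default login`.
  runWithBigquery();
}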
CreateCdnsTld.java (google.registry.tools):

@@ -14,27 +14,21 @@
 
 package google.registry.tools;
 
+import static com.google.common.base.Verify.verify;
+
 import com.beust.jcommander.Parameter;
 import com.beust.jcommander.Parameters;
-import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
-import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
-import com.google.api.client.http.HttpTransport;
-import com.google.api.client.json.JsonFactory;
-import com.google.api.client.json.jackson2.JacksonFactory;
 import com.google.api.services.dns.Dns;
 import com.google.api.services.dns.model.ManagedZone;
 import com.google.api.services.dns.model.ManagedZoneDnsSecConfig;
-import com.google.common.annotations.VisibleForTesting;
 import google.registry.config.RegistryConfig.Config;
 import java.io.IOException;
-import java.security.GeneralSecurityException;
-import java.util.Arrays;
 import java.util.stream.Collectors;
 import javax.annotation.Nullable;
 import javax.inject.Inject;
 
 @Parameters(separators = " =", commandDescription = "Create a Managed Zone for a TLD in Cloud DNS.")
-class CreateCdnsTld extends ConfirmingCommand {
+final class CreateCdnsTld extends ConfirmingCommand {
 
   @Parameter(names = "--description", description = "Description of the new TLD.")
   String description;
@@ -57,6 +51,8 @@ class CreateCdnsTld extends ConfirmingCommand {
   @Config("projectId")
   String projectId;
 
+  @Inject Dns dnsService;
+
   private static final String KEY_VALUE_FORMAT = " %s = %s";
 
   private ManagedZone managedZone;
@@ -96,41 +92,19 @@ class CreateCdnsTld extends ConfirmingCommand {
   }
 
   @Override
-  public String execute() throws IOException, GeneralSecurityException {
-    Dns dnsService = createDnsService();
+  public String execute() throws IOException {
+    validateDnsService();
    Dns.ManagedZones.Create request = dnsService.managedZones().create(projectId, managedZone);
    ManagedZone response = request.execute();
    return String.format("Created managed zone: %s", response);
  }
 
-  @VisibleForTesting
-  Dns createDnsService() throws IOException, GeneralSecurityException {
-    // TODO(b/67367533): We should be obtaining the Dns instance from CloudDnsWriter module. But
-    // to do this cleanly we need to refactor everything down to the credential object. Having
-    // done that, this method will go away and this class will become final.
-    HttpTransport httpTransport = GoogleNetHttpTransport.newTrustedTransport();
-    JsonFactory jsonFactory = JacksonFactory.getDefaultInstance();
-
-    GoogleCredential credential = GoogleCredential.getApplicationDefault();
-    if (credential.createScopedRequired()) {
-      credential =
-          credential.createScoped(
-              Arrays.asList(
-                  "https://www.googleapis.com/auth/cloud-platform",
-                  "https://www.googleapis.com/auth/cloud-platform.read-only",
-                  "https://www.googleapis.com/auth/ndev.clouddns.readonly",
-                  "https://www.googleapis.com/auth/ndev.clouddns.readwrite"));
-    }
-
-    Dns.Builder builder =
-        new Dns.Builder(httpTransport, jsonFactory, credential).setApplicationName(projectId);
+  private void validateDnsService() {
+    // Sanity check to ensure only Production and Sandbox points to the CloudDns prod site.
    if (RegistryToolEnvironment.get() != RegistryToolEnvironment.PRODUCTION
        && RegistryToolEnvironment.get() != RegistryToolEnvironment.SANDBOX) {
-      builder
-          .setRootUrl("https://staging-www.sandbox.googleapis.com")
-          .setServicePath("dns/v2beta1_staging/projects/");
+      verify(!Dns.DEFAULT_ROOT_URL.equals(dnsService.getRootUrl()));
+      verify(!Dns.DEFAULT_SERVICE_PATH.equals(dnsService.getServicePath()));
    }
-
-    return builder.build();
  }
 }
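With the Dns client injected and the staging rootUrl/servicePath moved into the YAML config above, the command only verifies that non-production environments are not pointed at the Cloud DNS production endpoint, and the class can become final. One practical consequence shows up on the test side (cf. the CreateCdnsTldTest hunk near the end of this diff): no fake subclass is needed anymore, a mock is assigned directly. A small hedged sketch:

// Test-side pattern after this change; `mockDnsService` is a stand-in for the test's mock.
CreateCdnsTld command = new CreateCdnsTld();
command.projectId = "test-project";
command.dnsService = mockDnsService;  // e.g. Mockito.mock(Dns.class)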
RegistryToolComponent.java (google.registry.tools):

@@ -16,6 +16,7 @@ package google.registry.tools;
 
 import com.google.monitoring.metrics.MetricWriter;
 import dagger.Component;
+import google.registry.bigquery.BigqueryModule;
 import google.registry.config.CredentialModule;
 import google.registry.config.RegistryConfig.ConfigModule;
 import google.registry.dns.writer.VoidDnsWriterModule;
@@ -53,6 +54,7 @@ import javax.inject.Singleton;
      // TODO(b/36866706): Find a way to replace this with a command-line friendly version
      AppIdentityCredentialModule.class,
      AuthModule.class,
+      BigqueryModule.class,
      ConfigModule.class,
      CredentialModule.class,
      DatastoreServiceModule.class,
@@ -99,6 +101,7 @@ interface RegistryToolComponent {
   void inject(GetKeyringSecretCommand command);
   void inject(GhostrydeCommand command);
   void inject(ListCursorsCommand command);
+  void inject(LoadSnapshotCommand command);
   void inject(LockDomainCommand command);
   void inject(LoginCommand command);
   void inject(LogoutCommand command);
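This component change is what makes the injected Bigquery graph reachable from the nomulus tool: BigqueryModule is registered and LoadSnapshotCommand becomes injectable. A trimmed sketch of the resulting declaration, showing only the parts visible in this diff (the full module and inject lists are elided):

@Component(
    modules = {
      // ... other modules elided ...
      BigqueryModule.class,  // added: binds Bigquery from the @DefaultCredential
      ConfigModule.class,
      CredentialModule.class,
    })
interface RegistryToolComponent {
  void inject(LoadSnapshotCommand command);  // added so the command can receive its bindings
  // ... other inject methods elided ...
}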
BigqueryFactoryTest.java, renamed to CheckedBigqueryTest.java (google.registry.bigquery):

@@ -1,4 +1,4 @@
-// Copyright 2017 The Nomulus Authors. All Rights Reserved.
+// Copyright 2018 The Nomulus Authors. All Rights Reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -17,15 +17,11 @@ package google.registry.bigquery;
 import static com.google.common.truth.Truth.assertThat;
 import static google.registry.bigquery.BigqueryUtils.FieldType.STRING;
 import static org.mockito.Matchers.any;
-import static org.mockito.Matchers.anyString;
 import static org.mockito.Matchers.eq;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;
 
-import com.google.api.client.http.HttpRequestInitializer;
-import com.google.api.client.http.HttpTransport;
-import com.google.api.client.json.JsonFactory;
 import com.google.api.services.bigquery.Bigquery;
 import com.google.api.services.bigquery.model.Dataset;
 import com.google.api.services.bigquery.model.Table;
@@ -39,11 +35,10 @@ import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;
 import org.mockito.ArgumentCaptor;
 
-/** Unit tests for {@link BigqueryFactory}. */
+/** Unit tests for {@link CheckedBigquery}. */
 @RunWith(JUnit4.class)
-public class BigqueryFactoryTest {
+public class CheckedBigqueryTest {
 
-  private final BigqueryFactory.Subfactory subfactory = mock(BigqueryFactory.Subfactory.class);
   private final Bigquery bigquery = mock(Bigquery.class);
   private final Bigquery.Datasets bigqueryDatasets = mock(Bigquery.Datasets.class);
   private final Bigquery.Datasets.Insert bigqueryDatasetsInsert =
@@ -51,25 +46,19 @@ public class BigqueryFactoryTest {
   private final Bigquery.Tables bigqueryTables = mock(Bigquery.Tables.class);
   private final Bigquery.Tables.Insert bigqueryTablesInsert = mock(Bigquery.Tables.Insert.class);
 
-  private BigqueryFactory factory;
+  private CheckedBigquery checkedBigquery;
 
   @Before
   public void before() throws Exception {
-    when(subfactory.create(
-            anyString(),
-            any(HttpTransport.class),
-            any(JsonFactory.class),
-            any(HttpRequestInitializer.class)))
-        .thenReturn(bigquery);
    when(bigquery.datasets()).thenReturn(bigqueryDatasets);
    when(bigqueryDatasets.insert(eq("Project-Id"), any(Dataset.class)))
        .thenReturn(bigqueryDatasetsInsert);
    when(bigquery.tables()).thenReturn(bigqueryTables);
    when(bigqueryTables.insert(eq("Project-Id"), any(String.class), any(Table.class)))
        .thenReturn(bigqueryTablesInsert);
-    factory = new BigqueryFactory();
-    factory.subfactory = subfactory;
-    factory.bigquerySchemas =
+    checkedBigquery = new CheckedBigquery();
+    checkedBigquery.bigquery = bigquery;
+    checkedBigquery.bigquerySchemas =
        new ImmutableMap.Builder<String, ImmutableList<TableFieldSchema>>()
            .put(
                "Table-Id",
@@ -82,7 +71,7 @@ public class BigqueryFactoryTest {
 
  @Test
  public void testSuccess_datastoreCreation() throws Exception {
-    factory.create("Project-Id", "Dataset-Id");
+    checkedBigquery.ensureDataSetExists("Project-Id", "Dataset-Id");
 
    ArgumentCaptor<Dataset> datasetArg = ArgumentCaptor.forClass(Dataset.class);
    verify(bigqueryDatasets).insert(eq("Project-Id"), datasetArg.capture());
@@ -95,7 +84,7 @@ public class BigqueryFactoryTest {
 
  @Test
  public void testSuccess_datastoreAndTableCreation() throws Exception {
-    factory.create("Project-Id", "Dataset2", "Table2");
+    checkedBigquery.ensureDataSetAndTableExist("Project-Id", "Dataset2", "Table2");
 
    ArgumentCaptor<Dataset> datasetArg = ArgumentCaptor.forClass(Dataset.class);
    verify(bigqueryDatasets).insert(eq("Project-Id"), datasetArg.capture());
BUILD file:

@@ -12,6 +12,7 @@ java_library(
    srcs = glob(["*.java"]),
    deps = [
        "//java/google/registry/config",
+        "@com_google_auto_value",
        "@com_google_guava",
        "@com_google_truth",
        "@com_google_truth_extensions_truth_java8_extension",
javatests/google/registry/config/CloudDnsConfigTest.java (new file, 72 lines):

@@ -0,0 +1,72 @@
+// Copyright 2018 The Nomulus Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package google.registry.config;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import google.registry.config.RegistryConfig.ConfigModule;
+import java.util.Collection;
+import java.util.Map;
+import java.util.Optional;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameter;
+import org.junit.runners.Parameterized.Parameters;
+
+/** Unit tests for environment-specific CloudDns configurations. */
+@RunWith(Parameterized.class)
+public class CloudDnsConfigTest {
+
+  @Parameters
+  public static final Collection<RegistryEnvironment> environments() {
+    return ImmutableList.copyOf(RegistryEnvironment.values());
+  }
+
+  private static final ImmutableList<Optional<String>> TEST_CONFIGS =
+      ImmutableList.of(
+          Optional.of("https://staging-www.sandbox.googleapis.com"),
+          Optional.of("dns/v2beta1_staging/projects/"));
+
+  private static final ImmutableList<Optional<String>> PROD_CONFIGS =
+      ImmutableList.of(Optional.empty(), Optional.empty());
+
+  private static final Map<RegistryEnvironment, ImmutableList<Optional<String>>> data =
+      ImmutableMap.of(
+          RegistryEnvironment.PRODUCTION, PROD_CONFIGS,
+          RegistryEnvironment.SANDBOX, PROD_CONFIGS);
+
+  @Parameter public RegistryEnvironment environment;
+
+  private RegistryConfigSettings registryConfigSettings;
+
+  @Before
+  public void setup() {
+    System.setProperty(RegistryEnvironment.PROPERTY, environment.name());
+    registryConfigSettings = YamlUtils.getConfigSettings();
+  }
+
+  @Test
+  public void test() {
+    ImmutableList<Optional<String>> expectedConfigs = data.getOrDefault(environment, TEST_CONFIGS);
+    assertThat(ConfigModule.getCloudDnsRootUrl(registryConfigSettings))
+        .isEqualTo(expectedConfigs.get(0));
+    assertThat(ConfigModule.getCloudDnsServicePath(registryConfigSettings))
+        .isEqualTo(expectedConfigs.get(1));
+  }
+}
LoadSnapshotActionTest.java:

@@ -40,7 +40,7 @@ import com.google.api.services.bigquery.model.JobReference;
 import com.google.appengine.api.taskqueue.QueueFactory;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Iterables;
-import google.registry.bigquery.BigqueryFactory;
+import google.registry.bigquery.CheckedBigquery;
 import google.registry.export.BigqueryPollJobAction.BigqueryPollJobEnqueuer;
 import google.registry.request.HttpException.BadRequestException;
 import google.registry.request.HttpException.InternalServerErrorException;
@@ -65,7 +65,7 @@ public class LoadSnapshotActionTest {
   public final AppEngineRule appEngine = AppEngineRule.builder()
      .withTaskQueue()
      .build();
-  private final BigqueryFactory bigqueryFactory = mock(BigqueryFactory.class);
+  private final CheckedBigquery checkedBigquery = mock(CheckedBigquery.class);
   private final Bigquery bigquery = mock(Bigquery.class);
   private final Bigquery.Jobs bigqueryJobs = mock(Bigquery.Jobs.class);
   private final Bigquery.Jobs.Insert bigqueryJobsInsert = mock(Bigquery.Jobs.Insert.class);
@@ -79,14 +79,14 @@ public class LoadSnapshotActionTest {
 
  @Before
  public void before() throws Exception {
-    when(bigqueryFactory.create("Project-Id", "snapshots")).thenReturn(bigquery);
+    when(checkedBigquery.ensureDataSetExists("Project-Id", "snapshots")).thenReturn(bigquery);
    when(bigquery.jobs()).thenReturn(bigqueryJobs);
    when(bigqueryJobs.insert(eq("Project-Id"), any(Job.class))).thenReturn(bigqueryJobsInsert);
    when(bigquery.datasets()).thenReturn(bigqueryDatasets);
    when(bigqueryDatasets.insert(eq("Project-Id"), any(Dataset.class)))
        .thenReturn(bigqueryDatasetsInsert);
    action = new LoadSnapshotAction();
-    action.bigqueryFactory = bigqueryFactory;
+    action.checkedBigquery = checkedBigquery;
    action.bigqueryPollEnqueuer = bigqueryPollEnqueuer;
    action.clock = clock;
    action.projectId = "Project-Id";
@@ -113,9 +113,9 @@ public class LoadSnapshotActionTest {
  public void testSuccess_doPost() throws Exception {
    action.run();
 
-    // Verify that bigqueryFactory was called in a way that would create the dataset if it didn't
+    // Verify that checkedBigquery was called in a way that would create the dataset if it didn't
    // already exist.
-    verify(bigqueryFactory).create("Project-Id", "snapshots");
+    verify(checkedBigquery).ensureDataSetExists("Project-Id", "snapshots");
 
    // Capture the load jobs we inserted to do additional checking on them.
    ArgumentCaptor<Job> jobArgument = ArgumentCaptor.forClass(Job.class);
UpdateSnapshotViewActionTest.java:

@@ -34,7 +34,7 @@ import com.google.api.services.bigquery.Bigquery;
 import com.google.api.services.bigquery.model.Dataset;
 import com.google.api.services.bigquery.model.Table;
 import com.google.common.collect.Iterables;
-import google.registry.bigquery.BigqueryFactory;
+import google.registry.bigquery.CheckedBigquery;
 import google.registry.request.HttpException.InternalServerErrorException;
 import google.registry.testing.AppEngineRule;
 import google.registry.testing.TaskQueueHelper.TaskMatcher;
@@ -55,7 +55,7 @@ public class UpdateSnapshotViewActionTest {
   public final AppEngineRule appEngine = AppEngineRule.builder()
      .withTaskQueue()
      .build();
-  private final BigqueryFactory bigqueryFactory = mock(BigqueryFactory.class);
+  private final CheckedBigquery checkedBigquery = mock(CheckedBigquery.class);
   private final Bigquery bigquery = mock(Bigquery.class);
   private final Bigquery.Datasets bigqueryDatasets = mock(Bigquery.Datasets.class);
   private final Bigquery.Datasets.Insert bigqueryDatasetsInsert =
@@ -67,7 +67,7 @@ public class UpdateSnapshotViewActionTest {
 
  @Before
  public void before() throws Exception {
-    when(bigqueryFactory.create(anyString(), anyString())).thenReturn(bigquery);
+    when(checkedBigquery.ensureDataSetExists(anyString(), anyString())).thenReturn(bigquery);
    when(bigquery.datasets()).thenReturn(bigqueryDatasets);
    when(bigqueryDatasets.insert(anyString(), any(Dataset.class)))
        .thenReturn(bigqueryDatasetsInsert);
@@ -76,7 +76,7 @@ public class UpdateSnapshotViewActionTest {
        .thenReturn(bigqueryTablesUpdate);
 
    action = new UpdateSnapshotViewAction();
-    action.bigqueryFactory = bigqueryFactory;
+    action.checkedBigquery = checkedBigquery;
    action.datasetId = "some_dataset";
    action.kindName = "fookind";
    action.projectId = "myproject";
@@ -99,10 +99,12 @@ public class UpdateSnapshotViewActionTest {
  public void testSuccess_doPost() throws Exception {
    action.run();
 
-    InOrder factoryOrder = inOrder(bigqueryFactory);
+    InOrder factoryOrder = inOrder(checkedBigquery);
    // Check that the BigQuery factory was called in such a way that the dataset would be created
    // if it didn't already exist.
-    factoryOrder.verify(bigqueryFactory).create("myproject", "latest_datastore_export");
+    factoryOrder
+        .verify(checkedBigquery)
+        .ensureDataSetExists("myproject", "latest_datastore_export");
 
    // Check that we updated both views
    InOrder tableOrder = inOrder(bigqueryTables);
MetricsExportActionTest.java:

@@ -20,9 +20,6 @@ import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;
 
-import com.google.api.client.http.HttpRequestInitializer;
-import com.google.api.client.http.HttpTransport;
-import com.google.api.client.json.JsonFactory;
 import com.google.api.services.bigquery.Bigquery;
 import com.google.api.services.bigquery.Bigquery.Tabledata;
 import com.google.api.services.bigquery.Bigquery.Tabledata.InsertAll;
@@ -31,7 +28,7 @@ import com.google.api.services.bigquery.model.TableDataInsertAllResponse;
 import com.google.api.services.bigquery.model.TableDataInsertAllResponse.InsertErrors;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableListMultimap;
-import google.registry.bigquery.BigqueryFactory;
+import google.registry.bigquery.CheckedBigquery;
 import google.registry.testing.AppEngineRule;
 import org.junit.Before;
 import org.junit.Rule;
@@ -48,7 +45,7 @@ public class MetricsExportActionTest {
   public final AppEngineRule appEngine =
      AppEngineRule.builder().withDatastore().withTaskQueue().build();
 
-  private final BigqueryFactory bigqueryFactory = mock(BigqueryFactory.class);
+  private final CheckedBigquery checkedBigquery = mock(CheckedBigquery.class);
   private final Bigquery bigquery = mock(Bigquery.class);
   private final Tabledata tabledata = mock(Tabledata.class);
   private final InsertAll insertAll = mock(InsertAll.class);
@@ -69,13 +66,8 @@ public class MetricsExportActionTest {
 
  @Before
  public void setup() throws Exception {
-    when(bigqueryFactory.create(anyString(), anyString(), anyString())).thenReturn(bigquery);
-    when(bigqueryFactory.create(
-            anyString(),
-            Matchers.any(HttpTransport.class),
-            Matchers.any(JsonFactory.class),
-            Matchers.any(HttpRequestInitializer.class)))
-        .thenReturn(bigquery);
+    when(checkedBigquery.ensureDataSetAndTableExist(anyString(), anyString(), anyString()))
+        .thenReturn(bigquery);
 
    when(bigquery.tabledata()).thenReturn(tabledata);
    when(tabledata.insertAll(
@@ -84,7 +76,7 @@ public class MetricsExportActionTest {
            anyString(),
            Matchers.any(TableDataInsertAllRequest.class))).thenReturn(insertAll);
    action = new MetricsExportAction();
-    action.bigqueryFactory = bigqueryFactory;
+    action.checkedBigquery = checkedBigquery;
    action.insertId = "insert id";
    action.parameters = parameters;
    action.projectId = "project id";
CreateCdnsTldTest.java:

@@ -41,16 +41,9 @@ public class CreateCdnsTldTest extends CommandTestCase<CreateCdnsTld> {
  public void setUp() throws Exception {
    when(dnsService.managedZones()).thenReturn(managedZones);
    when(managedZones.create(projectId.capture(), requestBody.capture())).thenReturn(request);
-    command = new CreateCdnsTldForTest();
+    command = new CreateCdnsTld();
    command.projectId = "test-project";
-  }
-
-  /** Fake the command class so we can override createDnsService() */
-  class CreateCdnsTldForTest extends CreateCdnsTld {
-    @Override
-    Dns createDnsService() {
-      return dnsService;
-    }
+    command.dnsService = dnsService;
  }
 
  private ManagedZone createZone(