Mirror of https://github.com/google/nomulus.git, synced 2025-05-13 16:07:15 +02:00
Backup Datastore using the Admin REST API
Add server endpoints to back up Datastore using the managed-export mechanism. A cron job is defined in the alpha environment to run daily exports using this implementation. The existing backup is left running. The new backups are saved to a new set of locations:
- GCS bucket: gs://PROJECT-ID-datastore-backups
- BigQuery dataset: datastore_backups
- BigQuery latest-backup view name: latest_datastore_backup

The names of the BigQuery tables now use the export timestamp assigned by Datastore, e.g., 2018_12_05T23_56_18_50532_ContactResource.

After the new import mechanism is implemented and the backup-restore flow is tested, we will stop the existing backup runs and deploy the new implementation to all environments.

-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=224932957
parent ea154a8378
commit 9c706e79fd
27 changed files with 1179 additions and 37 deletions
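The table-naming convention mentioned in the commit message follows from the ID sanitization done in UploadDatastoreBackupAction later in this diff. A minimal runnable sketch of the mapping is below; the raw export ID used as input is an assumed example value, not taken from the commit.

// Sketch only: mirrors UploadDatastoreBackupAction.sanitizeForBigquery from this diff.
// BigQuery table names may only contain letters, digits, and underscores, so every other
// character of the Datastore-assigned export ID is replaced with '_' before the kind name
// is appended.
public class TableNameSketch {
  static String sanitizeForBigquery(String backupId) {
    return backupId.replaceAll("[^a-zA-Z0-9_]", "_");
  }

  public static void main(String[] args) {
    String exportId = "2018-12-05T23:56:18_50532"; // assumed Datastore export ID format
    String kind = "ContactResource";
    // Prints: 2018_12_05T23_56_18_50532_ContactResource
    System.out.println(String.format("%s_%s", sanitizeForBigquery(exportId), kind));
  }
}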
@@ -40,7 +40,21 @@ import javax.inject.Singleton;
 @Module
 public abstract class CredentialModule {
 
-  /** Provides the default {@link GoogleCredential} from the Google Cloud runtime. */
+  /**
+   * Provides the default {@link GoogleCredential} from the Google Cloud runtime.
+   *
+   * <p>The credential returned depends on the runtime environment:
+   *
+   * <ul>
+   *   <li>On AppEngine, returns the service account credential for
+   *       PROJECT_ID@appspot.gserviceaccount.com
+   *   <li>On Compute Engine, returns the service account credential for
+   *       PROJECT_NUMBER-compute@developer.gserviceaccount.com
+   *   <li>On end user host, this returns the credential downloaded by gcloud. Please refer to <a
+   *       href="https://cloud.google.com/sdk/gcloud/reference/auth/application-default/login">Cloud
+   *       SDK documentation</a> for details.
+   * </ul>
+   */
   @DefaultCredential
   @Provides
   @Singleton
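The expanded javadoc above documents what the default credential resolves to in each runtime. A hypothetical consumer-side sketch of how such a qualified credential is typically injected with Dagger follows; the consumer class is invented, and the import of the DefaultCredential qualifier assumes it is the nested annotation in CredentialModule.

import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
import google.registry.config.CredentialModule.DefaultCredential; // assumed qualifier location
import javax.inject.Inject;

// Hypothetical consumer, shown only to illustrate use of the qualifier.
class DefaultCredentialConsumer {
  private final GoogleCredential credential;

  @Inject
  DefaultCredentialConsumer(@DefaultCredential GoogleCredential credential) {
    // On AppEngine this is the PROJECT_ID@appspot.gserviceaccount.com service account;
    // on a developer workstation it is the application-default credential from gcloud.
    this.credential = credential;
  }
}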
@@ -1384,6 +1384,15 @@ public final class RegistryConfig {
     return getProjectId() + "-snapshots";
   }
 
+  /**
+   * Returns the Google Cloud Storage bucket for storing Datastore backups.
+   *
+   * @see google.registry.export.BackupDatastoreAction
+   */
+  public static String getDatastoreBackupsBucket() {
+    return "gs://" + getProjectId() + "-datastore-backups";
+  }
+
   /**
    * Number of sharded commit log buckets.
    *
@@ -142,6 +142,21 @@
     <target>backend</target>
   </cron>
 
+  <cron>
+    <url><![CDATA[/_dr/cron/fanout?queue=export-snapshot&endpoint=/_dr/task/backupDatastore&runInEmpty]]></url>
+    <description>
+      This job fires off a Datastore managed-export job that generates snapshot files in GCS.
+      It also enqueues a new task to wait on the completion of that job and then load the resulting
+      snapshot into bigquery.
+    </description>
+    <!--
+      Keep google.registry.export.CheckBackupAction.MAXIMUM_BACKUP_RUNNING_TIME less than
+      this interval.
+    -->
+    <schedule>every day 07:00</schedule>
+    <target>backend</target>
+  </cron>
+
   <cron>
     <url><![CDATA[/_dr/cron/fanout?queue=export-snapshot&endpoint=/_dr/task/exportSnapshot&runInEmpty]]></url>
     <description>
@@ -174,6 +174,24 @@
     <url-pattern>/_dr/dnsRefresh</url-pattern>
   </servlet-mapping>
 
+  <!-- Exports a Datastore backup snapshot to GCS. -->
+  <servlet-mapping>
+    <servlet-name>backend-servlet</servlet-name>
+    <url-pattern>/_dr/task/backupDatastore</url-pattern>
+  </servlet-mapping>
+
+  <!-- Checks the completion of a Datastore backup snapshot. -->
+  <servlet-mapping>
+    <servlet-name>backend-servlet</servlet-name>
+    <url-pattern>/_dr/task/checkDatastoreBackup</url-pattern>
+  </servlet-mapping>
+
+  <!-- Loads a Datastore backup snapshot into BigQuery. -->
+  <servlet-mapping>
+    <servlet-name>backend-servlet</servlet-name>
+    <url-pattern>/_dr/task/uploadDatastoreBackup</url-pattern>
+  </servlet-mapping>
+
   <!-- Exports a Datastore backup snapshot to GCS. -->
   <servlet-mapping>
     <servlet-name>backend-servlet</servlet-name>
@@ -10,6 +10,7 @@ java_library(
     deps = [
         "//java/google/registry/bigquery",
         "//java/google/registry/config",
+        "//java/google/registry/export/datastore",
         "//java/google/registry/gcs",
         "//java/google/registry/groups",
         "//java/google/registry/mapreduce",
java/google/registry/export/BackupDatastoreAction.java (new file, 86 lines)
@@ -0,0 +1,86 @@
// Copyright 2018 The Nomulus Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package google.registry.export;

import static google.registry.export.CheckBackupAction.enqueuePollTask;
import static google.registry.request.Action.Method.POST;

import com.google.common.flogger.FluentLogger;
import google.registry.config.RegistryConfig;
import google.registry.export.datastore.DatastoreAdmin;
import google.registry.export.datastore.Operation;
import google.registry.request.Action;
import google.registry.request.HttpException.InternalServerErrorException;
import google.registry.request.Response;
import google.registry.request.auth.Auth;
import javax.inject.Inject;

/**
 * Action to trigger a Datastore backup job that writes a snapshot to Google Cloud Storage. This
 * class is introduced as an experimental feature, and will eventually replace {@link
 * ExportSnapshotAction}.
 *
 * <p>This is the first step of a four step workflow for exporting snapshots, with each step calling
 * the next upon successful completion:
 *
 * <ol>
 *   <li>The snapshot is exported to Google Cloud Storage (this action).
 *   <li>The {@link CheckBackupAction} polls until the export is completed.
 *   <li>The {@link UploadDatastoreBackupAction} uploads the data from GCS to BigQuery.
 *   <li>The {@link UpdateSnapshotViewAction} updates the view in latest_datastore_export.
 * </ol>
 */
@Action(
    path = BackupDatastoreAction.PATH,
    method = POST,
    automaticallyPrintOk = true,
    auth = Auth.AUTH_INTERNAL_ONLY)
public class BackupDatastoreAction implements Runnable {

  private static final FluentLogger logger = FluentLogger.forEnclosingClass();

  /** Queue to use for enqueuing the task that will actually launch the backup. */
  static final String QUEUE = "export-snapshot"; // See queue.xml.

  static final String PATH = "/_dr/task/backupDatastore"; // See web.xml.

  @Inject DatastoreAdmin datastoreAdmin;
  @Inject Response response;

  @Inject
  BackupDatastoreAction() {}

  @Override
  public void run() {
    try {
      Operation backup =
          datastoreAdmin
              .export(RegistryConfig.getDatastoreBackupsBucket(), ExportConstants.getBackupKinds())
              .execute();

      String backupName = backup.getName();
      // Enqueue a poll task to monitor the backup and load REPORTING-related kinds into bigquery.
      enqueuePollTask(backupName, ExportConstants.getReportingKinds());
      String message =
          String.format(
              "Datastore backup started with name: %s\nSaving to %s",
              backupName, backup.getExportFolderUrl());
      logger.atInfo().log(message);
      response.setPayload(message);
    } catch (Throwable e) {
      throw new InternalServerErrorException("Exception occurred while backing up datastore.", e);
    }
  }
}
java/google/registry/export/CheckBackupAction.java (new file, 189 lines)
@@ -0,0 +1,189 @@
// Copyright 2018 The Nomulus Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package google.registry.export;

import static com.google.common.collect.Sets.intersection;
import static google.registry.export.UploadDatastoreBackupAction.enqueueUploadBackupTask;
import static google.registry.request.Action.Method.GET;
import static google.registry.request.Action.Method.POST;
import static javax.servlet.http.HttpServletResponse.SC_NOT_FOUND;

import com.google.api.client.googleapis.json.GoogleJsonResponseException;
import com.google.appengine.api.taskqueue.QueueFactory;
import com.google.appengine.api.taskqueue.TaskHandle;
import com.google.appengine.api.taskqueue.TaskOptions;
import com.google.appengine.api.taskqueue.TaskOptions.Method;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import com.google.common.flogger.FluentLogger;
import google.registry.export.datastore.DatastoreAdmin;
import google.registry.export.datastore.Operation;
import google.registry.request.Action;
import google.registry.request.HttpException;
import google.registry.request.HttpException.BadRequestException;
import google.registry.request.HttpException.InternalServerErrorException;
import google.registry.request.HttpException.NoContentException;
import google.registry.request.HttpException.NotModifiedException;
import google.registry.request.Parameter;
import google.registry.request.RequestMethod;
import google.registry.request.Response;
import google.registry.request.auth.Auth;
import google.registry.util.Clock;
import java.io.IOException;
import java.util.Set;
import javax.inject.Inject;
import org.joda.time.Duration;
import org.joda.time.PeriodType;
import org.joda.time.format.PeriodFormat;

/**
 * Action that checks the status of a snapshot, and if complete, trigger loading it into BigQuery.
 */
@Action(
    path = CheckBackupAction.PATH,
    method = {POST, GET},
    automaticallyPrintOk = true,
    auth = Auth.AUTH_INTERNAL_ONLY)
public class CheckBackupAction implements Runnable {

  /** Parameter names for passing parameters into this action. */
  static final String CHECK_BACKUP_NAME_PARAM = "name";

  static final String CHECK_BACKUP_KINDS_TO_LOAD_PARAM = "kindsToLoad";

  /** Action-specific details needed for enqueuing tasks against itself. */
  static final String QUEUE = "export-snapshot-poll"; // See queue.xml.

  static final String PATH = "/_dr/task/checkDatastoreBackup"; // See web.xml.
  static final Duration POLL_COUNTDOWN = Duration.standardMinutes(2);

  /** The maximum amount of time we allow a backup to run before abandoning it. */
  static final Duration MAXIMUM_BACKUP_RUNNING_TIME = Duration.standardHours(20);

  private static final FluentLogger logger = FluentLogger.forEnclosingClass();

  @Inject DatastoreAdmin datastoreAdmin;
  @Inject Clock clock;
  @Inject Response response;
  @Inject @RequestMethod Action.Method requestMethod;

  @Inject
  @Parameter(CHECK_BACKUP_NAME_PARAM)
  String backupName;

  @Inject
  @Parameter(CHECK_BACKUP_KINDS_TO_LOAD_PARAM)
  String kindsToLoadParam;

  @Inject
  CheckBackupAction() {}

  @Override
  public void run() {
    try {
      if (requestMethod == POST) {
        checkAndLoadBackupIfComplete();
      } else {
        // This is a GET request.
        // TODO(weiminyu): consider moving this functionality to Registry tool.
        response.setPayload(getExportStatus().toPrettyString());
      }
    } catch (HttpException e) {
      // Rethrow and let caller propagate status code and error message to the response.
      // See google.registry.request.RequestHandler#handleRequest.
      throw e;
    } catch (Throwable e) {
      throw new InternalServerErrorException(
          String.format("Exception occurred while checking datastore exports."), e);
    }
  }

  private Operation getExportStatus() throws IOException {
    try {
      return datastoreAdmin.get(backupName).execute();
    } catch (GoogleJsonResponseException e) {
      if (e.getStatusCode() == SC_NOT_FOUND) {
        String message = String.format("Bad backup name %s: %s", backupName, e.getMessage());
        // TODO(b/19081569): Ideally this would return a 2XX error so the task would not be
        // retried but we might abandon backups that start late and haven't yet written to
        // Datastore. We could fix that by replacing this with a two-phase polling strategy.
        throw new BadRequestException(message, e);
      }
      throw e;
    }
  }

  private void checkAndLoadBackupIfComplete() throws IOException {
    Set<String> kindsToLoad = ImmutableSet.copyOf(Splitter.on(',').split(kindsToLoadParam));
    Operation backup = getExportStatus();

    if (backup.isProcessing()
        && backup.getRunningTime(clock).isShorterThan(MAXIMUM_BACKUP_RUNNING_TIME)) {
      // Backup might still be running, so send a 304 to have the task retry.
      throw new NotModifiedException(
          String.format(
              "Datastore backup %s still in progress: %s", backupName, backup.getProgress()));
    }
    if (!backup.isSuccessful()) {
      // Declare the backup a lost cause, and send 204 No Content so the task will
      // not be retried.
      String message =
          String.format(
              "Datastore backup %s abandoned - not complete after %s. Progress: %s",
              backupName,
              PeriodFormat.getDefault()
                  .print(
                      backup
                          .getRunningTime(clock)
                          .toPeriod()
                          .normalizedStandard(PeriodType.dayTime().withMillisRemoved())),
              backup.getProgress());
      throw new NoContentException(message);
    }

    String backupId = backup.getExportId();
    // Log a warning if kindsToLoad is not a subset of the exported kinds.
    if (!backup.getKinds().containsAll(kindsToLoad)) {
      logger.atWarning().log(
          "Kinds to load included non-exported kinds: %s",
          Sets.difference(kindsToLoad, backup.getKinds()));
    }
    // Load kinds from the backup, limited to those also in kindsToLoad (if it's present).
    ImmutableSet<String> exportedKindsToLoad =
        ImmutableSet.copyOf(intersection(backup.getKinds(), kindsToLoad));
    String message = String.format("Datastore backup %s complete - ", backupName);
    if (exportedKindsToLoad.isEmpty()) {
      message += "no kinds to load into BigQuery";
    } else {
      enqueueUploadBackupTask(backupId, backup.getExportFolderUrl(), exportedKindsToLoad);
      message += "BigQuery load task enqueued";
    }
    logger.atInfo().log(message);
    response.setPayload(message);
  }

  /** Enqueue a poll task to monitor the named backup for completion. */
  static TaskHandle enqueuePollTask(String backupId, ImmutableSet<String> kindsToLoad) {
    return QueueFactory.getQueue(QUEUE)
        .add(
            TaskOptions.Builder.withUrl(PATH)
                .method(Method.POST)
                .countdownMillis(POLL_COUNTDOWN.getMillis())
                .param(CHECK_BACKUP_NAME_PARAM, backupId)
                .param(CHECK_BACKUP_KINDS_TO_LOAD_PARAM, Joiner.on(',').join(kindsToLoad)));
  }
}
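CheckBackupAction's polling contract can be summarized outside the request-handling machinery: an export that is still running and under the 20-hour limit yields HTTP 304 so the task retries later; anything else that is not successful is abandoned with HTTP 204; a successful export triggers the BigQuery load. The standalone sketch below restates that decision logic; the enum and function names are invented for illustration and are not part of the diff.

// Illustrative restatement of CheckBackupAction's polling decision (not part of the diff).
import java.time.Duration;

public class PollDecisionSketch {
  enum Decision { RETRY_LATER, ABANDON, LOAD_INTO_BIGQUERY }

  // Mirrors MAXIMUM_BACKUP_RUNNING_TIME above.
  static final Duration MAXIMUM_BACKUP_RUNNING_TIME = Duration.ofHours(20);

  static Decision decide(boolean processing, Duration runningTime, boolean successful) {
    if (processing && runningTime.compareTo(MAXIMUM_BACKUP_RUNNING_TIME) < 0) {
      return Decision.RETRY_LATER;        // the action throws NotModifiedException (HTTP 304)
    }
    if (!successful) {
      return Decision.ABANDON;            // the action throws NoContentException (HTTP 204)
    }
    return Decision.LOAD_INTO_BIGQUERY;   // the action enqueues the upload task
  }

  public static void main(String[] args) {
    System.out.println(decide(true, Duration.ofMinutes(30), false)); // RETRY_LATER
    System.out.println(decide(true, Duration.ofHours(21), false));   // ABANDON
    System.out.println(decide(false, Duration.ofMinutes(45), true)); // LOAD_INTO_BIGQUERY
  }
}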
@@ -25,6 +25,8 @@ import static google.registry.export.LoadSnapshotAction.LOAD_SNAPSHOT_KINDS_PARAM;
 import static google.registry.export.UpdateSnapshotViewAction.UPDATE_SNAPSHOT_DATASET_ID_PARAM;
 import static google.registry.export.UpdateSnapshotViewAction.UPDATE_SNAPSHOT_KIND_PARAM;
 import static google.registry.export.UpdateSnapshotViewAction.UPDATE_SNAPSHOT_TABLE_ID_PARAM;
+import static google.registry.export.UpdateSnapshotViewAction.UPDATE_SNAPSHOT_VIEWNAME_PARAM;
+import static google.registry.export.UploadDatastoreBackupAction.UPLOAD_BACKUP_FOLDER_PARAM;
 import static google.registry.request.RequestParameters.extractRequiredHeader;
 import static google.registry.request.RequestParameters.extractRequiredParameter;

@@ -56,12 +58,24 @@ public final class ExportRequestModule {
     return extractRequiredParameter(req, UPDATE_SNAPSHOT_KIND_PARAM);
   }
 
+  @Provides
+  @Parameter(UPDATE_SNAPSHOT_VIEWNAME_PARAM)
+  static String provideUpdateSnapshotViewName(HttpServletRequest req) {
+    return extractRequiredParameter(req, UPDATE_SNAPSHOT_VIEWNAME_PARAM);
+  }
+
   @Provides
   @Parameter(LOAD_SNAPSHOT_FILE_PARAM)
   static String provideLoadSnapshotFile(HttpServletRequest req) {
     return extractRequiredParameter(req, LOAD_SNAPSHOT_FILE_PARAM);
   }
 
+  @Provides
+  @Parameter(UPLOAD_BACKUP_FOLDER_PARAM)
+  static String provideSnapshotUrlPrefix(HttpServletRequest req) {
+    return extractRequiredParameter(req, UPLOAD_BACKUP_FOLDER_PARAM);
+  }
+
   @Provides
   @Parameter(LOAD_SNAPSHOT_ID_PARAM)
   static String provideLoadSnapshotId(HttpServletRequest req) {
@@ -64,6 +64,8 @@ public class LoadSnapshotAction implements Runnable {
 
   static final String SNAPSHOTS_DATASET = "snapshots";
 
+  static final String LATEST_SNAPSHOT_VIEW_NAME = "latest_datastore_export";
+
   /** Servlet-specific details needed for enqueuing tasks against itself. */
   static final String QUEUE = "export-snapshot"; // See queue.xml.
   static final String PATH = "/_dr/task/loadSnapshot"; // See web.xml.

@@ -131,7 +133,7 @@ public class LoadSnapshotAction implements Runnable {
       // well-known view in BigQuery to point at the newly loaded snapshot table for this kind.
       bigqueryPollEnqueuer.enqueuePollTask(
           jobRef,
-          createViewUpdateTask(SNAPSHOTS_DATASET, tableId, kindName),
+          createViewUpdateTask(SNAPSHOTS_DATASET, tableId, kindName, LATEST_SNAPSHOT_VIEW_NAME),
           getQueue(UpdateSnapshotViewAction.QUEUE));
 
       builder.append(String.format(" - %s:%s\n", projectId, jobId));
@@ -43,10 +43,12 @@ public class UpdateSnapshotViewAction implements Runnable {
 
   static final String UPDATE_SNAPSHOT_TABLE_ID_PARAM = "table";
   static final String UPDATE_SNAPSHOT_KIND_PARAM = "kind";
+  static final String UPDATE_SNAPSHOT_VIEWNAME_PARAM = "viewname";
 
-  private static final String TARGET_DATASET_NAME = "latest_datastore_export";
-
   /** Servlet-specific details needed for enqueuing tasks against itself. */
+  // For now this queue is shared by the backup workflows started by both ExportSnapshotAction
+  // and BackupDatastoreAction.
+  // TODO(weiminyu): update queue name (snapshot->backup) after ExportSnapshot flow is removed.
   static final String QUEUE = "export-snapshot-update-view"; // See queue.xml.
 
   static final String PATH = "/_dr/task/updateSnapshotView"; // See web.xml.

@@ -65,6 +67,10 @@ public class UpdateSnapshotViewAction implements Runnable {
   @Parameter(UPDATE_SNAPSHOT_KIND_PARAM)
   String kindName;
 
+  @Inject
+  @Parameter(UPDATE_SNAPSHOT_VIEWNAME_PARAM)
+  String viewName;
+
   @Inject
   @Config("projectId")
   String projectId;

@@ -75,12 +81,14 @@ public class UpdateSnapshotViewAction implements Runnable {
   UpdateSnapshotViewAction() {}
 
   /** Create a task for updating a snapshot view. */
-  static TaskOptions createViewUpdateTask(String datasetId, String tableId, String kindName) {
+  static TaskOptions createViewUpdateTask(
+      String datasetId, String tableId, String kindName, String viewName) {
     return TaskOptions.Builder.withUrl(PATH)
         .method(Method.POST)
         .param(UPDATE_SNAPSHOT_DATASET_ID_PARAM, datasetId)
        .param(UPDATE_SNAPSHOT_TABLE_ID_PARAM, tableId)
-        .param(UPDATE_SNAPSHOT_KIND_PARAM, kindName);
+        .param(UPDATE_SNAPSHOT_KIND_PARAM, kindName)
+        .param(UPDATE_SNAPSHOT_VIEWNAME_PARAM, viewName);
   }
 
   @Override

@@ -89,12 +97,10 @@ public class UpdateSnapshotViewAction implements Runnable {
       SqlTemplate sqlTemplate =
           SqlTemplate.create(
               "#standardSQL\nSELECT * FROM `%PROJECT%.%SOURCE_DATASET%.%SOURCE_TABLE%`");
-      updateSnapshotView(datasetId, tableId, kindName, TARGET_DATASET_NAME, sqlTemplate);
+      updateSnapshotView(datasetId, tableId, kindName, viewName, sqlTemplate);
     } catch (Throwable e) {
       throw new InternalServerErrorException(
-          String.format(
-              "Could not update snapshot view %s for table %s", TARGET_DATASET_NAME, tableId),
-          e);
+          String.format("Could not update snapshot view %s for table %s", viewName, tableId), e);
     }
   }
 
java/google/registry/export/UploadDatastoreBackupAction.java (new file, 181 lines)
@@ -0,0 +1,181 @@
// Copyright 2018 The Nomulus Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package google.registry.export;

import static com.google.appengine.api.taskqueue.QueueFactory.getQueue;
import static com.google.common.base.MoreObjects.firstNonNull;
import static google.registry.export.UpdateSnapshotViewAction.createViewUpdateTask;
import static google.registry.request.Action.Method.POST;

import com.google.api.services.bigquery.Bigquery;
import com.google.api.services.bigquery.model.Job;
import com.google.api.services.bigquery.model.JobConfiguration;
import com.google.api.services.bigquery.model.JobConfigurationLoad;
import com.google.api.services.bigquery.model.JobReference;
import com.google.api.services.bigquery.model.TableReference;
import com.google.appengine.api.taskqueue.TaskHandle;
import com.google.appengine.api.taskqueue.TaskOptions;
import com.google.appengine.api.taskqueue.TaskOptions.Method;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.flogger.FluentLogger;
import google.registry.bigquery.BigqueryUtils.SourceFormat;
import google.registry.bigquery.BigqueryUtils.WriteDisposition;
import google.registry.bigquery.CheckedBigquery;
import google.registry.config.RegistryConfig.Config;
import google.registry.export.BigqueryPollJobAction.BigqueryPollJobEnqueuer;
import google.registry.request.Action;
import google.registry.request.HttpException.BadRequestException;
import google.registry.request.HttpException.InternalServerErrorException;
import google.registry.request.Parameter;
import google.registry.request.auth.Auth;
import java.io.IOException;
import javax.inject.Inject;

/** Action to load a Datastore backup from Google Cloud Storage into BigQuery. */
@Action(path = UploadDatastoreBackupAction.PATH, method = POST, auth = Auth.AUTH_INTERNAL_ONLY)
public class UploadDatastoreBackupAction implements Runnable {

  /** Parameter names for passing parameters into the servlet. */
  static final String UPLOAD_BACKUP_ID_PARAM = "id";

  static final String UPLOAD_BACKUP_FOLDER_PARAM = "folder";
  static final String UPLOAD_BACKUP_KINDS_PARAM = "kinds";

  static final String BACKUP_DATASET = "datastore_backups";

  /** Servlet-specific details needed for enqueuing tasks against itself. */
  static final String QUEUE = "export-snapshot"; // See queue.xml.

  static final String LATEST_BACKUP_VIEW_NAME = "latest_datastore_backup";

  static final String PATH = "/_dr/task/uploadDatastoreBackup"; // See web.xml.

  private static final FluentLogger logger = FluentLogger.forEnclosingClass();

  @Inject CheckedBigquery checkedBigquery;
  @Inject BigqueryPollJobEnqueuer bigqueryPollEnqueuer;
  @Inject @Config("projectId") String projectId;

  @Inject
  @Parameter(UPLOAD_BACKUP_FOLDER_PARAM)
  String backupFolderUrl;

  @Inject
  @Parameter(UPLOAD_BACKUP_ID_PARAM)
  String backupId;

  @Inject
  @Parameter(UPLOAD_BACKUP_KINDS_PARAM)
  String backupKinds;

  @Inject
  UploadDatastoreBackupAction() {}

  /** Enqueue a task for starting a backup load. */
  public static TaskHandle enqueueUploadBackupTask(
      String backupId, String gcsFile, ImmutableSet<String> kinds) {
    return getQueue(QUEUE)
        .add(
            TaskOptions.Builder.withUrl(PATH)
                .method(Method.POST)
                .param(UPLOAD_BACKUP_ID_PARAM, backupId)
                .param(UPLOAD_BACKUP_FOLDER_PARAM, gcsFile)
                .param(UPLOAD_BACKUP_KINDS_PARAM, Joiner.on(',').join(kinds)));
  }

  @Override
  public void run() {
    try {
      String message = uploadBackup(backupId, backupFolderUrl, Splitter.on(',').split(backupKinds));
      logger.atInfo().log("Loaded backup successfully: %s", message);
    } catch (Throwable e) {
      logger.atSevere().withCause(e).log("Error loading backup");
      if (e instanceof IllegalArgumentException) {
        throw new BadRequestException("Error calling load backup: " + e.getMessage(), e);
      } else {
        throw new InternalServerErrorException(
            "Error loading backup: " + firstNonNull(e.getMessage(), e.toString()));
      }
    }
  }

  private String uploadBackup(String backupId, String backupFolderUrl, Iterable<String> kinds)
      throws IOException {
    Bigquery bigquery = checkedBigquery.ensureDataSetExists(projectId, BACKUP_DATASET);
    String loadMessage =
        String.format("Loading Datastore backup %s from %s...", backupId, backupFolderUrl);
    logger.atInfo().log(loadMessage);

    String sanitizedBackupId = sanitizeForBigquery(backupId);
    StringBuilder builder = new StringBuilder(loadMessage + "\n");
    builder.append("Load jobs:\n");

    for (String kindName : kinds) {
      String jobId = String.format("load-backup-%s-%s", sanitizedBackupId, kindName);
      JobReference jobRef = new JobReference().setProjectId(projectId).setJobId(jobId);
      String sourceUri = getBackupInfoFileForKind(backupFolderUrl, kindName);
      String tableId = String.format("%s_%s", sanitizedBackupId, kindName);

      // Launch the load job.
      Job job = makeLoadJob(jobRef, sourceUri, tableId);
      bigquery.jobs().insert(projectId, job).execute();

      // Enqueue a task to check on the load job's completion, and if it succeeds, to update a
      // well-known view in BigQuery to point at the newly loaded backup table for this kind.
      bigqueryPollEnqueuer.enqueuePollTask(
          jobRef,
          createViewUpdateTask(BACKUP_DATASET, tableId, kindName, LATEST_BACKUP_VIEW_NAME),
          getQueue(UpdateSnapshotViewAction.QUEUE));

      builder.append(String.format(" - %s:%s\n", projectId, jobId));
      logger.atInfo().log("Submitted load job %s:%s", projectId, jobId);
    }
    return builder.toString();
  }

  static String sanitizeForBigquery(String backupId) {
    return backupId.replaceAll("[^a-zA-Z0-9_]", "_");
  }

  @VisibleForTesting
  static String getBackupInfoFileForKind(String backupFolderUrl, String kindName) {
    return Joiner.on('/')
        .join(
            backupFolderUrl,
            "all_namespaces",
            String.format("kind_%s", kindName),
            String.format("all_namespaces_kind_%s.%s", kindName, "export_metadata"));
  }

  private Job makeLoadJob(JobReference jobRef, String sourceUri, String tableId) {
    TableReference tableReference =
        new TableReference()
            .setProjectId(jobRef.getProjectId())
            .setDatasetId(BACKUP_DATASET)
            .setTableId(tableId);
    return new Job()
        .setJobReference(jobRef)
        .setConfiguration(new JobConfiguration()
            .setLoad(new JobConfigurationLoad()
                .setWriteDisposition(WriteDisposition.WRITE_EMPTY.toString())
                .setSourceFormat(SourceFormat.DATASTORE_BACKUP.toString())
                .setSourceUris(ImmutableList.of(sourceUri))
                .setDestinationTable(tableReference)));
  }
}
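For each kind, the load job reads the export_metadata file that Datastore's managed export writes under the export folder. Below is a small runnable sketch of the resulting GCS path, reusing the same join logic as getBackupInfoFileForKind above; the bucket and export ID values are made-up examples.

import com.google.common.base.Joiner;

public class BackupInfoPathSketch {
  // Same construction as UploadDatastoreBackupAction.getBackupInfoFileForKind above.
  static String getBackupInfoFileForKind(String backupFolderUrl, String kindName) {
    return Joiner.on('/')
        .join(
            backupFolderUrl,
            "all_namespaces",
            String.format("kind_%s", kindName),
            String.format("all_namespaces_kind_%s.%s", kindName, "export_metadata"));
  }

  public static void main(String[] args) {
    // Prints the metadata object the BigQuery load job reads for ContactResource, e.g.
    // gs://my-project-datastore-backups/2018-12-05T23:56:18_50532/all_namespaces/
    //   kind_ContactResource/all_namespaces_kind_ContactResource.export_metadata
    System.out.println(
        getBackupInfoFileForKind(
            "gs://my-project-datastore-backups/2018-12-05T23:56:18_50532", "ContactResource"));
  }
}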
@@ -25,7 +25,7 @@ import com.google.api.client.json.GenericJson;
 import com.google.api.client.json.JsonFactory;
 import com.google.api.client.util.Key;
 import com.google.common.base.Strings;
-import java.util.List;
+import java.util.Collection;
 import java.util.Optional;
 
 /**

@@ -75,7 +75,7 @@ public class DatastoreAdmin extends AbstractGoogleJsonClient {
    * @param outputUrlPrefix the full resource URL of the external storage location
    * @param kinds the datastore 'kinds' to be exported
    */
-  public Export export(String outputUrlPrefix, List<String> kinds) {
+  public Export export(String outputUrlPrefix, Collection<String> kinds) {
     return new Export(new ExportRequest(outputUrlPrefix, kinds));
   }
 

@@ -214,7 +214,7 @@ public class DatastoreAdmin extends AbstractGoogleJsonClient {
     @Key private final String outputUrlPrefix;
     @Key private final EntityFilter entityFilter;
 
-    ExportRequest(String outputUrlPrefix, List<String> kinds) {
+    ExportRequest(String outputUrlPrefix, Collection<String> kinds) {
       checkNotNull(outputUrlPrefix, "outputUrlPrefix");
       this.outputUrlPrefix = outputUrlPrefix;
       this.entityFilter = new EntityFilter(kinds);
@@ -20,6 +20,7 @@ import static com.google.common.base.Preconditions.checkNotNull;
 import com.google.api.client.json.GenericJson;
 import com.google.api.client.util.Key;
 import com.google.common.collect.ImmutableList;
+import java.util.Collection;
 import java.util.List;
 
 /**

@@ -36,7 +37,7 @@ public class EntityFilter extends GenericJson {
   /** For JSON deserialization. */
   public EntityFilter() {}
 
-  EntityFilter(List<String> kinds) {
+  EntityFilter(Collection<String> kinds) {
     checkNotNull(kinds, "kinds");
     checkArgument(!kinds.isEmpty(), "kinds must not be empty");
     this.kinds = ImmutableList.copyOf(kinds);
@@ -68,6 +68,10 @@ public class Operation extends GenericJson {
     return getState().equals(STATE_PROCESSING);
   }
 
+  /**
+   * Returns the elapsed time since starting if this operation is still running, or the total
+   * running time if this operation has completed.
+   */
   public Duration getRunningTime(Clock clock) {
     return new Duration(
         getStartTime(), getMetadata().getCommonMetadata().getEndTime().orElse(clock.nowUtc()));
@@ -18,6 +18,7 @@ java_library(
         "//java/google/registry/dns/writer/clouddns",
         "//java/google/registry/dns/writer/dnsupdate",
         "//java/google/registry/export",
+        "//java/google/registry/export/datastore",
         "//java/google/registry/export/sheet",
         "//java/google/registry/flows",
         "//java/google/registry/gcs",
@@ -22,6 +22,7 @@ import google.registry.config.CredentialModule;
 import google.registry.config.RegistryConfig.ConfigModule;
 import google.registry.dns.writer.VoidDnsWriterModule;
 import google.registry.export.DriveModule;
+import google.registry.export.datastore.DatastoreAdminModule;
 import google.registry.export.sheet.SheetsServiceModule;
 import google.registry.gcs.GcsServiceModule;
 import google.registry.groups.DirectoryModule;

@@ -56,6 +57,7 @@ import javax.inject.Singleton;
       BigqueryModule.class,
       ConfigModule.class,
       CredentialModule.class,
+      DatastoreAdminModule.class,
       DatastoreServiceModule.class,
       DirectoryModule.class,
       DummyKeyringModule.class,
@@ -39,7 +39,9 @@ import google.registry.dns.writer.VoidDnsWriterModule;
 import google.registry.dns.writer.clouddns.CloudDnsWriterModule;
 import google.registry.dns.writer.dnsupdate.DnsUpdateConfigModule;
 import google.registry.dns.writer.dnsupdate.DnsUpdateWriterModule;
+import google.registry.export.BackupDatastoreAction;
 import google.registry.export.BigqueryPollJobAction;
+import google.registry.export.CheckBackupAction;
 import google.registry.export.CheckSnapshotAction;
 import google.registry.export.ExportDomainListsAction;
 import google.registry.export.ExportPremiumTermsAction;

@@ -49,6 +51,7 @@ import google.registry.export.ExportSnapshotAction;
 import google.registry.export.LoadSnapshotAction;
 import google.registry.export.SyncGroupMembersAction;
 import google.registry.export.UpdateSnapshotViewAction;
+import google.registry.export.UploadDatastoreBackupAction;
 import google.registry.export.sheet.SheetModule;
 import google.registry.export.sheet.SyncRegistrarsSheetAction;
 import google.registry.flows.async.AsyncFlowsModule;

@@ -114,8 +117,10 @@ import google.registry.tmch.TmchSmdrlAction;
       WhiteboxModule.class,
     })
 interface BackendRequestComponent {
+  BackupDatastoreAction backupDatastoreAction();
   BigqueryPollJobAction bigqueryPollJobAction();
   BrdaCopyAction brdaCopyAction();
+  CheckBackupAction checkBackupAction();
   CheckSnapshotAction checkSnapshotAction();
   CommitLogCheckpointAction commitLogCheckpointAction();
   CommitLogFanoutAction commitLogFanoutAction();

@@ -158,6 +163,7 @@ interface BackendRequestComponent {
   TmchCrlAction tmchCrlAction();
   TmchDnlAction tmchDnlAction();
   TmchSmdrlAction tmchSmdrlAction();
+  UploadDatastoreBackupAction uploadDatastoreBackupAction();
   UpdateSnapshotViewAction updateSnapshotViewAction();
   PublishInvoicesAction uploadInvoicesAction();
@@ -10,13 +10,16 @@ load("//java/com/google/testing/builddefs:GenTestRules.bzl", "GenTestRules")
 java_library(
     name = "export",
     srcs = glob(["*.java"]),
-    resources = [
+    resources = glob([
+        "testdata/*.json",
         "backup_kinds.txt",
         "reporting_kinds.txt",
-    ],
+    ]),
     deps = [
         "//java/google/registry/bigquery",
+        "//java/google/registry/config",
         "//java/google/registry/export",
+        "//java/google/registry/export/datastore",
         "//java/google/registry/groups",
         "//java/google/registry/model",
         "//java/google/registry/request",

@@ -25,6 +28,7 @@ java_library(
         "//javatests/google/registry/testing",
         "//javatests/google/registry/testing/mapreduce",
         "//third_party/objectify:objectify-v4_1",
+        "@com_google_api_client",
         "@com_google_apis_google_api_services_bigquery",
         "@com_google_apis_google_api_services_drive",
         "@com_google_appengine_api_1_0_sdk",

@@ -35,6 +39,7 @@ java_library(
         "@com_google_flogger_system_backend",
         "@com_google_guava",
         "@com_google_http_client",
+        "@com_google_http_client_jackson2",
         "@com_google_re2j",
        "@com_google_truth",
        "@com_google_truth_extensions_truth_java8_extension",
@@ -0,0 +1,85 @@
// Copyright 2018 The Nomulus Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package google.registry.export;

import static com.google.common.truth.Truth.assertThat;
import static google.registry.export.CheckBackupAction.CHECK_BACKUP_KINDS_TO_LOAD_PARAM;
import static google.registry.export.CheckBackupAction.CHECK_BACKUP_NAME_PARAM;
import static google.registry.testing.TaskQueueHelper.assertTasksEnqueued;
import static org.mockito.Mockito.when;

import com.google.common.base.Joiner;
import google.registry.export.datastore.DatastoreAdmin;
import google.registry.export.datastore.DatastoreAdmin.Export;
import google.registry.export.datastore.Operation;
import google.registry.testing.AppEngineRule;
import google.registry.testing.FakeResponse;
import google.registry.testing.MockitoJUnitRule;
import google.registry.testing.TaskQueueHelper.TaskMatcher;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.mockito.Mock;

/** Unit tests for {@link BackupDatastoreAction}. */
@RunWith(JUnit4.class)
public class BackupDatastoreActionTest {

  @Rule public final AppEngineRule appEngine = AppEngineRule.builder().withTaskQueue().build();
  @Rule public final MockitoJUnitRule mocks = MockitoJUnitRule.create();

  @Mock private DatastoreAdmin datastoreAdmin;
  @Mock private Export exportRequest;
  @Mock private Operation backupOperation;

  private final FakeResponse response = new FakeResponse();
  private final BackupDatastoreAction action = new BackupDatastoreAction();

  @Before
  public void before() throws Exception {
    action.datastoreAdmin = datastoreAdmin;
    action.response = response;

    when(datastoreAdmin.export(
            "gs://registry-project-id-datastore-backups", ExportConstants.getBackupKinds()))
        .thenReturn(exportRequest);
    when(exportRequest.execute()).thenReturn(backupOperation);
    when(backupOperation.getName())
        .thenReturn("projects/registry-project-id/operations/ASA1ODYwNjc");
    when(backupOperation.getExportFolderUrl())
        .thenReturn("gs://registry-project-id-datastore-backups/some-id");
  }

  @Test
  public void testBackup_enqueuesPollTask() {
    action.run();
    assertTasksEnqueued(
        CheckBackupAction.QUEUE,
        new TaskMatcher()
            .url(CheckBackupAction.PATH)
            .param(CHECK_BACKUP_NAME_PARAM, "projects/registry-project-id/operations/ASA1ODYwNjc")
            .param(
                CHECK_BACKUP_KINDS_TO_LOAD_PARAM,
                Joiner.on(",").join(ExportConstants.getReportingKinds()))
            .method("POST"));
    assertThat(response.getPayload())
        .isEqualTo(
            "Datastore backup started with name: "
                + "projects/registry-project-id/operations/ASA1ODYwNjc\n"
                + "Saving to gs://registry-project-id-datastore-backups/some-id");
  }
}
javatests/google/registry/export/CheckBackupActionTest.java (new file, 222 lines)
@ -0,0 +1,222 @@
|
||||||
|
// Copyright 2018 The Nomulus Authors. All Rights Reserved.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
//
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
|
||||||
|
package google.registry.export;
|
||||||
|
|
||||||
|
import static com.google.common.truth.Truth.assertThat;
|
||||||
|
import static google.registry.export.CheckBackupAction.CHECK_BACKUP_KINDS_TO_LOAD_PARAM;
|
||||||
|
import static google.registry.export.CheckBackupAction.CHECK_BACKUP_NAME_PARAM;
|
||||||
|
import static google.registry.testing.JUnitBackports.assertThrows;
|
||||||
|
import static google.registry.testing.TaskQueueHelper.assertNoTasksEnqueued;
|
||||||
|
import static google.registry.testing.TaskQueueHelper.assertTasksEnqueued;
|
||||||
|
import static org.mockito.Matchers.anyString;
|
||||||
|
import static org.mockito.Mockito.when;
|
||||||
|
|
||||||
|
import com.google.api.client.googleapis.json.GoogleJsonResponseException;
|
||||||
|
import com.google.api.client.http.HttpHeaders;
|
||||||
|
import com.google.api.client.json.JsonFactory;
|
||||||
|
import com.google.api.client.json.jackson2.JacksonFactory;
|
||||||
|
import com.google.common.collect.ImmutableSet;
|
||||||
|
import google.registry.export.datastore.DatastoreAdmin;
|
||||||
|
import google.registry.export.datastore.DatastoreAdmin.Get;
|
||||||
|
import google.registry.export.datastore.Operation;
|
||||||
|
import google.registry.request.Action.Method;
|
||||||
|
import google.registry.request.HttpException.BadRequestException;
|
||||||
|
import google.registry.request.HttpException.NoContentException;
|
||||||
|
import google.registry.request.HttpException.NotModifiedException;
|
||||||
|
import google.registry.testing.AppEngineRule;
|
||||||
|
import google.registry.testing.FakeClock;
|
||||||
|
import google.registry.testing.FakeResponse;
|
||||||
|
import google.registry.testing.MockitoJUnitRule;
|
||||||
|
import google.registry.testing.TaskQueueHelper.TaskMatcher;
|
||||||
|
import google.registry.testing.TestDataHelper;
|
||||||
|
import org.joda.time.DateTime;
|
||||||
|
import org.joda.time.Duration;
|
||||||
|
import org.junit.Before;
|
||||||
|
import org.junit.Rule;
|
||||||
|
import org.junit.Test;
|
||||||
|
import org.junit.runner.RunWith;
|
||||||
|
import org.junit.runners.JUnit4;
|
||||||
|
import org.mockito.Mock;
|
||||||
|
|
||||||
|
/** Unit tests for {@link CheckBackupAction}. */
|
||||||
|
@RunWith(JUnit4.class)
|
||||||
|
public class CheckBackupActionTest {
|
||||||
|
|
||||||
|
static final DateTime START_TIME = DateTime.parse("2014-08-01T01:02:03Z");
|
||||||
|
static final DateTime COMPLETE_TIME = START_TIME.plus(Duration.standardMinutes(30));
|
||||||
|
|
||||||
|
static final JsonFactory JSON_FACTORY = JacksonFactory.getDefaultInstance();
|
||||||
|
|
||||||
|
@Rule public final AppEngineRule appEngine = AppEngineRule.builder().withTaskQueue().build();
|
||||||
|
@Rule public final MockitoJUnitRule mocks = MockitoJUnitRule.create();
|
||||||
|
|
||||||
|
@Mock private DatastoreAdmin datastoreAdmin;
|
||||||
|
@Mock private Get getNotFoundBackupProgressRequest;
|
||||||
|
@Mock private Get getBackupProgressRequest;
|
||||||
|
private Operation backupOperation;
|
||||||
|
|
||||||
|
private final FakeResponse response = new FakeResponse();
|
||||||
|
private final FakeClock clock = new FakeClock(COMPLETE_TIME.plusMillis(1000));
|
||||||
|
private final CheckBackupAction action = new CheckBackupAction();
|
||||||
|
|
||||||
|
@Before
|
||||||
|
public void before() throws Exception {
|
||||||
|
action.requestMethod = Method.POST;
|
||||||
|
action.datastoreAdmin = datastoreAdmin;
|
||||||
|
action.clock = clock;
|
||||||
|
action.backupName = "some_backup";
|
||||||
|
action.kindsToLoadParam = "one,two";
|
||||||
|
action.response = response;
|
||||||
|
|
||||||
|
when(datastoreAdmin.get(anyString())).thenReturn(getBackupProgressRequest);
|
||||||
|
when(getBackupProgressRequest.execute()).thenAnswer(arg -> backupOperation);
|
||||||
|
}
|
||||||
|
|
||||||
|
  private void setPendingBackup() throws Exception {
    backupOperation =
        JSON_FACTORY.fromString(
            TestDataHelper.loadFile(
                CheckBackupActionTest.class, "backup_operation_in_progress.json"),
            Operation.class);
  }

  private void setCompleteBackup() throws Exception {
    backupOperation =
        JSON_FACTORY.fromString(
            TestDataHelper.loadFile(CheckBackupActionTest.class, "backup_operation_success.json"),
            Operation.class);
  }

  private void setBackupNotFound() throws Exception {
    when(datastoreAdmin.get(anyString())).thenReturn(getNotFoundBackupProgressRequest);
    when(getNotFoundBackupProgressRequest.execute())
        .thenThrow(
            new GoogleJsonResponseException(
                new GoogleJsonResponseException.Builder(404, "NOT_FOUND", new HttpHeaders())
                    .setMessage("No backup found"),
                null));
  }

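  // Asserts that a follow-up task was enqueued on the export-snapshot queue pointing the upload
  // action at the export's GCS folder and the kinds to be loaded.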
  private static void assertLoadTaskEnqueued(String id, String folder, String kinds) {
    assertTasksEnqueued(
        "export-snapshot",
        new TaskMatcher()
            .url("/_dr/task/uploadDatastoreBackup")
            .method("POST")
            .param("id", id)
            .param("folder", folder)
            .param("kinds", kinds));
  }

  @Test
  public void testSuccess_enqueuePollTask() {
    CheckBackupAction.enqueuePollTask("some_backup_name", ImmutableSet.of("one", "two", "three"));
    assertTasksEnqueued(
        CheckBackupAction.QUEUE,
        new TaskMatcher()
            .url(CheckBackupAction.PATH)
            .param(CHECK_BACKUP_NAME_PARAM, "some_backup_name")
            .param(CHECK_BACKUP_KINDS_TO_LOAD_PARAM, "one,two,three")
            .method("POST"));
  }

  @Test
  public void testPost_forPendingBackup_returnsNotModified() throws Exception {
    setPendingBackup();

    NotModifiedException thrown = assertThrows(NotModifiedException.class, action::run);
    assertThat(thrown)
        .hasMessageThat()
        .contains("Datastore backup some_backup still in progress: Progress: N/A");
  }

  @Test
  public void testPost_forStalePendingBackupBackup_returnsNoContent() throws Exception {
    setPendingBackup();
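    // Advance the clock far enough past the export's start time that the action gives up and
    // reports the backup as abandoned instead of merely still in progress.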
    clock.setTo(
        START_TIME
            .plus(Duration.standardHours(20))
            .plus(Duration.standardMinutes(3))
            .plus(Duration.millis(1234)));

    NoContentException thrown = assertThrows(NoContentException.class, action::run);
    assertThat(thrown)
        .hasMessageThat()
        .contains(
            "Datastore backup some_backup abandoned - "
                + "not complete after 20 hours, 3 minutes and 1 second. Progress: Progress: N/A");
  }

  @Test
  public void testPost_forCompleteBackup_enqueuesLoadTask() throws Exception {
    setCompleteBackup();
    action.run();
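    // The expected id and folder match the outputUrlPrefix in backup_operation_success.json.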
    assertLoadTaskEnqueued(
        "2014-08-01T01:02:03_99364",
        "gs://registry-project-id-datastore-export-test/2014-08-01T01:02:03_99364",
        "one,two");
  }

  @Test
  public void testPost_forCompleteBackup_withExtraKindsToLoad_enqueuesLoadTask() throws Exception {
    setCompleteBackup();
    action.kindsToLoadParam = "one,foo";
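    // Of the requested kinds, only "one" exists in the export, so only "one" should be loaded.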

    action.run();
    assertLoadTaskEnqueued(
        "2014-08-01T01:02:03_99364",
        "gs://registry-project-id-datastore-export-test/2014-08-01T01:02:03_99364",
        "one");
  }

  @Test
  public void testPost_forCompleteBackup_withEmptyKindsToLoad_skipsLoadTask() throws Exception {
    setCompleteBackup();
    action.kindsToLoadParam = "";

    action.run();
    assertNoTasksEnqueued("export-snapshot");
  }

  @Test
  public void testPost_forBadBackup_returnsBadRequest() throws Exception {
    setBackupNotFound();

    BadRequestException thrown = assertThrows(BadRequestException.class, action::run);
    assertThat(thrown).hasMessageThat().contains("Bad backup name some_backup: No backup found");
  }

  @Test
  public void testGet_returnsInformation() throws Exception {
    setCompleteBackup();
    action.requestMethod = Method.GET;

    action.run();
    assertThat(response.getPayload())
        .isEqualTo(
            TestDataHelper.loadFile(
                    CheckBackupActionTest.class, "pretty_printed_success_backup_operation.json")
                .trim());
  }

  @Test
  public void testGet_forBadBackup_returnsError() throws Exception {
    setBackupNotFound();
    action.requestMethod = Method.GET;

    BadRequestException thrown = assertThrows(BadRequestException.class, action::run);
    assertThat(thrown).hasMessageThat().contains("Bad backup name some_backup: No backup found");
  }
}

@@ -16,6 +16,7 @@ package google.registry.export;
 import static com.google.common.collect.Iterables.transform;
 import static com.google.common.truth.Truth.assertThat;
+import static google.registry.export.LoadSnapshotAction.LATEST_SNAPSHOT_VIEW_NAME;
 import static google.registry.export.LoadSnapshotAction.LOAD_SNAPSHOT_FILE_PARAM;
 import static google.registry.export.LoadSnapshotAction.LOAD_SNAPSHOT_ID_PARAM;
 import static google.registry.export.LoadSnapshotAction.LOAD_SNAPSHOT_KINDS_PARAM;
@@ -159,23 +160,29 @@ public class LoadSnapshotActionTest {
     verify(bigqueryJobsInsert, times(3)).execute();

     // Check that the poll tasks for each load job were enqueued.
-    verify(bigqueryPollEnqueuer).enqueuePollTask(
-        new JobReference()
-            .setProjectId("Project-Id")
-            .setJobId("load-snapshot-id12345-one-1391096117045"),
-        UpdateSnapshotViewAction.createViewUpdateTask("snapshots", "id12345_one", "one"),
-        QueueFactory.getQueue(UpdateSnapshotViewAction.QUEUE));
-    verify(bigqueryPollEnqueuer).enqueuePollTask(
-        new JobReference()
-            .setProjectId("Project-Id")
-            .setJobId("load-snapshot-id12345-two-1391096117045"),
-        UpdateSnapshotViewAction.createViewUpdateTask("snapshots", "id12345_two", "two"),
-        QueueFactory.getQueue(UpdateSnapshotViewAction.QUEUE));
-    verify(bigqueryPollEnqueuer).enqueuePollTask(
-        new JobReference()
-            .setProjectId("Project-Id")
-            .setJobId("load-snapshot-id12345-three-1391096117045"),
-        UpdateSnapshotViewAction.createViewUpdateTask("snapshots", "id12345_three", "three"),
-        QueueFactory.getQueue(UpdateSnapshotViewAction.QUEUE));
+    verify(bigqueryPollEnqueuer)
+        .enqueuePollTask(
+            new JobReference()
+                .setProjectId("Project-Id")
+                .setJobId("load-snapshot-id12345-one-1391096117045"),
+            UpdateSnapshotViewAction.createViewUpdateTask(
+                "snapshots", "id12345_one", "one", LATEST_SNAPSHOT_VIEW_NAME),
+            QueueFactory.getQueue(UpdateSnapshotViewAction.QUEUE));
+    verify(bigqueryPollEnqueuer)
+        .enqueuePollTask(
+            new JobReference()
+                .setProjectId("Project-Id")
+                .setJobId("load-snapshot-id12345-two-1391096117045"),
+            UpdateSnapshotViewAction.createViewUpdateTask(
+                "snapshots", "id12345_two", "two", LATEST_SNAPSHOT_VIEW_NAME),
+            QueueFactory.getQueue(UpdateSnapshotViewAction.QUEUE));
+    verify(bigqueryPollEnqueuer)
+        .enqueuePollTask(
+            new JobReference()
+                .setProjectId("Project-Id")
+                .setJobId("load-snapshot-id12345-three-1391096117045"),
+            UpdateSnapshotViewAction.createViewUpdateTask(
+                "snapshots", "id12345_three", "three", LATEST_SNAPSHOT_VIEW_NAME),
+            QueueFactory.getQueue(UpdateSnapshotViewAction.QUEUE));
   }

@@ -20,6 +20,7 @@ import static google.registry.export.UpdateSnapshotViewAction.QUEUE;
 import static google.registry.export.UpdateSnapshotViewAction.UPDATE_SNAPSHOT_DATASET_ID_PARAM;
 import static google.registry.export.UpdateSnapshotViewAction.UPDATE_SNAPSHOT_KIND_PARAM;
 import static google.registry.export.UpdateSnapshotViewAction.UPDATE_SNAPSHOT_TABLE_ID_PARAM;
+import static google.registry.export.UpdateSnapshotViewAction.UPDATE_SNAPSHOT_VIEWNAME_PARAM;
 import static google.registry.export.UpdateSnapshotViewAction.createViewUpdateTask;
 import static google.registry.testing.JUnitBackports.assertThrows;
 import static google.registry.testing.TaskQueueHelper.assertTasksEnqueued;

@@ -79,20 +80,26 @@ public class UpdateSnapshotViewActionTest {
     action.checkedBigquery = checkedBigquery;
     action.datasetId = "some_dataset";
     action.kindName = "fookind";
+    action.viewName = "latest_datastore_export";
     action.projectId = "myproject";
     action.tableId = "12345_fookind";
   }

   @Test
   public void testSuccess_createViewUpdateTask() {
-    getQueue(QUEUE).add(createViewUpdateTask("some_dataset", "12345_fookind", "fookind"));
-    assertTasksEnqueued(QUEUE,
+    getQueue(QUEUE)
+        .add(
+            createViewUpdateTask(
+                "some_dataset", "12345_fookind", "fookind", "latest_datastore_export"));
+    assertTasksEnqueued(
+        QUEUE,
         new TaskMatcher()
             .url(UpdateSnapshotViewAction.PATH)
             .method("POST")
             .param(UPDATE_SNAPSHOT_DATASET_ID_PARAM, "some_dataset")
             .param(UPDATE_SNAPSHOT_TABLE_ID_PARAM, "12345_fookind")
-            .param(UPDATE_SNAPSHOT_KIND_PARAM, "fookind"));
+            .param(UPDATE_SNAPSHOT_KIND_PARAM, "fookind")
+            .param(UPDATE_SNAPSHOT_VIEWNAME_PARAM, "latest_datastore_export"));
   }

   @Test

@@ -0,0 +1,209 @@
// Copyright 2017 The Nomulus Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package google.registry.export;

import static com.google.common.collect.Iterables.transform;
import static com.google.common.truth.Truth.assertThat;
import static google.registry.export.UploadDatastoreBackupAction.BACKUP_DATASET;
import static google.registry.export.UploadDatastoreBackupAction.LATEST_BACKUP_VIEW_NAME;
import static google.registry.export.UploadDatastoreBackupAction.PATH;
import static google.registry.export.UploadDatastoreBackupAction.QUEUE;
import static google.registry.export.UploadDatastoreBackupAction.UPLOAD_BACKUP_FOLDER_PARAM;
import static google.registry.export.UploadDatastoreBackupAction.UPLOAD_BACKUP_ID_PARAM;
import static google.registry.export.UploadDatastoreBackupAction.UPLOAD_BACKUP_KINDS_PARAM;
import static google.registry.export.UploadDatastoreBackupAction.enqueueUploadBackupTask;
import static google.registry.export.UploadDatastoreBackupAction.getBackupInfoFileForKind;
import static google.registry.testing.JUnitBackports.assertThrows;
import static google.registry.testing.TaskQueueHelper.assertTasksEnqueued;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import com.google.api.services.bigquery.Bigquery;
import com.google.api.services.bigquery.model.Dataset;
import com.google.api.services.bigquery.model.Job;
import com.google.api.services.bigquery.model.JobConfigurationLoad;
import com.google.api.services.bigquery.model.JobReference;
import com.google.appengine.api.taskqueue.QueueFactory;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import google.registry.bigquery.CheckedBigquery;
import google.registry.export.BigqueryPollJobAction.BigqueryPollJobEnqueuer;
import google.registry.request.HttpException.InternalServerErrorException;
import google.registry.testing.AppEngineRule;
import google.registry.testing.TaskQueueHelper.TaskMatcher;
import java.io.IOException;
import java.util.List;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.mockito.ArgumentCaptor;

/** Unit tests for {@link UploadDatastoreBackupAction}. */
@RunWith(JUnit4.class)
public class UploadDatastoreBackupActionTest {

  @Rule
  public final AppEngineRule appEngine = AppEngineRule.builder()
      .withTaskQueue()
      .build();

  private final CheckedBigquery checkedBigquery = mock(CheckedBigquery.class);
  private final Bigquery bigquery = mock(Bigquery.class);
  private final Bigquery.Jobs bigqueryJobs = mock(Bigquery.Jobs.class);
  private final Bigquery.Jobs.Insert bigqueryJobsInsert = mock(Bigquery.Jobs.Insert.class);
  private final Bigquery.Datasets bigqueryDatasets = mock(Bigquery.Datasets.class);
  private final Bigquery.Datasets.Insert bigqueryDatasetsInsert =
      mock(Bigquery.Datasets.Insert.class);
  private final BigqueryPollJobEnqueuer bigqueryPollEnqueuer = mock(BigqueryPollJobEnqueuer.class);
  private UploadDatastoreBackupAction action;

  @Before
  public void before() throws Exception {
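    // Wire up the mocked BigQuery client: ensureDataSetExists returns the mock client, and the
    // jobs()/datasets() chains return mock insert requests so the test can inspect what was sent.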
    when(checkedBigquery.ensureDataSetExists("Project-Id", BACKUP_DATASET)).thenReturn(bigquery);
    when(bigquery.jobs()).thenReturn(bigqueryJobs);
    when(bigqueryJobs.insert(eq("Project-Id"), any(Job.class))).thenReturn(bigqueryJobsInsert);
    when(bigquery.datasets()).thenReturn(bigqueryDatasets);
    when(bigqueryDatasets.insert(eq("Project-Id"), any(Dataset.class)))
        .thenReturn(bigqueryDatasetsInsert);
    action = new UploadDatastoreBackupAction();
    action.checkedBigquery = checkedBigquery;
    action.bigqueryPollEnqueuer = bigqueryPollEnqueuer;
    action.projectId = "Project-Id";
    action.backupFolderUrl = "gs://bucket/path";
    action.backupId = "2018-12-05T17:46:39_92612";
    action.backupKinds = "one,two,three";
  }

  @Test
  public void testSuccess_enqueueLoadTask() {
    enqueueUploadBackupTask("id12345", "gs://bucket/path", ImmutableSet.of("one", "two", "three"));
    assertTasksEnqueued(
        QUEUE,
        new TaskMatcher()
            .url(PATH)
            .method("POST")
            .param(UPLOAD_BACKUP_ID_PARAM, "id12345")
            .param(UPLOAD_BACKUP_FOLDER_PARAM, "gs://bucket/path")
            .param(UPLOAD_BACKUP_KINDS_PARAM, "one,two,three"));
  }

  @Test
  public void testSuccess_doPost() throws Exception {
    action.run();

    // Verify that checkedBigquery was called in a way that would create the dataset if it didn't
    // already exist.
    verify(checkedBigquery).ensureDataSetExists("Project-Id", BACKUP_DATASET);

    // Capture the load jobs we inserted to do additional checking on them.
    ArgumentCaptor<Job> jobArgument = ArgumentCaptor.forClass(Job.class);
    verify(bigqueryJobs, times(3)).insert(eq("Project-Id"), jobArgument.capture());
    List<Job> jobs = jobArgument.getAllValues();
    assertThat(jobs).hasSize(3);

    // Check properties that should be common to all load jobs.
    for (Job job : jobs) {
      assertThat(job.getJobReference().getProjectId()).isEqualTo("Project-Id");
      JobConfigurationLoad config = job.getConfiguration().getLoad();
      assertThat(config.getSourceFormat()).isEqualTo("DATASTORE_BACKUP");
      assertThat(config.getDestinationTable().getProjectId()).isEqualTo("Project-Id");
      assertThat(config.getDestinationTable().getDatasetId()).isEqualTo(BACKUP_DATASET);
    }

    // Check the job IDs for each load job.
    assertThat(transform(jobs, job -> job.getJobReference().getJobId()))
        .containsExactly(
            "load-backup-2018_12_05T17_46_39_92612-one",
            "load-backup-2018_12_05T17_46_39_92612-two",
            "load-backup-2018_12_05T17_46_39_92612-three");

    // Check the source URI for each load job.
    assertThat(
            transform(
                jobs,
                job -> Iterables.getOnlyElement(job.getConfiguration().getLoad().getSourceUris())))
        .containsExactly(
            "gs://bucket/path/all_namespaces/kind_one/all_namespaces_kind_one.export_metadata",
            "gs://bucket/path/all_namespaces/kind_two/all_namespaces_kind_two.export_metadata",
            "gs://bucket/path/all_namespaces/kind_three/all_namespaces_kind_three.export_metadata");

    // Check the destination table ID for each load job.
    assertThat(
            transform(
                jobs, job -> job.getConfiguration().getLoad().getDestinationTable().getTableId()))
        .containsExactly(
            "2018_12_05T17_46_39_92612_one",
            "2018_12_05T17_46_39_92612_two",
            "2018_12_05T17_46_39_92612_three");

    // Check that we executed the inserted jobs.
    verify(bigqueryJobsInsert, times(3)).execute();

    // Check that the poll tasks for each load job were enqueued.
    verify(bigqueryPollEnqueuer)
        .enqueuePollTask(
            new JobReference()
                .setProjectId("Project-Id")
                .setJobId("load-backup-2018_12_05T17_46_39_92612-one"),
            UpdateSnapshotViewAction.createViewUpdateTask(
                BACKUP_DATASET, "2018_12_05T17_46_39_92612_one", "one", LATEST_BACKUP_VIEW_NAME),
            QueueFactory.getQueue(UpdateSnapshotViewAction.QUEUE));
    verify(bigqueryPollEnqueuer)
        .enqueuePollTask(
            new JobReference()
                .setProjectId("Project-Id")
                .setJobId("load-backup-2018_12_05T17_46_39_92612-two"),
            UpdateSnapshotViewAction.createViewUpdateTask(
                BACKUP_DATASET, "2018_12_05T17_46_39_92612_two", "two", LATEST_BACKUP_VIEW_NAME),
            QueueFactory.getQueue(UpdateSnapshotViewAction.QUEUE));
    verify(bigqueryPollEnqueuer)
        .enqueuePollTask(
            new JobReference()
                .setProjectId("Project-Id")
                .setJobId("load-backup-2018_12_05T17_46_39_92612-three"),
            UpdateSnapshotViewAction.createViewUpdateTask(
                BACKUP_DATASET,
                "2018_12_05T17_46_39_92612_three",
                "three",
                LATEST_BACKUP_VIEW_NAME),
            QueueFactory.getQueue(UpdateSnapshotViewAction.QUEUE));
  }

  @Test
  public void testFailure_doPost_bigqueryThrowsException() throws Exception {
    when(bigqueryJobsInsert.execute()).thenThrow(new IOException("The Internet has gone missing"));
    InternalServerErrorException thrown =
        assertThrows(InternalServerErrorException.class, action::run);
    assertThat(thrown)
        .hasMessageThat()
        .contains("Error loading backup: The Internet has gone missing");
  }

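  // The expected path follows the Datastore managed-export layout:
  // <folder>/all_namespaces/kind_<KIND>/all_namespaces_kind_<KIND>.export_metadata.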
  @Test
  public void testgetBackupInfoFileForKind() {
    assertThat(
            getBackupInfoFileForKind(
                "gs://BucketName/2018-11-11T00:00:00_12345", "AllocationToken"))
        .isEqualTo(
            "gs://BucketName/2018-11-11T00:00:00_12345/"
                + "all_namespaces/kind_AllocationToken/"
                + "all_namespaces_kind_AllocationToken.export_metadata");
  }
}

javatests/google/registry/export/testdata/backup_operation_in_progress.json
@@ -0,0 +1,18 @@
{
  "name": "projects/registry-project-id/operations/ASAzNjMwOTEyNjUJ",
  "metadata": {
    "@type": "type.googleapis.com/google.datastore.admin.v1.ExportEntitiesMetadata",
    "common": {
      "startTime": "2014-08-01T01:02:03Z",
      "operationType": "EXPORT_ENTITIES",
      "state": "PROCESSING"
    },
    "entityFilter": {
      "kinds": [
        "one",
        "two"
      ]
    },
    "outputUrlPrefix": "gs://registry-project-id-datastore-export-test/2014-08-01T01:02:03_99364"
  }
}

javatests/google/registry/export/testdata/backup_operation_success.json
@@ -0,0 +1,20 @@
{
  "name": "projects/registry-project-id/operations/ASAzNjMwOTEyNjUJ",
  "metadata": {
    "@type": "type.googleapis.com/google.datastore.admin.v1.ExportEntitiesMetadata",
    "common": {
      "startTime": "2014-08-01T01:02:03Z",
      "endTime": "2014-08-01T01:32:03Z",
      "operationType": "EXPORT_ENTITIES",
      "state": "SUCCESSFUL"
    },
    "entityFilter": {
      "kinds": [
        "one",
        "two"
      ]
    },
    "outputUrlPrefix": "gs://registry-project-id-datastore-export-test/2014-08-01T01:02:03_99364"
  },
  "done": true
}

javatests/google/registry/export/testdata/pretty_printed_success_backup_operation.json
@@ -0,0 +1,17 @@
{
  "done" : true,
  "metadata" : {
    "common" : {
      "endTime" : "2014-08-01T01:32:03Z",
      "operationType" : "EXPORT_ENTITIES",
      "startTime" : "2014-08-01T01:02:03Z",
      "state" : "SUCCESSFUL"
    },
    "entityFilter" : {
      "kinds" : [ "one", "two" ]
    },
    "outputUrlPrefix" : "gs://registry-project-id-datastore-export-test/2014-08-01T01:02:03_99364",
    "@type" : "type.googleapis.com/google.datastore.admin.v1.ExportEntitiesMetadata"
  },
  "name" : "projects/registry-project-id/operations/ASAzNjMwOTEyNjUJ"
}

@@ -4,7 +4,9 @@ PATH CLASS METHOD
 /_dr/cron/fanout TldFanoutAction GET y INTERNAL APP IGNORED
 /_dr/cron/readDnsQueue ReadDnsQueueAction GET y INTERNAL APP IGNORED
 /_dr/dnsRefresh RefreshDnsAction GET y INTERNAL APP IGNORED
+/_dr/task/backupDatastore BackupDatastoreAction POST y INTERNAL APP IGNORED
 /_dr/task/brdaCopy BrdaCopyAction POST y INTERNAL APP IGNORED
+/_dr/task/checkDatastoreBackup CheckBackupAction POST,GET y INTERNAL APP IGNORED
 /_dr/task/checkSnapshot CheckSnapshotAction POST,GET y INTERNAL APP IGNORED
 /_dr/task/copyDetailReports CopyDetailReportsAction POST n INTERNAL,API APP ADMIN
 /_dr/task/deleteContactsAndHosts DeleteContactsAndHostsAction GET n INTERNAL APP IGNORED

@@ -44,3 +46,4 @@ PATH CLASS METHOD
 /_dr/task/tmchDnl TmchDnlAction POST y INTERNAL APP IGNORED
 /_dr/task/tmchSmdrl TmchSmdrlAction POST y INTERNAL APP IGNORED
 /_dr/task/updateSnapshotView UpdateSnapshotViewAction POST n INTERNAL APP IGNORED
+/_dr/task/uploadDatastoreBackup UploadDatastoreBackupAction POST n INTERNAL APP IGNORED