Mirror of https://github.com/google/nomulus.git (synced 2025-05-13 07:57:13 +02:00)
Send a plaintext link to the mapreduce console in fluent style
The link was previously being sent using a JS redirect, which doesn't work because the endpoints that trigger mapreduces can only be hit from the command line (because they require auth). This commit switches the link to be in plaintext and renders the full URL instead of just the path, so that clicking it directly from the terminal works. This also improves how these links are sent from callsites by using a fluent style.

-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=228764606

parent 072576ec9d
commit 765e63e7e9
31 changed files with 202 additions and 239 deletions
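
Before the diff itself, here is a minimal, self-contained Java sketch of the pattern the commit introduces; it is illustrative only, and the Response interface, RunnerResult class, hostname, and job id below are simplified stand-ins rather than the real Nomulus types. In the actual change, MapreduceRunner's runMapreduce/runMapOnly now return a MapreduceRunnerResult whose sendLinkToMapreduceConsole(response) writes a plaintext "Mapreduce console: https://..." line to the HTTP response, replacing the old response.sendJavaScriptRedirect(createJobPath(...)) wrapping at each callsite.

// Minimal sketch of the fluent result-object pattern introduced by this commit.
// All names here are simplified stand-ins, not the actual Nomulus classes.
final class MapreduceConsoleLinkSketch {

  /** Simplified stand-in for google.registry.request.Response. */
  interface Response {
    void setPayload(String payload);
  }

  /** Simplified stand-in for the MapreduceRunnerResult added in this commit. */
  static final class RunnerResult {
    private final String hostname;
    private final String jobId;

    RunnerResult(String hostname, String jobId) {
      this.hostname = hostname;
      this.jobId = jobId;
    }

    /** Renders the full console URL (not just the path) so it is clickable from a terminal. */
    String getLinkToMapreduceConsole() {
      return String.format(
          "Mapreduce console: https://%s/_ah/pipeline/status.html?root=%s", hostname, jobId);
    }

    /** Writes the plaintext link to the response, replacing the old JavaScript redirect. */
    void sendLinkToMapreduceConsole(Response response) {
      response.setPayload(getLinkToMapreduceConsole());
    }
  }

  public static void main(String[] args) {
    // "backend.example.test" and "fake-job-id" are made-up values for the demo.
    new RunnerResult("backend.example.test", "fake-job-id")
        .sendLinkToMapreduceConsole(payload -> System.out.println(payload));
  }
}

Running the sketch prints a single "Mapreduce console: https://..." line, which is what the tooling now reads from the response payload instead of parsing an HTML/JS redirect.
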
@@ -18,7 +18,6 @@ import static com.google.common.base.Preconditions.checkNotNull;
 import static com.google.common.base.Preconditions.checkState;
 import static google.registry.mapreduce.MapreduceRunner.PARAM_DRY_RUN;
 import static google.registry.model.ofy.ObjectifyService.ofy;
-import static google.registry.util.PipelineUtils.createJobPath;
 import static java.lang.Boolean.FALSE;
 import static java.lang.Boolean.TRUE;
 
@@ -88,17 +87,18 @@ public final class DeleteOldCommitLogsAction implements Runnable {
         "Processing asynchronous deletion of unreferenced CommitLogManifests older than %s",
         deletionThreshold);
 
-    response.sendJavaScriptRedirect(createJobPath(mrRunner
+    mrRunner
         .setJobName("Delete old commit logs")
         .setModuleName("backend")
         .setDefaultMapShards(NUM_MAP_SHARDS)
         .setDefaultReduceShards(NUM_REDUCE_SHARDS)
         .runMapreduce(
             new DeleteOldCommitLogsMapper(deletionThreshold),
             new DeleteOldCommitLogsReducer(deletionThreshold, isDryRun),
             ImmutableList.of(
                 new CommitLogManifestInput(deletionThreshold),
-                EppResourceInputs.createKeyInput(EppResource.class)))));
+                EppResourceInputs.createKeyInput(EppResource.class)))
+        .sendLinkToMapreduceConsole(response);
   }
 
   /**
@@ -40,7 +40,6 @@ import static google.registry.model.reporting.HistoryEntry.Type.CONTACT_DELETE_F
 import static google.registry.model.reporting.HistoryEntry.Type.HOST_DELETE;
 import static google.registry.model.reporting.HistoryEntry.Type.HOST_DELETE_FAILURE;
 import static google.registry.model.transfer.TransferStatus.SERVER_CANCELLED;
-import static google.registry.util.PipelineUtils.createJobPath;
 import static java.math.RoundingMode.CEILING;
 import static java.util.concurrent.TimeUnit.DAYS;
 import static java.util.concurrent.TimeUnit.SECONDS;
@@ -215,20 +214,18 @@ public class DeleteContactsAndHostsAction implements Runnable {
     try {
       int numReducers =
           Math.min(MAX_REDUCE_SHARDS, divide(deletionRequests.size(), DELETES_PER_SHARD, CEILING));
-      response.sendJavaScriptRedirect(
-          createJobPath(
-              mrRunner
-                  .setJobName("Check for EPP resource references and then delete")
-                  .setModuleName("backend")
-                  .setDefaultReduceShards(numReducers)
-                  .runMapreduce(
-                      new DeleteContactsAndHostsMapper(deletionRequests),
-                      new DeleteEppResourceReducer(),
-                      ImmutableList.of(
-                          // Add an extra shard that maps over a null domain. See the mapper code
-                          // for why.
-                          new NullInput<>(), EppResourceInputs.createEntityInput(DomainBase.class)),
-                      new UnlockerOutput<Void>(lock.get()))));
+      mrRunner
+          .setJobName("Check for EPP resource references and then delete")
+          .setModuleName("backend")
+          .setDefaultReduceShards(numReducers)
+          .runMapreduce(
+              new DeleteContactsAndHostsMapper(deletionRequests),
+              new DeleteEppResourceReducer(),
+              ImmutableList.of(
+                  // Add an extra shard that maps over a null domain. See the mapper code for why.
+                  new NullInput<>(), EppResourceInputs.createEntityInput(DomainBase.class)),
+              new UnlockerOutput<Void>(lock.get()))
+          .sendLinkToMapreduceConsole(response);
     } catch (Throwable t) {
       logRespondAndUnlock(SEVERE, "Error starting mapreduce to delete contacts/hosts.", lock);
     }
@@ -37,7 +37,6 @@ import google.registry.request.Action;
 import google.registry.request.Parameter;
 import google.registry.request.Response;
 import google.registry.request.auth.Auth;
-import google.registry.util.PipelineUtils;
 import java.util.List;
 import javax.inject.Inject;
 
@@ -83,16 +82,14 @@ public class DeleteLoadTestDataAction implements Runnable {
     checkState(
         registryEnvironment != PRODUCTION, "This mapreduce is not safe to run on PRODUCTION.");
 
-    response.sendJavaScriptRedirect(
-        PipelineUtils.createJobPath(
-            mrRunner
-                .setJobName("Delete load test data")
-                .setModuleName("backend")
-                .runMapOnly(
-                    new DeleteLoadTestDataMapper(isDryRun),
-                    ImmutableList.of(
-                        createEntityInput(ContactResource.class),
-                        createEntityInput(HostResource.class)))));
+    mrRunner
+        .setJobName("Delete load test data")
+        .setModuleName("backend")
+        .runMapOnly(
+            new DeleteLoadTestDataMapper(isDryRun),
+            ImmutableList.of(
+                createEntityInput(ContactResource.class), createEntityInput(HostResource.class)))
+        .sendLinkToMapreduceConsole(response);
   }
 
   /** Provides the map method that runs for each existing contact and host entity. */
@@ -52,7 +52,6 @@ import google.registry.request.Action;
 import google.registry.request.Parameter;
 import google.registry.request.Response;
 import google.registry.request.auth.Auth;
-import google.registry.util.PipelineUtils;
 import java.util.List;
 import javax.inject.Inject;
 import org.joda.time.DateTime;
@@ -87,12 +86,13 @@ public class DeleteProberDataAction implements Runnable {
     checkState(
         !Strings.isNullOrEmpty(registryAdminClientId),
         "Registry admin client ID must be configured for prober data deletion to work");
-    response.sendJavaScriptRedirect(PipelineUtils.createJobPath(mrRunner
+    mrRunner
         .setJobName("Delete prober data")
        .setModuleName("backend")
        .runMapOnly(
            new DeleteProberDataMapper(getProberRoidSuffixes(), isDryRun, registryAdminClientId),
-           ImmutableList.of(EppResourceInputs.createKeyInput(DomainBase.class)))));
+           ImmutableList.of(EppResourceInputs.createKeyInput(DomainBase.class)))
+       .sendLinkToMapreduceConsole(response);
   }
 
   private ImmutableSet<String> getProberRoidSuffixes() {
@@ -28,7 +28,6 @@ import static google.registry.util.CollectionUtils.union;
 import static google.registry.util.DateTimeUtils.START_OF_TIME;
 import static google.registry.util.DateTimeUtils.earliestOf;
 import static google.registry.util.DomainNameUtils.getTldFromDomainName;
-import static google.registry.util.PipelineUtils.createJobPath;
 
 import com.google.appengine.tools.mapreduce.Mapper;
 import com.google.appengine.tools.mapreduce.Reducer;
@@ -102,7 +101,7 @@ public class ExpandRecurringBillingEventsAction implements Runnable {
     logger.atInfo().log(
         "Running Recurring billing event expansion for billing time range [%s, %s).",
         cursorTime, executeTime);
-    response.sendJavaScriptRedirect(createJobPath(mrRunner
+    mrRunner
         .setJobName("Expand Recurring billing events into synthetic OneTime events.")
         .setModuleName("backend")
         .runMapreduce(
@@ -112,7 +111,8 @@
             ImmutableList.of(
                 new NullInput<>(),
                 createChildEntityInput(
-                    ImmutableSet.of(DomainResource.class), ImmutableSet.of(Recurring.class))))));
+                    ImmutableSet.of(DomainResource.class), ImmutableSet.of(Recurring.class))))
+        .sendLinkToMapreduceConsole(response);
   }
 
   /** Mapper to expand {@link Recurring} billing events into synthetic {@link OneTime} events. */
@@ -27,7 +27,6 @@ import static google.registry.model.EppResourceUtils.isActive;
 import static google.registry.model.EppResourceUtils.isDeleted;
 import static google.registry.model.ofy.ObjectifyService.ofy;
 import static google.registry.util.DateTimeUtils.latestOf;
-import static google.registry.util.PipelineUtils.createJobPath;
 import static java.util.concurrent.TimeUnit.DAYS;
 import static java.util.concurrent.TimeUnit.SECONDS;
 import static java.util.logging.Level.INFO;
@@ -159,18 +158,16 @@ public class RefreshDnsOnHostRenameAction implements Runnable {
 
   private void runMapreduce(ImmutableList<DnsRefreshRequest> refreshRequests, Optional<Lock> lock) {
     try {
-      response.sendJavaScriptRedirect(
-          createJobPath(
-              mrRunner
-                  .setJobName("Enqueue DNS refreshes for domains referencing renamed hosts")
-                  .setModuleName("backend")
-                  .setDefaultReduceShards(1)
-                  .runMapreduce(
-                      new RefreshDnsOnHostRenameMapper(refreshRequests, retrier),
-                      new RefreshDnsOnHostRenameReducer(refreshRequests, lock.get(), retrier),
-                      // Add an extra NullInput so that the reducer always fires exactly once.
-                      ImmutableList.of(
-                          new NullInput<>(), createEntityInput(DomainResource.class)))));
+      mrRunner
+          .setJobName("Enqueue DNS refreshes for domains referencing renamed hosts")
+          .setModuleName("backend")
+          .setDefaultReduceShards(1)
+          .runMapreduce(
+              new RefreshDnsOnHostRenameMapper(refreshRequests, retrier),
+              new RefreshDnsOnHostRenameReducer(refreshRequests, lock.get(), retrier),
+              // Add an extra NullInput so that the reducer always fires exactly once.
+              ImmutableList.of(new NullInput<>(), createEntityInput(DomainResource.class)))
+          .sendLinkToMapreduceConsole(response);
     } catch (Throwable t) {
       logRespondAndUnlock(
           SEVERE, "Error starting mapreduce to refresh DNS for renamed hosts.", lock);
@@ -15,7 +15,6 @@
 package google.registry.batch;
 
 import static google.registry.model.ofy.ObjectifyService.ofy;
-import static google.registry.util.PipelineUtils.createJobPath;
 
 import com.google.appengine.tools.mapreduce.Mapper;
 import com.google.common.collect.ImmutableList;
@@ -52,12 +51,13 @@ public class ResaveAllEppResourcesAction implements Runnable {
 
   @Override
   public void run() {
-    response.sendJavaScriptRedirect(createJobPath(mrRunner
+    mrRunner
         .setJobName("Re-save all EPP resources")
         .setModuleName("backend")
         .runMapOnly(
             new ResaveAllEppResourcesActionMapper(),
-            ImmutableList.of(EppResourceInputs.createKeyInput(EppResource.class)))));
+            ImmutableList.of(EppResourceInputs.createKeyInput(EppResource.class)))
+        .sendLinkToMapreduceConsole(response);
   }
 
   /** Mapper to re-save all EPP resources. */
@@ -20,7 +20,6 @@ import static google.registry.mapreduce.inputs.EppResourceInputs.createEntityInp
 import static google.registry.model.EppResourceUtils.isActive;
 import static google.registry.model.registry.Registries.getTldsOfType;
 import static google.registry.request.Action.Method.POST;
-import static google.registry.util.PipelineUtils.createJobPath;
 import static java.nio.charset.StandardCharsets.UTF_8;
 import static org.joda.time.DateTimeZone.UTC;
 
@@ -78,14 +77,15 @@ public class ExportDomainListsAction implements Runnable {
   public void run() {
     ImmutableSet<String> realTlds = getTldsOfType(TldType.REAL);
     logger.atInfo().log("Exporting domain lists for tlds %s", realTlds);
-    response.sendJavaScriptRedirect(createJobPath(mrRunner
+    mrRunner
         .setJobName("Export domain lists")
        .setModuleName("backend")
        .setDefaultReduceShards(Math.min(realTlds.size(), MAX_NUM_REDUCE_SHARDS))
        .runMapreduce(
            new ExportDomainListsMapper(DateTime.now(UTC), realTlds),
            new ExportDomainListsReducer(gcsBucket, gcsBufferSize),
-           ImmutableList.of(createEntityInput(DomainResource.class)))));
+           ImmutableList.of(createEntityInput(DomainResource.class)))
+       .sendLinkToMapreduceConsole(response);
   }
 
   static class ExportDomainListsMapper extends Mapper<DomainResource, String, String> {
@@ -35,7 +35,8 @@ import com.google.appengine.tools.pipeline.JobSetting;
 import com.google.common.flogger.FluentLogger;
 import google.registry.mapreduce.inputs.ConcatenatingInput;
 import google.registry.request.Parameter;
-import google.registry.util.PipelineUtils;
+import google.registry.request.Response;
+import google.registry.util.AppEngineServiceUtils;
 import java.io.Serializable;
 import java.util.Optional;
 import javax.inject.Inject;
@@ -58,8 +59,12 @@ public class MapreduceRunner {
   private static final String BASE_URL = "/_dr/mapreduce/";
   private static final String QUEUE_NAME = "mapreduce";
 
+  private static final String MAPREDUCE_CONSOLE_LINK_FORMAT =
+      "Mapreduce console: https://%s/_ah/pipeline/status.html?root=%s";
+
   private final Optional<Integer> httpParamMapShards;
   private final Optional<Integer> httpParamReduceShards;
+  private final AppEngineServiceUtils appEngineServiceUtils;
 
   // Default to 3 minutes since many slices will contain Datastore queries that time out at 4:30.
   private Duration sliceDuration = Duration.standardMinutes(3);
@@ -87,9 +92,11 @@
   @Inject
   public MapreduceRunner(
       @Parameter(PARAM_MAP_SHARDS) Optional<Integer> mapShards,
-      @Parameter(PARAM_REDUCE_SHARDS) Optional<Integer> reduceShards) {
+      @Parameter(PARAM_REDUCE_SHARDS) Optional<Integer> reduceShards,
+      AppEngineServiceUtils appEngineServiceUtils) {
     this.httpParamMapShards = mapShards;
     this.httpParamReduceShards = reduceShards;
+    this.appEngineServiceUtils = appEngineServiceUtils;
   }
 
   /** Set the max time to run a slice before serializing; defaults to 3 minutes. */
@@ -160,15 +167,13 @@
    * all work will be accomplished via side effects during the map phase.
    *
    * @see #createMapOnlyJob for creating and running a map-only mapreduce as part of a pipeline
-   *
    * @param mapper instance of a mapper class
    * @param inputs input sources for the mapper
    * @param <I> mapper input type
    * @return the job id
    */
-  public <I> String runMapOnly(
-      Mapper<I, Void, Void> mapper,
-      Iterable<? extends Input<? extends I>> inputs) {
+  public <I> MapreduceRunnerResult runMapOnly(
+      Mapper<I, Void, Void> mapper, Iterable<? extends Input<? extends I>> inputs) {
     return runAsPipeline(createMapOnlyJob(mapper, new NoOutput<Void, Void>(), inputs));
   }
 
@@ -220,7 +225,6 @@
    * all work will be accomplished via side effects during the map or reduce phases.
    *
    * @see #createMapreduceJob for creating and running a mapreduce as part of a pipeline
-   *
   * @param mapper instance of a mapper class
   * @param reducer instance of a reducer class
   * @param inputs input sources for the mapper
@@ -229,10 +233,11 @@
   * @param <V> emitted value type
   * @return the job id
   */
-  public final <I, K extends Serializable, V extends Serializable> String runMapreduce(
-      Mapper<I, K, V> mapper,
-      Reducer<K, V, Void> reducer,
-      Iterable<? extends Input<? extends I>> inputs) {
+  public final <I, K extends Serializable, V extends Serializable>
+      MapreduceRunnerResult runMapreduce(
+          Mapper<I, K, V> mapper,
+          Reducer<K, V, Void> reducer,
+          Iterable<? extends Input<? extends I>> inputs) {
     return runMapreduce(mapper, reducer, inputs, new NoOutput<Void, Void>());
   }
 
@@ -240,7 +245,6 @@
   * Kick off a mapreduce job with specified Output handler.
   *
   * @see #createMapreduceJob for creating and running a mapreduce as part of a pipeline
-  *
   * @param mapper instance of a mapper class
   * @param reducer instance of a reducer class
   * @param inputs input sources for the mapper
@@ -251,11 +255,12 @@
   * @param <R> return value of output
   * @return the job id
   */
-  public final <I, K extends Serializable, V extends Serializable, O, R> String runMapreduce(
-      Mapper<I, K, V> mapper,
-      Reducer<K, V, O> reducer,
-      Iterable<? extends Input<? extends I>> inputs,
-      Output<O, R> output) {
+  public final <I, K extends Serializable, V extends Serializable, O, R>
+      MapreduceRunnerResult runMapreduce(
+          Mapper<I, K, V> mapper,
+          Reducer<K, V, O> reducer,
+          Iterable<? extends Input<? extends I>> inputs,
+          Output<O, R> output) {
     return runAsPipeline(createMapreduceJob(mapper, reducer, inputs, output));
   }
 
@@ -266,14 +271,41 @@
     checkArgumentNotNull(mapper, "mapper");
   }
 
-  private String runAsPipeline(Job0<?> job) {
-    String jobId = newPipelineService().startNewPipeline(
-        job,
-        new JobSetting.OnModule(moduleName),
-        new JobSetting.OnQueue(QUEUE_NAME));
+  private MapreduceRunnerResult runAsPipeline(Job0<?> job) {
+    String jobId =
+        newPipelineService()
+            .startNewPipeline(
+                job, new JobSetting.OnModule(moduleName), new JobSetting.OnQueue(QUEUE_NAME));
     logger.atInfo().log(
         "Started '%s' %s job: %s",
-        jobName, job instanceof MapJob ? "map" : "mapreduce", PipelineUtils.createJobPath(jobId));
-    return jobId;
+        jobName, job instanceof MapJob ? "map" : "mapreduce", renderMapreduceConsoleLink(jobId));
+    return new MapreduceRunnerResult(jobId);
+  }
+
+  private String renderMapreduceConsoleLink(String jobId) {
+    return String.format(
+        MAPREDUCE_CONSOLE_LINK_FORMAT, appEngineServiceUtils.getServiceHostname("backend"), jobId);
+  }
+
+  /**
+   * Class representing the result of kicking off a mapreduce.
+   *
+   * <p>This is used to send a link to the mapreduce console.
+   */
+  public class MapreduceRunnerResult {
+
+    private final String jobId;
+
+    private MapreduceRunnerResult(String jobId) {
+      this.jobId = jobId;
+    }
+
+    public void sendLinkToMapreduceConsole(Response response) {
+      response.setPayload(getLinkToMapreduceConsole());
+    }
+
+    public String getLinkToMapreduceConsole() {
+      return renderMapreduceConsoleLink(jobId);
+    }
+  }
 }
@@ -16,7 +16,6 @@ package google.registry.rde;
 
 import static google.registry.request.Action.Method.GET;
 import static google.registry.request.Action.Method.POST;
-import static google.registry.util.PipelineUtils.createJobPath;
 import static google.registry.xml.ValidationMode.LENIENT;
 import static google.registry.xml.ValidationMode.STRICT;
 import static javax.servlet.http.HttpServletResponse.SC_NO_CONTENT;
@@ -228,7 +227,7 @@ public final class RdeStagingAction implements Runnable {
     }
     RdeStagingMapper mapper = new RdeStagingMapper(lenient ? LENIENT : STRICT, pendings);
 
-    response.sendJavaScriptRedirect(createJobPath(mrRunner
+    mrRunner
         .setJobName("Stage escrow deposits for all TLDs")
         .setModuleName("backend")
         .setDefaultReduceShards(pendings.size())
@@ -237,8 +236,8 @@
             reducer,
             ImmutableList.of(
                 // Add an extra shard that maps over a null resource. See the mapper code for why.
-                new NullInput<>(),
-                EppResourceInputs.createEntityInput(EppResource.class)))));
+                new NullInput<>(), EppResourceInputs.createEntityInput(EppResource.class)))
+        .sendLinkToMapreduceConsole(response);
   }
 
   private ImmutableSetMultimap<String, PendingDeposit> getStandardPendingDeposits() {
@@ -16,7 +16,6 @@ package google.registry.rde.imports;
 
 import static google.registry.mapreduce.MapreduceRunner.PARAM_MAP_SHARDS;
 import static google.registry.model.ofy.ObjectifyService.ofy;
-import static google.registry.util.PipelineUtils.createJobPath;
 
 import com.google.appengine.tools.cloudstorage.GcsService;
 import com.google.appengine.tools.cloudstorage.GcsServiceFactory;
@@ -77,12 +76,11 @@ public class RdeContactImportAction implements Runnable {
 
   @Override
   public void run() {
-    response.sendJavaScriptRedirect(createJobPath(mrRunner
+    mrRunner
         .setJobName("Import contacts from escrow file")
         .setModuleName("backend")
-        .runMapOnly(
-            createMapper(),
-            ImmutableList.of(createInput()))));
+        .runMapOnly(createMapper(), ImmutableList.of(createInput()))
+        .sendLinkToMapreduceConsole(response);
   }
 
   /**
@@ -25,7 +25,6 @@ import static google.registry.rde.imports.RdeImportUtils.createAutoRenewBillingE
 import static google.registry.rde.imports.RdeImportUtils.createAutoRenewPollMessageForDomainImport;
 import static google.registry.rde.imports.RdeImportUtils.createHistoryEntryForDomainImport;
 import static google.registry.rde.imports.RdeImportsModule.PATH;
-import static google.registry.util.PipelineUtils.createJobPath;
 import static google.registry.util.PreconditionsUtils.checkArgumentNotNull;
 
 import com.google.appengine.tools.cloudstorage.GcsService;
@@ -107,12 +106,11 @@ public class RdeDomainImportAction implements Runnable {
     logger.atInfo().log(
         "Launching domains import mapreduce: bucket=%s, filename=%s",
         this.importBucketName, this.importFileName);
-    response.sendJavaScriptRedirect(createJobPath(mrRunner
+    mrRunner
         .setJobName("Import domains from escrow file")
         .setModuleName("backend")
-        .runMapOnly(
-            createMapper(),
-            ImmutableList.of(createInput()))));
+        .runMapOnly(createMapper(), ImmutableList.of(createInput()))
+        .sendLinkToMapreduceConsole(response);
   }
 
   /**
@@ -16,7 +16,6 @@ package google.registry.rde.imports;
 
 import static google.registry.mapreduce.MapreduceRunner.PARAM_MAP_SHARDS;
 import static google.registry.model.ofy.ObjectifyService.ofy;
-import static google.registry.util.PipelineUtils.createJobPath;
 
 import com.google.appengine.tools.cloudstorage.GcsService;
 import com.google.appengine.tools.cloudstorage.GcsServiceFactory;
@@ -77,12 +76,13 @@ public class RdeHostImportAction implements Runnable {
 
   @Override
   public void run() {
-    response.sendJavaScriptRedirect(createJobPath(mrRunner
+    mrRunner
         .setJobName("Import hosts from escrow file")
         .setModuleName("backend")
         .runMapOnly(
             new RdeHostImportMapper(importBucketName),
-            ImmutableList.of(new RdeHostInput(mapShards, importBucketName, importFileName)))));
+            ImmutableList.of(new RdeHostInput(mapShards, importBucketName, importFileName)))
+        .sendLinkToMapreduceConsole(response);
   }
 
   /** Mapper to import hosts from an escrow file. */
@@ -19,7 +19,6 @@ import static google.registry.mapreduce.MapreduceRunner.PARAM_MAP_SHARDS;
 import static google.registry.model.EppResourceUtils.loadByForeignKey;
 import static google.registry.model.ofy.ObjectifyService.ofy;
 import static google.registry.model.registry.Registries.findTldForName;
-import static google.registry.util.PipelineUtils.createJobPath;
 import static java.util.stream.Collectors.joining;
 
 import com.google.appengine.tools.mapreduce.Mapper;
@@ -86,12 +85,13 @@ public class RdeHostLinkAction implements Runnable {
 
   @Override
   public void run() {
-    response.sendJavaScriptRedirect(createJobPath(mrRunner
+    mrRunner
         .setJobName("Link hosts from escrow file")
         .setModuleName("backend")
         .runMapOnly(
             new RdeHostPostImportMapper(),
-            ImmutableList.of(new RdeHostInput(mapShards, importBucketName, importFileName)))));
+            ImmutableList.of(new RdeHostInput(mapShards, importBucketName, importFileName)))
+        .sendLinkToMapreduceConsole(response);
   }
 
   /** Mapper to link hosts from an escrow file to their superordinate domains. */
@@ -51,12 +51,4 @@ public interface Response {
   * @see HttpServletResponse#setDateHeader(String, long)
   */
  void setDateHeader(String header, DateTime timestamp);
-
-  /**
-   * Sends a JavaScript redirect HTTP response.
-   *
-   * GAE handles a HTTP 302 status as an error, so using this is helpful for responses that might
-   * sometimes be consumed by GAE code, since it performs a redirect while also returning HTTP 200.
-   */
-  void sendJavaScriptRedirect(String redirectUrl);
 }
@@ -23,10 +23,6 @@ import org.joda.time.DateTime;
 /** HTTP response object. */
 public final class ResponseImpl implements Response {
 
-  /** Code for a JavaScript redirect. */
-  private static final String REDIRECT_PAYLOAD_FORMAT =
-      "<script>window.location.replace(\"%1$s\");</script><a href=\"%1$s\">%1$s</a>";
-
   private final HttpServletResponse rsp;
 
   @Inject
@@ -62,9 +58,4 @@ public final class ResponseImpl implements Response {
   public void setDateHeader(String header, DateTime timestamp) {
     rsp.setDateHeader(header, timestamp.getMillis());
   }
-
-  @Override
-  public void sendJavaScriptRedirect(String redirectUrl) {
-    setPayload(String.format(REDIRECT_PAYLOAD_FORMAT, redirectUrl));
-  }
 }
@@ -59,7 +59,7 @@ final class GenerateZoneFilesCommand implements CommandWithConnection, CommandWi
         "tlds", mainParameters,
         "exportTime", exportDate.toString());
     Map<String, Object> response = connection.sendJson(GenerateZoneFilesAction.PATH, params);
-    System.out.printf("Job started at %s %s\n", connection.getServer(), response.get("jobPath"));
+    System.out.println(response.get("mapreduceConsoleLink"));
     System.out.println("Output files:");
     @SuppressWarnings("unchecked")
     List<String> filenames = (List<String>) response.get("filenames");
@@ -22,7 +22,6 @@ import static google.registry.mapreduce.inputs.EppResourceInputs.createEntityInp
 import static google.registry.model.EppResourceUtils.loadAtPointInTime;
 import static google.registry.model.ofy.ObjectifyService.ofy;
 import static google.registry.request.Action.Method.POST;
-import static google.registry.util.PipelineUtils.createJobPath;
 import static java.nio.charset.StandardCharsets.UTF_8;
 import static org.joda.time.DateTimeZone.UTC;
 
@@ -132,17 +131,17 @@ public class GenerateZoneFilesAction implements Runnable, JsonActionRunner.JsonA
     if (!exportTime.equals(exportTime.toDateTime(UTC).withTimeAtStartOfDay())) {
       throw new BadRequestException("Invalid export time: must be midnight UTC");
     }
-    String jobId = mrRunner
-        .setJobName("Generate bind file stanzas")
-        .setModuleName("tools")
-        .setDefaultReduceShards(tlds.size())
-        .runMapreduce(
-            new GenerateBindFileMapper(
-                tlds, exportTime, dnsDefaultATtl, dnsDefaultNsTtl, dnsDefaultDsTtl),
-            new GenerateBindFileReducer(bucket, exportTime, gcsBufferSize),
-            ImmutableList.of(
-                new NullInput<>(),
-                createEntityInput(DomainResource.class)));
+    String mapreduceConsoleLink =
+        mrRunner
+            .setJobName("Generate bind file stanzas")
+            .setModuleName("tools")
+            .setDefaultReduceShards(tlds.size())
+            .runMapreduce(
+                new GenerateBindFileMapper(
+                    tlds, exportTime, dnsDefaultATtl, dnsDefaultNsTtl, dnsDefaultDsTtl),
+                new GenerateBindFileReducer(bucket, exportTime, gcsBufferSize),
+                ImmutableList.of(new NullInput<>(), createEntityInput(DomainResource.class)))
+            .getLinkToMapreduceConsole();
     ImmutableList<String> filenames =
         tlds.stream()
            .map(
@@ -151,7 +150,7 @@
                GCS_PATH_FORMAT, bucket, String.format(FILENAME_FORMAT, tld, exportTime)))
            .collect(toImmutableList());
     return ImmutableMap.of(
-        "jobPath", createJobPath(jobId),
+        "mapreduceConsoleLink", mapreduceConsoleLink,
         "filenames", filenames);
   }
 
@@ -18,7 +18,6 @@ import static com.google.common.base.Preconditions.checkArgument;
 import static com.google.common.collect.ImmutableList.toImmutableList;
 import static google.registry.model.ofy.ObjectifyService.ofy;
 import static google.registry.request.Action.Method.POST;
-import static google.registry.util.PipelineUtils.createJobPath;
 
 import com.google.appengine.tools.mapreduce.Input;
 import com.google.appengine.tools.mapreduce.Mapper;
@@ -72,13 +71,12 @@ public class KillAllCommitLogsAction implements Runnable {
                     CommitLogBucket.getAllBucketKeys().stream())
                 .collect(toImmutableList()),
             1));
-    response.sendJavaScriptRedirect(createJobPath(mrRunner
+    mrRunner
         .setJobName("Delete all commit logs")
         .setModuleName("tools")
         .runMapreduce(
-            new KillAllCommitLogsMapper(),
-            new KillAllEntitiesReducer(),
-            ImmutableList.of(input))));
+            new KillAllCommitLogsMapper(), new KillAllEntitiesReducer(), ImmutableList.of(input))
+        .sendLinkToMapreduceConsole(response);
   }
 
   /**
@@ -17,7 +17,6 @@ package google.registry.tools.server;
 import static com.google.common.base.Preconditions.checkArgument;
 import static google.registry.model.ofy.ObjectifyService.ofy;
 import static google.registry.request.Action.Method.POST;
-import static google.registry.util.PipelineUtils.createJobPath;
 
 import com.google.appengine.tools.mapreduce.Mapper;
 import com.google.common.collect.ImmutableList;
@@ -58,13 +57,14 @@ public class KillAllEppResourcesAction implements Runnable {
         RegistryEnvironment.get() == RegistryEnvironment.CRASH
             || RegistryEnvironment.get() == RegistryEnvironment.UNITTEST,
         "DO NOT RUN ANYWHERE ELSE EXCEPT CRASH OR TESTS.");
-    response.sendJavaScriptRedirect(createJobPath(mrRunner
+    mrRunner
         .setJobName("Delete all EppResources, children, and indices")
         .setModuleName("tools")
         .runMapreduce(
             new KillAllEppResourcesMapper(),
            new KillAllEntitiesReducer(),
-           ImmutableList.of(EppResourceInputs.createIndexInput()))));
+           ImmutableList.of(EppResourceInputs.createIndexInput()))
+        .sendLinkToMapreduceConsole(response);
   }
 
   static class KillAllEppResourcesMapper extends Mapper<EppResourceIndex, Key<?>, Key<?>> {
@@ -18,7 +18,6 @@ import static google.registry.mapreduce.inputs.EppResourceInputs.createEntityInp
 import static google.registry.model.EppResourceUtils.isActive;
 import static google.registry.model.registry.Registries.assertTldsExist;
 import static google.registry.request.RequestParameters.PARAM_TLDS;
-import static google.registry.util.PipelineUtils.createJobPath;
 
 import com.google.appengine.tools.mapreduce.Mapper;
 import com.google.common.annotations.VisibleForTesting;
@@ -64,15 +63,14 @@ public class RefreshDnsForAllDomainsAction implements Runnable {
   @Override
   public void run() {
     assertTldsExist(tlds);
-    response.sendJavaScriptRedirect(
-        createJobPath(
-            mrRunner
-                .setJobName("Refresh DNS for all domains")
-                .setModuleName("tools")
-                .setDefaultMapShards(10)
-                .runMapOnly(
-                    new RefreshDnsForAllDomainsActionMapper(tlds),
-                    ImmutableList.of(createEntityInput(DomainResource.class)))));
+    mrRunner
+        .setJobName("Refresh DNS for all domains")
+        .setModuleName("tools")
+        .setDefaultMapShards(10)
+        .runMapOnly(
+            new RefreshDnsForAllDomainsActionMapper(tlds),
+            ImmutableList.of(createEntityInput(DomainResource.class)))
+        .sendLinkToMapreduceConsole(response);
   }
 
   /** Mapper to refresh DNS for all active domain resources. */
@@ -15,7 +15,6 @@
 package google.registry.tools.server;
 
 import static google.registry.model.ofy.ObjectifyService.ofy;
-import static google.registry.util.PipelineUtils.createJobPath;
 
 import com.google.appengine.tools.mapreduce.Mapper;
 import com.google.common.collect.ImmutableList;
@@ -51,14 +50,15 @@ public class ResaveAllHistoryEntriesAction implements Runnable {
   @SuppressWarnings("unchecked")
   @Override
   public void run() {
-    response.sendJavaScriptRedirect(createJobPath(mrRunner
+    mrRunner
         .setJobName("Re-save all HistoryEntry entities")
         .setModuleName("tools")
         .runMapOnly(
             new ResaveAllHistoryEntriesActionMapper(),
-            ImmutableList.of(EppResourceInputs.createChildEntityInput(
-                ImmutableSet.of(EppResource.class),
-                ImmutableSet.of(HistoryEntry.class))))));
+            ImmutableList.of(
+                EppResourceInputs.createChildEntityInput(
+                    ImmutableSet.of(EppResource.class), ImmutableSet.of(HistoryEntry.class))))
+        .sendLinkToMapreduceConsole(response);
   }
 
   /** Mapper to re-save all HistoryEntry entities. */
@@ -1,27 +0,0 @@
-// Copyright 2017 The Nomulus Authors. All Rights Reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package google.registry.util;
-
-/** Utilities for working with pipeline jobs. */
-public class PipelineUtils {
-
-  private static final String URL_PREFIX = "/_ah/pipeline/status.html?root=";
-
-  private PipelineUtils() {}
-
-  public static String createJobPath(String jobId) {
-    return URL_PREFIX + jobId;
-  }
-}
@@ -55,6 +55,7 @@ public class ExportDomainListsActionTest extends MapreduceTestCase<ExportDomainL
   private GcsService gcsService;
   private DriveConnection driveConnection = mock(DriveConnection.class);
   private ArgumentCaptor<byte[]> bytesExportedToDrive = ArgumentCaptor.forClass(byte[].class);
+  private final FakeResponse response = new FakeResponse();
 
   @Before
   public void init() {
@@ -67,7 +68,7 @@
 
     action = new ExportDomainListsAction();
     action.mrRunner = makeDefaultRunner();
-    action.response = new FakeResponse();
+    action.response = response;
     action.gcsBucket = "outputbucket";
     action.gcsBufferSize = 500;
     gcsService = createGcsService();
@@ -88,6 +89,14 @@
     assertThat(new String(bytesExportedToDrive.getValue(), "UTF-8")).isEqualTo(domains);
   }
 
+  @Test
+  public void test_writesLinkToMapreduceConsoleToResponse() throws Exception {
+    runMapreduce();
+    assertThat(response.getPayload())
+        .startsWith(
+            "Mapreduce console: https://backend.hostname.tld/_ah/pipeline/status.html?root=");
+  }
+
   @Test
   public void test_outputsOnlyActiveDomains() throws Exception {
     persistActiveDomain("onetwo.tld");
@@ -69,7 +69,7 @@ public class RdeHostImportActionTest extends MapreduceTestCase<RdeHostImportActi
   @Before
   public void before() {
     response = new FakeResponse();
-    mrRunner = new MapreduceRunner(Optional.empty(), Optional.empty());
+    mrRunner = makeDefaultRunner();
     action = new RdeHostImportAction(
         mrRunner,
         response,
@@ -85,7 +85,7 @@ public class RdeHostLinkActionTest extends MapreduceTestCase<RdeHostLinkAction>
     inject.setStaticField(Ofy.class, "clock", clock);
     createTld("test");
     response = new FakeResponse();
-    mrRunner = new MapreduceRunner(Optional.empty(), Optional.empty());
+    mrRunner = makeDefaultRunner();
     action =
         new RdeHostLinkAction(mrRunner, response, IMPORT_BUCKET_NAME, IMPORT_FILE_NAME, mapShards);
   }
@@ -55,13 +55,4 @@ public class ResponseImplTest {
     new ResponseImpl(rsp).setPayload("hello world");
     assertThat(httpOutput.toString()).isEqualTo("hello world");
   }
-
-  @Test
-  public void testSendJavaScriptRedirect_producesHtmlScript() throws Exception {
-    StringWriter httpOutput = new StringWriter();
-    when(rsp.getWriter()).thenReturn(new PrintWriter(httpOutput));
-    new ResponseImpl(rsp).sendJavaScriptRedirect("/hello");
-    assertThat(httpOutput.toString()).isEqualTo(
-        "<script>window.location.replace(\"/hello\");</script><a href=\"/hello\">/hello</a>");
-  }
 }
@@ -80,16 +80,11 @@ public final class FakeResponse implements Response {
     headers.put(checkNotNull(header), checkNotNull(timestamp));
   }
 
-  @Override
-  public void sendJavaScriptRedirect(String redirectUrl) {
-    checkResponsePerformedOnce();
-    this.status = 200;
-    this.payload = "Javascript redirect to " + redirectUrl;
-  }
-
   private void checkResponsePerformedOnce() {
-    checkState(!wasMutuallyExclusiveResponseSet,
-        "Two responses were sent. Here's the previous call:\n%s", lastResponseStackTrace);
+    checkState(
+        !wasMutuallyExclusiveResponseSet,
+        "Two responses were sent. Here's the previous call:\n%s",
+        lastResponseStackTrace);
     wasMutuallyExclusiveResponseSet = true;
     lastResponseStackTrace = getStackTrace();
   }
@@ -14,6 +14,7 @@ java_library(
         "//java/google/registry/config",
         "//java/google/registry/mapreduce",
         "//java/google/registry/model",
+        "//java/google/registry/util",
         "//javatests/google/registry/testing",
         "@com_google_appengine_api_1_0_sdk",
         "@com_google_appengine_api_stubs",
@@ -14,7 +14,6 @@
 
 package google.registry.testing.mapreduce;
 
-import static com.google.common.truth.Truth.assertThat;
 import static google.registry.config.RegistryConfig.getEppResourceIndexBucketCount;
 import static google.registry.model.ofy.ObjectifyService.ofy;
 import static org.mockito.Mockito.mock;
@@ -24,11 +23,9 @@ import com.google.appengine.api.blobstore.dev.LocalBlobstoreService;
 import com.google.appengine.api.taskqueue.dev.LocalTaskQueue;
 import com.google.appengine.api.taskqueue.dev.QueueStateInfo;
 import com.google.appengine.api.taskqueue.dev.QueueStateInfo.HeaderWrapper;
-import com.google.appengine.api.taskqueue.dev.QueueStateInfo.TaskStateInfo;
 import com.google.appengine.tools.development.ApiProxyLocal;
 import com.google.appengine.tools.development.testing.LocalTaskQueueTestConfig;
 import com.google.appengine.tools.mapreduce.MapReduceServlet;
-import com.google.appengine.tools.mapreduce.impl.shardedjob.ShardedJobHandler;
 import com.google.appengine.tools.pipeline.impl.servlets.PipelineServlet;
 import com.google.appengine.tools.pipeline.impl.servlets.TaskHandler;
 import com.google.apphosting.api.ApiProxy;
@@ -37,7 +34,9 @@ import com.google.common.flogger.FluentLogger;
 import google.registry.mapreduce.MapreduceRunner;
 import google.registry.testing.AppEngineRule;
 import google.registry.testing.FakeClock;
+import google.registry.testing.MockitoJUnitRule;
 import google.registry.testing.ShardableTestCase;
+import google.registry.util.AppEngineServiceUtils;
 import java.io.ByteArrayInputStream;
 import java.io.ObjectInputStream;
 import java.io.UnsupportedEncodingException;
@@ -52,13 +51,16 @@ import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 import org.junit.Before;
 import org.junit.Rule;
+import org.mockito.Mock;
 
 /**
- * Base test class for mapreduces. Adapted from EndToEndTestCase with some modifications that
- * allow it to work with the Nomulus project, most notably inside knowledge of our
- * routing paths and our Datastore/Task Queue configurations.
+ * Base test class for mapreduces.
  *
- * <p>See https://github.com/GoogleCloudPlatform/appengine-mapreduce/blob/master/java/src/test/java/com/google/appengine/tools/mapreduce/EndToEndTestCase.java
+ * <p>Adapted from EndToEndTestCase with some modifications that allow it to work with Nomulus, most
+ * notably inside knowledge of our routing paths and our Datastore/Task Queue configurations.
+ *
+ * <p>See
+ * https://github.com/GoogleCloudPlatform/appengine-mapreduce/blob/master/java/src/test/java/com/google/appengine/tools/mapreduce/EndToEndTestCase.java
  *
 * @param <T> The type of the Action class that implements the mapreduce.
 */
@@ -79,16 +81,23 @@ public abstract class MapreduceTestCase<T> extends ShardableTestCase {
           .withTaskQueue()
           .build();
 
+  @Rule public final MockitoJUnitRule mocks = MockitoJUnitRule.create();
+
+  @Mock
+  AppEngineServiceUtils appEngineServiceUtils;
+
   @Before
   public void setUp() {
     taskQueue = LocalTaskQueueTestConfig.getLocalTaskQueue();
     ApiProxyLocal proxy = (ApiProxyLocal) ApiProxy.getDelegate();
     // Creating files is not allowed in some test execution environments, so don't.
     proxy.setProperty(LocalBlobstoreService.NO_STORAGE_PROPERTY, "true");
+    when(appEngineServiceUtils.getServiceHostname("backend")).thenReturn("backend.hostname.tld");
   }
 
   protected MapreduceRunner makeDefaultRunner() {
-    return new MapreduceRunner(Optional.of(getEppResourceIndexBucketCount()), Optional.of(1));
+    return new MapreduceRunner(
+        Optional.of(getEppResourceIndexBucketCount()), Optional.of(1), appEngineServiceUtils);
   }
 
   protected List<QueueStateInfo.TaskStateInfo> getTasks(String queueName) {
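Note: a usage sketch of the hunk above. MyMapreduceAction and its constructor are placeholders, and the protected action field, FakeResponse.getPayload(), and the exact payload prefix are assumptions for illustration; makeDefaultRunner(), the mocked AppEngineServiceUtils hostname stub, FakeResponse, and executeTasksUntilEmpty() appear in the diff itself.

import static com.google.common.truth.Truth.assertThat;

import google.registry.testing.FakeResponse;
import org.junit.Before;
import org.junit.Test;

public class MyMapreduceActionTest extends MapreduceTestCase<MyMapreduceAction> {

  private final FakeResponse response = new FakeResponse();

  @Before
  public void setUpAction() {
    // makeDefaultRunner() now injects the mocked AppEngineServiceUtils, so the plaintext
    // console link resolves against the stubbed "backend.hostname.tld" hostname.
    action = new MyMapreduceAction(makeDefaultRunner(), response);
  }

  @Test
  public void testRun_sendsConsoleLink() throws Exception {
    action.run();
    executeTasksUntilEmpty("mapreduce");
    // Assumed payload prefix, based on the assertion added in GenerateZoneFilesActionTest below.
    assertThat(response.getPayload()).startsWith("Mapreduce console: ");
  }
}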
@@ -210,14 +219,6 @@ public abstract class MapreduceTestCase<T> extends ShardableTestCase {
     }
   }
 
-  protected TaskStateInfo grabNextTaskFromQueue(String queueName) {
-    List<TaskStateInfo> taskInfo = getTasks(queueName);
-    assertThat(taskInfo).isNotEmpty();
-    TaskStateInfo taskStateInfo = taskInfo.get(0);
-    taskQueue.deleteTask(queueName, taskStateInfo.getTaskName());
-    return taskStateInfo;
-  }
-
   // Sadly there's no way to parse query string with JDK. This is a good enough approximation.
   private static Map<String, String> decodeParameters(String requestBody)
       throws UnsupportedEncodingException {
@@ -236,8 +237,4 @@
 
     return result;
   }
-
-  protected String getTaskId(TaskStateInfo taskStateInfo) throws UnsupportedEncodingException {
-    return decodeParameters(taskStateInfo.getBody()).get(ShardedJobHandler.TASK_ID_PARAM);
-  }
 }
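Note: the decodeParameters() helper retained above hand-rolls query-string parsing because, as its comment says, the JDK has no built-in parser. Below is a rough, self-contained equivalent using URLDecoder, offered purely as an illustration and not as the project's actual implementation.

import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.util.HashMap;
import java.util.Map;

final class QueryStringDecoder {

  /** Decodes "a=1&b=2"-style bodies; good enough for test assertions, not a full parser. */
  static Map<String, String> decode(String requestBody) throws UnsupportedEncodingException {
    Map<String, String> result = new HashMap<>();
    for (String pair : requestBody.split("&")) {
      if (pair.isEmpty()) {
        continue;
      }
      // Split on the first '=' only; a missing value decodes to the empty string.
      String[] parts = pair.split("=", 2);
      String key = URLDecoder.decode(parts[0], "UTF-8");
      String value = parts.length > 1 ? URLDecoder.decode(parts[1], "UTF-8") : "";
      result.put(key, value);
    }
    return result;
  }
}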
@@ -16,7 +16,7 @@ package google.registry.tools.server;
 
 import static com.google.appengine.tools.cloudstorage.GcsServiceFactory.createGcsService;
 import static com.google.common.truth.Truth.assertThat;
-import static google.registry.testing.DatastoreHelper.createTld;
+import static google.registry.testing.DatastoreHelper.createTlds;
 import static google.registry.testing.DatastoreHelper.newDomainResource;
 import static google.registry.testing.DatastoreHelper.newHostResource;
 import static google.registry.testing.DatastoreHelper.persistActiveContact;
@@ -58,9 +58,7 @@ public class GenerateZoneFilesActionTest extends MapreduceTestCase<GenerateZoneFilesAction> {
   @Test
   public void testGenerate() throws Exception {
     DateTime now = DateTime.now(DateTimeZone.UTC).withTimeAtStartOfDay();
-    createTld("tld");
-    createTld("com");
+    createTlds("tld", "com");
 
     ImmutableSet<InetAddress> ips =
         ImmutableSet.of(InetAddress.getByName("127.0.0.1"), InetAddress.getByName("::1"));
@@ -126,12 +124,15 @@ public class GenerateZoneFilesActionTest extends MapreduceTestCase<GenerateZoneFilesAction> {
     action.dnsDefaultDsTtl = Duration.standardSeconds(3333);
     action.clock = new FakeClock(now.plusMinutes(2)); // Move past the actions' 2 minute check.
 
-    Map<String, Object> response = action.handleJsonRequest(ImmutableMap.<String, Object>of(
-        "tlds", ImmutableList.of("tld"),
-        "exportTime", now));
-    assertThat(response).containsEntry(
-        "filenames",
-        ImmutableList.of("gs://zonefiles-bucket/tld-" + now + ".zone"));
+    Map<String, Object> response =
+        action.handleJsonRequest(
+            ImmutableMap.<String, Object>of("tlds", ImmutableList.of("tld"), "exportTime", now));
+    assertThat(response)
+        .containsEntry("filenames", ImmutableList.of("gs://zonefiles-bucket/tld-" + now + ".zone"));
+    assertThat(response).containsKey("mapreduceConsoleLink");
+    assertThat(response.get("mapreduceConsoleLink").toString())
+        .startsWith(
+            "Mapreduce console: https://backend.hostname.tld/_ah/pipeline/status.html?root=");
 
     executeTasksUntilEmpty("mapreduce");
 