Fix handling around size of batches of mapreduce entities to process

-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=149562105
This commit is contained in:
mcilwain 2017-03-08 11:27:29 -08:00 committed by Ben McIlwain
parent 1dfb6afa4b
commit 01bb3a30f2

View file

@@ -172,9 +172,12 @@ public class MapreduceEntityCleanupAction implements Runnable {
     DateTime cutoffDate = clock.nowUtc().minusDays(defaultedDaysOld);
     Optional<String> cursor = Optional.absent();
     do {
+      Optional<Integer> numJobsToRequest =
+          Optional.fromNullable(
+              numJobsToDelete.isPresent() ? numJobsToDelete.get() - numJobsProcessed : null);
       EligibleJobResults batch =
           mapreduceEntityCleanupUtil.findEligibleJobsByJobName(
-              jobName.orNull(), cutoffDate, numJobsToDelete, force.or(false), cursor);
+              jobName.orNull(), cutoffDate, numJobsToRequest, force.or(false), cursor);
       cursor = batch.cursor();
       // Individual batches can come back empty if none of the returned jobs meet the requirements
       // or if all jobs have been exhausted.