Remove unnecessary explicit generic type declarations

The type arguments can be inferred correctly even in Java 7 (via the diamond operator),
and the redundant explicit declarations show up as warnings in IntelliJ.
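
For reference, a minimal before/after sketch of the pattern this commit applies; the DiamondSketch class and its variable names are illustrative only and not taken from this codebase:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class DiamondSketch {
      public static void main(String[] args) {
        // Before: the type arguments are repeated on the right-hand side.
        Map<String, List<Integer>> explicit = new HashMap<String, List<Integer>>();

        // After: since Java 7, the diamond operator lets the compiler infer the
        // type arguments from the declared type, so the repetition can be dropped.
        Map<String, List<Integer>> inferred = new HashMap<>();

        // Both maps behave identically; only the source text differs.
        List<Integer> values = new ArrayList<>();
        values.add(1);
        explicit.put("a", values);
        inferred.put("a", values);
        System.out.println(explicit.equals(inferred)); // prints "true"
      }
    }

One caveat: the diamond cannot be used with anonymous inner classes until Java 9, so those still require explicit type arguments.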

-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=173451087
mcilwain 2017-10-25 14:35:29 -07:00 committed by jianglai
parent 0fdc189e9c
commit eed2e0c45f
20 changed files with 37 additions and 46 deletions

@@ -200,7 +200,7 @@ public class DeleteContactsAndHostsAction implements Runnable {
new DeleteEppResourceReducer(),
ImmutableList.of(
// Add an extra shard that maps over a null domain. See the mapper code for why.
- new NullInput<DomainBase>(),
+ new NullInput<>(),
EppResourceInputs.createEntityInput(DomainBase.class)))));
} catch (Throwable t) {
logger.severefmt(t, "Error while kicking off mapreduce to delete contacts/hosts");

@@ -113,7 +113,7 @@ public class ExpandRecurringBillingEventsAction implements Runnable {
new ExpandRecurringBillingEventsReducer(isDryRun, persistedCursorTime),
// Add an extra shard that maps over a null recurring event (see the mapper for why).
ImmutableList.of(
- new NullInput<Recurring>(),
+ new NullInput<>(),
createChildEntityInput(
ImmutableSet.<Class<? extends DomainResource>>of(DomainResource.class),
ImmutableSet.<Class<? extends Recurring>>of(Recurring.class))))));

@@ -168,7 +168,7 @@ class MapreduceEntityCleanupUtil {
*/
private ImmutableSet<String> getPossibleIdsForPipelineJob(
BaseDatastoreService datastore, String jobId) {
- return getPossibleIdsForPipelineJobRecur(datastore, jobId, new HashSet<String>());
+ return getPossibleIdsForPipelineJobRecur(datastore, jobId, new HashSet<>());
}
/**

@@ -139,7 +139,7 @@ public class RefreshDnsOnHostRenameAction implements Runnable {
new RefreshDnsOnHostRenameReducer(refreshRequests, retrier),
// Add an extra NullInput so that the reducer always fires exactly once.
ImmutableList.of(
- new NullInput<DomainResource>(), createEntityInput(DomainResource.class)))));
+ new NullInput<>(), createEntityInput(DomainResource.class)))));
} catch (Throwable t) {
logger.severefmt(t, "Error while kicking off mapreduce to refresh DNS for renamed hosts.");
}

@@ -83,8 +83,7 @@ public class CloudDnsWriter extends BaseDnsWriter {
private final String projectId;
private final String zoneName;
private final Dns dnsConnection;
- private final HashMap<String, ImmutableSet<ResourceRecordSet>> desiredRecords =
-     new HashMap<String, ImmutableSet<ResourceRecordSet>>();
+ private final HashMap<String, ImmutableSet<ResourceRecordSet>> desiredRecords = new HashMap<>();
@Inject
CloudDnsWriter(

@@ -37,6 +37,6 @@ class EppResourceEntityInput<R extends EppResource> extends EppResourceBaseInput
@Override
protected InputReader<R> bucketToReader(Key<EppResourceIndexBucket> bucketKey) {
- return new EppResourceEntityReader<R>(bucketKey, resourceClasses);
+ return new EppResourceEntityReader<>(bucketKey, resourceClasses);
}
}

@@ -53,11 +53,10 @@ public final class EppResourceInputs {
public static <R extends EppResource> Input<R> createEntityInput(
Class<? extends R> resourceClass,
Class<? extends R>... moreResourceClasses) {
- return new EppResourceEntityInput<R>(
+ return new EppResourceEntityInput<>(
ImmutableSet.copyOf(asList(resourceClass, moreResourceClasses)));
}
/**
* Returns a MapReduce {@link Input} that loads all {@link ImmutableObject} objects of a given
* type, including deleted resources, that are child entities of all {@link EppResource} objects

@@ -171,8 +171,7 @@ public final class RegistrarCreditBalance extends ImmutableObject implements Bui
ofy().load().type(RegistrarCreditBalance.class).ancestor(registrarCredit)) {
// Create the submap at this key if it doesn't exist already.
Map<DateTime, Money> submap =
- Optional.ofNullable(map.get(balance.effectiveTime))
-     .orElse(new HashMap<DateTime, Money>());
+ Optional.ofNullable(map.get(balance.effectiveTime)).orElse(new HashMap<>());
submap.put(balance.writtenTime, balance.amount);
map.put(balance.effectiveTime, submap);
}

@@ -62,7 +62,7 @@ public class MetricReporter extends AbstractScheduledService {
writeInterval,
threadFactory,
MetricRegistryImpl.getDefault(),
- new ArrayBlockingQueue<Optional<ImmutableList<MetricPoint<?>>>>(1000));
+ new ArrayBlockingQueue<>(1000));
}
@VisibleForTesting

@@ -29,7 +29,7 @@ abstract class RdapResourcesAndIncompletenessWarningType<T extends EppResource>
static <S extends EppResource> RdapResourcesAndIncompletenessWarningType<S> create(
List<S> resources, IncompletenessWarningType incompletenessWarningType) {
- return new AutoValue_RdapResourcesAndIncompletenessWarningType<S>(
+ return new AutoValue_RdapResourcesAndIncompletenessWarningType<>(
resources, incompletenessWarningType);
}

@@ -237,7 +237,7 @@ public final class RdeStagingAction implements Runnable {
reducer,
ImmutableList.of(
// Add an extra shard that maps over a null resource. See the mapper code for why.
- new NullInput<EppResource>(),
+ new NullInput<>(),
EppResourceInputs.createEntityInput(EppResource.class)))));
}

@@ -62,7 +62,7 @@ public class RemoveIpAddressCommand extends MutatingEppToolCommand {
continue;
}
- ArrayList<SoyMapData> ipAddresses = new ArrayList<SoyMapData>();
+ ArrayList<SoyMapData> ipAddresses = new ArrayList<>();
for (InetAddress address : host.getInetAddresses()) {
SoyMapData dataMap = new SoyMapData(
"address", address.getHostAddress(),

@@ -141,7 +141,7 @@ public class GenerateZoneFilesAction implements Runnable, JsonActionRunner.JsonA
tlds, exportTime, dnsDefaultATtl, dnsDefaultNsTtl, dnsDefaultDsTtl),
new GenerateBindFileReducer(bucket, exportTime, gcsBufferSize),
ImmutableList.of(
- new NullInput<EppResource>(),
+ new NullInput<>(),
createEntityInput(DomainResource.class)));
ImmutableList<String> filenames =
tlds.stream()

@@ -58,8 +58,7 @@ public final class ListDomainsAction extends ListObjectsAction<DomainResource> {
checkArgument(!tlds.isEmpty(), "Must specify TLDs to query");
assertTldsExist(tlds);
ImmutableSortedSet.Builder<DomainResource> builder =
- new ImmutableSortedSet.Builder<DomainResource>(
-     comparing(DomainResource::getFullyQualifiedDomainName));
+ new ImmutableSortedSet.Builder<>(comparing(DomainResource::getFullyQualifiedDomainName));
for (List<String> batch : Lists.partition(tlds.asList(), MAX_NUM_SUBQUERIES)) {
builder.addAll(queryNotDeleted(DomainResource.class, clock.nowUtc(), "tld in", batch));
}

@@ -359,7 +359,7 @@ public final class FormField<I, O> {
public Builder<I, O> matches(Pattern pattern, @Nullable String errorMessage) {
checkState(CharSequence.class.isAssignableFrom(typeOut));
return transform(
- new MatchesFunction<O>(checkNotNull(pattern), Optional.ofNullable(errorMessage)));
+ new MatchesFunction<>(checkNotNull(pattern), Optional.ofNullable(errorMessage)));
}
/** Alias for {@link #matches(Pattern, String) matches(pattern, null)} */
@@ -404,7 +404,7 @@ public final class FormField<I, O> {
checkState(CharSequence.class.isAssignableFrom(typeOut)
|| Collection.class.isAssignableFrom(typeOut)
|| Number.class.isAssignableFrom(typeOut));
- return transform(new RangeFunction<O>(checkNotNull(range)));
+ return transform(new RangeFunction<>(checkNotNull(range)));
}
/**
@@ -471,7 +471,7 @@ public final class FormField<I, O> {
public <C extends Enum<C>> Builder<I, C> asEnum(Class<C> enumClass) {
checkArgument(enumClass.isEnum());
checkState(String.class.isAssignableFrom(typeOut));
- return transform(enumClass, new ToEnumFunction<O, C>(enumClass));
+ return transform(enumClass, new ToEnumFunction<>(enumClass));
}
/**

@@ -33,7 +33,7 @@ public final class XmlToEnumMapper<T extends Enum<?>> {
* Creates a new {@link XmlToEnumMapper} from xml value to enum value.
*/
public static <T extends Enum<?>> XmlToEnumMapper<T> create(T[] enumValues) {
- return new XmlToEnumMapper<T>(enumValues);
+ return new XmlToEnumMapper<>(enumValues);
}
private XmlToEnumMapper(T[] enumValues) {