Mirror of https://github.com/google/nomulus.git, synced 2025-05-28 16:30:12 +02:00
Rationalize logging statements across codebase
This fixes up the following problems:

1. Using string concatenation instead of the formatting variant methods.
2. Logging or swallowing an exception's message without logging the exception itself (which swallows the stack trace).
3. Unnecessary logging on re-thrown exceptions.
4. Use of the formatting variant methods when no formatting is needed.
5. Complicated logging statements that involve significant processing not being wrapped inside a logging-level check.
6. Redundantly logging both an exception itself and its message (needless duplication).
7. Use of the base Logger class instead of our FormattingLogger class.

-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=182419837
parent f22a42cd42
commit 81dc2bbbc3
47 changed files with 172 additions and 154 deletions
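For illustration, a minimal sketch of the logging patterns this change moves toward (the wrapper class and the expensive-payload helper below are hypothetical; the FormattingLogger calls are the ones that appear in the diff that follows):

import google.registry.util.FormattingLogger;
import java.util.logging.Level;

// Hypothetical illustration class, not part of this commit.
class LoggingStyleExamples {

  private static final FormattingLogger logger = FormattingLogger.getLoggerForCallerClass();

  void logIdiomatically(String filename, Exception e) {
    // (1, 7) Use FormattingLogger's formatting variants rather than string concatenation.
    logger.infofmt("Patching GCS list; discovered file: %s", filename);

    // (2, 6) Log the exception object itself so the stack trace is kept, instead of
    // logging only e.getMessage() or e.toString().
    logger.severe(e, "Copying registrar detail report failed");

    // (4) When nothing needs formatting, use the plain variant.
    logger.info("Successfully loaded environment configuration YAML file.");

    // (5) Wrap log statements that do significant processing in a level check.
    if (logger.isLoggable(Level.INFO)) {
      logger.infofmt("Metadata: %s", buildExpensiveLogPayload());
    }
  }

  // Hypothetical stand-in for an expensive computation (e.g. JSON serialization).
  private String buildExpensiveLogPayload() {
    return "{}";
  }
}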
@@ -62,13 +62,13 @@ public final class CommitLogCheckpointAction implements Runnable {
   @Override
   public void run() {
     final CommitLogCheckpoint checkpoint = strategy.computeCheckpoint();
-    logger.info("Generated candidate checkpoint for time " + checkpoint.getCheckpointTime());
+    logger.infofmt("Generated candidate checkpoint for time: %s", checkpoint.getCheckpointTime());
     ofy()
         .transact(
             () -> {
               DateTime lastWrittenTime = CommitLogCheckpointRoot.loadRoot().getLastWrittenTime();
               if (isBeforeOrAt(checkpoint.getCheckpointTime(), lastWrittenTime)) {
-                logger.info("Newer checkpoint already written at time: " + lastWrittenTime);
+                logger.infofmt("Newer checkpoint already written at time: %s", lastWrittenTime);
                 return;
               }
               ofy()
@@ -68,13 +68,13 @@ class GcsDiffFileLister {
       metadata = Futures.getUnchecked(upperBoundTimesToMetadata.get(checkpointTime));
     } else {
       String filename = DIFF_FILE_PREFIX + checkpointTime;
-      logger.info("Patching GCS list; discovered file " + filename);
+      logger.infofmt("Patching GCS list; discovered file: %s", filename);
       metadata = getMetadata(filename);

       // If we hit a gap, quit.
       if (metadata == null) {
         logger.infofmt(
-            "Gap discovered in sequence terminating at %s, missing file %s",
+            "Gap discovered in sequence terminating at %s, missing file: %s",
             sequence.lastKey(),
             filename);
         logger.infofmt("Found sequence from %s to %s", checkpointTime, lastTime);
@@ -89,9 +89,9 @@ class GcsDiffFileLister {
   }

   ImmutableList<GcsFileMetadata> listDiffFiles(DateTime fromTime, @Nullable DateTime toTime) {
-    logger.info("Requested restore from time: " + fromTime);
+    logger.infofmt("Requested restore from time: %s", fromTime);
     if (toTime != null) {
-      logger.info(" Until time: " + toTime);
+      logger.infofmt(" Until time: %s", toTime);
     }
     // List all of the diff files on GCS and build a map from each file's upper checkpoint time
     // (extracted from the filename) to its asynchronously-loaded metadata, keeping only files with
@@ -130,7 +130,7 @@ class GcsDiffFileLister {
     // last file and work backwards we can verify that we have no holes in our chain (although we
     // may be missing files at the end).
     TreeMap<DateTime, GcsFileMetadata> sequence = new TreeMap<>();
-    logger.info("Restoring until: " + lastUpperBoundTime);
+    logger.infofmt("Restoring until: %s", lastUpperBoundTime);
     boolean inconsistentFileSet = !constructDiffSequence(
         upperBoundTimesToMetadata, fromTime, lastUpperBoundTime, sequence);

@@ -157,7 +157,8 @@ class GcsDiffFileLister {
         "Unable to compute commit diff history, there are either gaps or forks in the history "
             + "file set. Check log for details.");

-    logger.info("Actual restore from time: " + getLowerBoundTime(sequence.firstEntry().getValue()));
+    logger.infofmt(
+        "Actual restore from time: %s", getLowerBoundTime(sequence.firstEntry().getValue()));
     logger.infofmt("Found %d files to restore", sequence.size());
     return ImmutableList.copyOf(sequence.values());
   }
@@ -645,7 +645,7 @@ public class BigqueryConnection implements AutoCloseable {
     logger.info(summarizeCompletedJob(job));
     if (jobStatus.getErrors() != null) {
       for (ErrorProto error : jobStatus.getErrors()) {
-        logger.warning(String.format("%s: %s", error.getReason(), error.getMessage()));
+        logger.warningfmt("%s: %s", error.getReason(), error.getMessage());
       }
     }
     return job;
@@ -82,7 +82,7 @@ public final class CopyDetailReportsAction implements Runnable {
               .filter(objectName -> objectName.startsWith(BillingModule.DETAIL_REPORT_PREFIX))
               .collect(ImmutableList.toImmutableList());
     } catch (IOException e) {
-      logger.severefmt("Copy failed due to %s", e.getMessage());
+      logger.severe(e, "Copying registrar detail report failed");
       response.setStatus(SC_INTERNAL_SERVER_ERROR);
       response.setContentType(MediaType.PLAIN_TEXT_UTF_8);
       response.setPayload(String.format("Failure, encountered %s", e.getMessage()));
@@ -101,7 +101,7 @@ public class GenerateInvoicesAction implements Runnable {
       String jobId = launchResponse.getJob().getId();
       enqueuePublishTask(jobId);
     } catch (IOException e) {
-      logger.warningfmt("Template Launch failed due to: %s", e.getMessage());
+      logger.warning(e, "Template Launch failed");
       emailUtils.sendAlertEmail(String.format("Template Launch failed due to %s", e.getMessage()));
       response.setStatus(SC_INTERNAL_SERVER_ERROR);
       response.setContentType(MediaType.PLAIN_TEXT_UTF_8);
@@ -92,9 +92,9 @@ public final class YamlUtils {
     Optional<Map<String, Object>> customMap = loadAsMap(yaml, customYaml);
     if (customMap.isPresent()) {
       yamlMap = mergeMaps(yamlMap, customMap.get());
-      logger.infofmt("Successfully loaded environment configuration YAML file.");
+      logger.info("Successfully loaded environment configuration YAML file.");
     } else {
-      logger.infofmt("Ignoring empty environment configuration YAML file.");
+      logger.info("Ignoring empty environment configuration YAML file.");
     }
     return yaml.dump(yamlMap);
   }
@@ -167,15 +167,16 @@ public final class PublishDnsUpdatesAction implements Runnable, Callable<Void> {
           duration,
           domainsPublished,
           hostsPublished);
-      logger.info(
-          "writer.commit() statistics"
-              + "\nTLD: " + tld
-              + "\ncommitStatus: " + commitStatus
-              + "\nduration: " + duration
-              + "\ndomainsPublished: " + domainsPublished
-              + "\ndomainsRejected: " + domainsRejected
-              + "\nhostsPublished: " + hostsPublished
-              + "\nhostsRejected: " + hostsRejected);
+      logger.infofmt(
+          "writer.commit() statistics:: TLD: %s, commitStatus: %s, duration: %s, "
+              + "domainsPublished: %d, domainsRejected: %d, hostsPublished: %d, hostsRejected: %d",
+          tld,
+          commitStatus,
+          duration,
+          domainsPublished,
+          domainsRejected,
+          hostsPublished,
+          hostsRejected);
     }
   }
 }
@@ -8,6 +8,7 @@ java_library(
     name = "writer",
     srcs = glob(["*.java"]),
     deps = [
+        "//java/google/registry/util",
         "@com_google_dagger",
        "@com_google_guava",
     ],
@@ -14,10 +14,9 @@

 package google.registry.dns.writer;

-import com.google.common.base.Joiner;
+import google.registry.util.FormattingLogger;
 import java.util.HashSet;
 import java.util.Set;
-import java.util.logging.Logger;
 import javax.inject.Inject;

 /**
@@ -33,7 +32,7 @@ public final class VoidDnsWriter extends BaseDnsWriter {
    */
   public static final String NAME = "VoidDnsWriter";

-  private static final Logger logger = Logger.getLogger(VoidDnsWriter.class.getName());
+  private static final FormattingLogger logger = FormattingLogger.getLoggerForCallerClass();

   private final Set<String> names = new HashSet<>();

@@ -52,7 +51,7 @@ public final class VoidDnsWriter extends BaseDnsWriter {

   @Override
   protected void commitUnchecked() {
-    logger.warning("Ignoring DNS zone updates! No DnsWriterFactory implementation specified!\n"
-        + Joiner.on('\n').join(names));
+    logger.warningfmt(
+        "No DnsWriterFactory implementation specified; ignoring names to commit: %s", names);
   }
 }
@@ -82,7 +82,6 @@ public class BigqueryPollJobAction implements Runnable {
     try {
       task = (TaskOptions) new ObjectInputStream(new ByteArrayInputStream(payload)).readObject();
     } catch (ClassNotFoundException | IOException e) {
-      logger.severe(e, e.toString());
       throw new BadRequestException("Cannot deserialize task from payload", e);
     }
     String taskName = enqueuer.enqueue(getQueue(chainedQueueName.get()), task).getName();
@@ -107,7 +106,7 @@ public class BigqueryPollJobAction implements Runnable {
       job = bigquery.jobs().get(projectId, jobId).execute();
     } catch (IOException e) {
       // We will throw a new exception because done==false, but first log this exception.
-      logger.warning(e, e.getMessage());
+      logger.warningfmt(e, "Error checking outcome of BigQuery job %s.", jobId);
     }
     // If job is not yet done, then throw an exception so that we'll return a failing HTTP status
     // code and the task will be retried.
@@ -121,10 +121,9 @@ public final class PublishDetailReportAction implements Runnable, JsonAction {
         throw new IllegalArgumentException(e.getMessage(), e);
       }
     } catch (Throwable e) {
-      logger.severe(e, e.toString());
       String message = firstNonNull(e.getMessage(), e.toString());
       throw e instanceof IllegalArgumentException
-          ? new BadRequestException(message) : new InternalServerErrorException(message);
+          ? new BadRequestException(message, e) : new InternalServerErrorException(message, e);
     }
   }

@@ -66,7 +66,7 @@ public final class SyncGroupMembersAction implements Runnable {
     FAILED(SC_INTERNAL_SERVER_ERROR, "Error occurred while updating registrar contacts.") {
       @Override
       protected void log(Throwable cause) {
-        logger.severefmt(cause, "%s", message);
+        logger.severe(cause, message);
       }};

     final int statusCode;
@@ -79,7 +79,7 @@ public final class SyncGroupMembersAction implements Runnable {

     /** Log an error message. Results that use log levels other than info should override this. */
     void log(@Nullable Throwable cause) {
-      logger.infofmt(cause, "%s", message);
+      logger.info(cause, message);
     }
   }

@@ -90,9 +90,7 @@ public final class SyncGroupMembersAction implements Runnable {
   @Inject SyncGroupMembersAction() {}

   private void sendResponse(Result result, @Nullable List<Throwable> causes) {
-    for (Throwable cause : nullToEmpty(causes)) {
-      result.log(cause);
-    }
+    nullToEmpty(causes).forEach(result::log);
     response.setStatus(result.statusCode);
     response.setPayload(String.format("%s %s\n", result.name(), result.message));
   }
@@ -135,9 +135,8 @@ public class UpdateSnapshotViewAction implements Runnable {
                     .build())));

     logger.infofmt(
-        "Updated view %s to point at snapshot table %s.",
-        String.format("[%s:%s.%s]", projectId, viewDataset, kindName),
-        String.format("[%s:%s.%s]", projectId, sourceDatasetId, sourceTableId));
+        "Updated view [%s:%s.%s] to point at snapshot table [%s:%s.%s].",
+        projectId, viewDataset, kindName, projectId, sourceDatasetId, sourceTableId);
   }

   private static void updateTable(Bigquery bigquery, Table table) throws IOException {
@@ -151,7 +150,8 @@ public class UpdateSnapshotViewAction implements Runnable {
       if (e.getDetails().getCode() == 404) {
         bigquery.tables().insert(ref.getProjectId(), ref.getDatasetId(), table).execute();
       } else {
-        logger.warningfmt("UpdateSnapshotViewAction failed, caught exception %s", e.getDetails());
+        logger.warningfmt(
+            e, "UpdateSnapshotViewAction failed, caught exception %s", e.getDetails());
       }
     }
   }
@@ -74,12 +74,12 @@ public class SyncRegistrarsSheetAction implements Runnable {
     MISSINGNO(SC_BAD_REQUEST, "No sheet ID specified or configured; dropping task.") {
       @Override
       protected void log(Exception cause) {
-        logger.warningfmt(cause, "%s", message);
+        logger.warning(cause, message);
       }},
     FAILED(SC_INTERNAL_SERVER_ERROR, "Spreadsheet synchronization failed") {
       @Override
       protected void log(Exception cause) {
-        logger.severefmt(cause, "%s", message);
+        logger.severe(cause, message);
       }};

     private final int statusCode;
@@ -92,7 +92,7 @@ public class SyncRegistrarsSheetAction implements Runnable {

     /** Log an error message. Results that use log levels other than info should override this. */
     protected void log(@Nullable Exception cause) {
-      logger.infofmt(cause, "%s", message);
+      logger.info(cause, message);
     }

     private void send(Response response, @Nullable Exception cause) {
@@ -35,6 +35,7 @@ import google.registry.monitoring.whitebox.BigQueryMetricsEnqueuer;
 import google.registry.monitoring.whitebox.EppMetric;
 import google.registry.util.FormattingLogger;
 import java.util.Optional;
+import java.util.logging.Level;
 import javax.inject.Inject;
 import org.json.simple.JSONValue;

@@ -70,19 +71,25 @@ public final class EppController {
       eppInput = unmarshal(EppInput.class, inputXmlBytes);
     } catch (EppException e) {
       // Log the unmarshalling error, with the raw bytes (in base64) to help with debugging.
+      if (logger.isLoggable(Level.INFO)) {
         logger.infofmt(
             e,
             "EPP request XML unmarshalling failed - \"%s\":\n%s\n%s\n%s\n%s",
             e.getMessage(),
             JSONValue.toJSONString(
                 ImmutableMap.<String, Object>of(
-                    "clientId", nullToEmpty(sessionMetadata.getClientId()),
-                    "resultCode", e.getResult().getCode().code,
-                    "resultMessage", e.getResult().getCode().msg,
-                    "xmlBytes", base64().encode(inputXmlBytes))),
+                    "clientId",
+                    nullToEmpty(sessionMetadata.getClientId()),
+                    "resultCode",
+                    e.getResult().getCode().code,
+                    "resultMessage",
+                    e.getResult().getCode().msg,
+                    "xmlBytes",
+                    base64().encode(inputXmlBytes))),
             Strings.repeat("=", 40),
             new String(inputXmlBytes, UTF_8).trim(), // Charset decoding failures are swallowed.
             Strings.repeat("=", 40));
+      }
       // Return early by sending an error message, with no clTRID since we couldn't unmarshal it.
       eppMetricBuilder.setStatus(e.getResult().getCode());
       return getErrorResponse(
@@ -130,7 +130,8 @@ public class EppXmlTransformer {
     try {
       byte[] lenient = EppXmlTransformer.marshal(eppOutput, LENIENT);
       // Marshaling worked even though the results didn't validate against the schema.
-      logger.severe(e, "Result marshaled but did not validate: " + new String(lenient, UTF_8));
+      logger.severefmt(
+          e, "Result marshaled but did not validate: %s", new String(lenient, UTF_8));
       return lenient;
     } catch (XmlException e2) {
       throw new RuntimeException(e2); // Failing to marshal at all is not recoverable.
@@ -31,6 +31,7 @@ import google.registry.model.eppcommon.Trid;
 import google.registry.model.eppinput.EppInput;
 import google.registry.util.FormattingLogger;
 import java.util.Optional;
+import java.util.logging.Level;
 import javax.inject.Inject;
 import org.json.simple.JSONValue;

@@ -64,6 +65,7 @@ public class FlowReporter {
   public void recordToLogs() {
     // WARNING: These log statements are parsed by reporting pipelines - be careful when changing.
     // It should be safe to add new keys, but be very cautious in changing existing keys.
+    if (logger.isLoggable(Level.INFO)) {
       logger.infofmt(
           "%s: %s",
           EPPINPUT_LOG_SIGNATURE,
@@ -71,11 +73,13 @@ public class FlowReporter {
               ImmutableMap.<String, Object>of(
                   "xml", prettyPrint(inputXmlBytes),
                   "xmlBytes", base64().encode(inputXmlBytes))));
+    }
     // Explicitly log flow metadata separately from the EPP XML itself so that it stays compact
     // enough to be sure to fit in a single log entry (the XML part in rare cases could be long
     // enough to overflow into multiple log entries, breaking routine parsing of the JSON format).
     String singleTargetId = eppInput.getSingleTargetId().orElse("");
     ImmutableList<String> targetIds = eppInput.getTargetIds();
+    if (logger.isLoggable(Level.INFO)) {
       logger.infofmt(
           "%s: %s",
           METADATA_LOG_SIGNATURE,
@@ -89,13 +93,17 @@ public class FlowReporter {
                   .put("targetId", singleTargetId)
                   .put("targetIds", targetIds)
                   .put(
-                      "tld", eppInput.isDomainResourceType() ? extractTld(singleTargetId).orElse("") : "")
+                      "tld",
+                      eppInput.isDomainResourceType() ? extractTld(singleTargetId).orElse("") : "")
                   .put(
                       "tlds",
-                      eppInput.isDomainResourceType() ? extractTlds(targetIds).asList() : EMPTY_LIST)
+                      eppInput.isDomainResourceType()
+                          ? extractTlds(targetIds).asList()
+                          : EMPTY_LIST)
                   .put("icannActivityReportField", extractActivityReportField(flowClass))
                   .build()));
+    }
   }

   /**
    * Returns the guessed TLD of the given domain name, assuming a second-level domain name, or
@@ -28,6 +28,7 @@ import google.registry.model.eppcommon.Trid;
 import google.registry.model.eppoutput.EppOutput;
 import google.registry.monitoring.whitebox.EppMetric;
 import google.registry.util.FormattingLogger;
+import java.util.logging.Level;
 import javax.inject.Inject;
 import javax.inject.Provider;

@@ -62,16 +63,18 @@ public class FlowRunner {
     // New data to be logged should be added only to the JSON log statement below.
     // TODO(b/20725722): remove this log statement entirely once we've transitioned to using the
     // log line below instead, or change this one to be for human consumption only.
+    if (logger.isLoggable(Level.INFO)) {
       logger.infofmt(
           COMMAND_LOG_FORMAT,
           trid.getServerTransactionId(),
           clientId,
           sessionMetadata,
-          prettyXml.replaceAll("\n", "\n\t"),
+          prettyXml.replace("\n", "\n\t"),
           credentials,
           eppRequestSource,
           isDryRun ? "DRY_RUN" : "LIVE",
           isSuperuser ? "SUPERUSER" : "NORMAL");
+    }
     // Record flow info to the GAE request logs for reporting purposes if it's not a dry run.
     if (!isDryRun) {
       flowReporter.recordToLogs();
@@ -124,7 +124,7 @@ public final class ComparatorKeyring extends ComparingInvocationHandler<Keyring>
       return Arrays.equals(a.getFingerprint(), b.getFingerprint())
           && Arrays.equals(a.getEncoded(), b.getEncoded());
     } catch (IOException e) {
-      logger.severefmt("ComparatorKeyring error: PGPPublicKey.getEncoded failed: %s", e);
+      logger.severe(e, "ComparatorKeyring error: PGPPublicKey.getEncoded failed.");
       return false;
     }
   }
@@ -147,7 +147,7 @@ public final class ComparatorKeyring extends ComparingInvocationHandler<Keyring>
     try {
       return Arrays.equals(a.getEncoded(), b.getEncoded());
     } catch (IOException e) {
-      logger.severefmt("ComparatorKeyring error: PublicKeyPacket.getEncoded failed: %s", e);
+      logger.severe(e, "ComparatorKeyring error: PublicKeyPacket.getEncoded failed.");
       return false;
     }
   }
@@ -51,12 +51,12 @@ public final class FrontendServlet extends HttpServlet {
             try {
               metricReporter.get().stopAsync().awaitTerminated(10, TimeUnit.SECONDS);
               logger.info("Shut down MetricReporter");
-            } catch (TimeoutException timeoutException) {
-              logger.severefmt("Failed to stop MetricReporter: %s", timeoutException);
+            } catch (TimeoutException e) {
+              logger.severe(e, "Failed to stop MetricReporter.");
             }
           });
     } catch (Exception e) {
-      logger.severefmt(e, "Failed to initialize MetricReporter: %s");
+      logger.severe(e, "Failed to initialize MetricReporter.");
     }
   }

@@ -58,7 +58,7 @@ public class BigQueryMetricsEnqueuer {
       queue.add(opts);
     } catch (TransientFailureException e) {
       // Log and swallow. We may drop some metrics here but this should be rare.
-      logger.info(e, e.getMessage());
+      logger.info(e, "Transient error occurred while recording metric; metric dropped.");
     }
   }
 }
@@ -97,7 +97,7 @@ public class MetricsExportAction implements Runnable {
                 .collect(joining("\n")));
       }
     } catch (Throwable e) {
-      logger.warningfmt("Caught Unknown Exception: %s", e);
+      logger.warning(e, "Unknown error while exporting metrics to BigQuery.");
     }
   }
 }
@@ -399,7 +399,7 @@ public class RdapDomainSearchAction extends RdapSearchActionBase {
         if (hostKey != null) {
           builder.add(hostKey);
         } else {
-          logger.warningfmt("Host key unexpectedly null");
+          logger.warning("Host key unexpectedly null");
         }
       }
     }
@@ -87,14 +87,14 @@ class EscrowTaskRunner {
       final Duration interval) {
     Callable<Void> lockRunner =
         () -> {
-          logger.info("tld=" + registry.getTld());
+          logger.infofmt("TLD: %s", registry.getTld());
           DateTime startOfToday = clock.nowUtc().withTimeAtStartOfDay();
           Cursor cursor = ofy().load().key(Cursor.createKey(cursorType, registry)).now();
           final DateTime nextRequiredRun = (cursor == null ? startOfToday : cursor.getCursorTime());
           if (nextRequiredRun.isAfter(startOfToday)) {
             throw new NoContentException("Already completed");
           }
-          logger.info("cursor=" + nextRequiredRun);
+          logger.infofmt("Cursor: %s", nextRequiredRun);
           task.runWithLock(nextRequiredRun);
           ofy()
               .transact(
@@ -60,7 +60,7 @@ final class JSchSshSession implements Closeable {
    */
   JSchSshSession create(JSch jsch, URI uri) throws JSchException {
     RdeUploadUrl url = RdeUploadUrl.create(uri);
-    logger.info("Connecting to SSH endpoint: " + url);
+    logger.infofmt("Connecting to SSH endpoint: %s", url);
     Session session = jsch.getSession(
         url.getUser().orElse("domain-registry"),
         url.getHost(),
@@ -99,7 +99,7 @@ final class JSchSshSession implements Closeable {
     try {
       chan.cd(dir);
     } catch (SftpException e) {
-      logger.warning(e.toString());
+      logger.warning(e, "Could not open SFTP channel.");
       mkdirs(chan, dir);
       chan.cd(dir);
     }
@@ -223,9 +223,7 @@ public final class RdeStagingAction implements Runnable {
       response.setPayload(message);
       return;
     }
-    for (PendingDeposit pending : pendings.values()) {
-      logger.infofmt("%s", pending);
-    }
+    pendings.values().stream().map(Object::toString).forEach(logger::info);
     RdeStagingMapper mapper = new RdeStagingMapper(lenient ? LENIENT : STRICT, pendings);

     response.sendJavaScriptRedirect(createJobPath(mrRunner
@@ -167,7 +167,7 @@ public final class RdeStagingReducer extends Reducer<PendingDeposit, DepositFrag
       }
       if (!fragment.error().isEmpty()) {
         failed = true;
-        logger.severe(fragment.error());
+        logger.severefmt("Fragment error: %s", fragment.error());
       }
     }
     for (IdnTableEnum idn : IdnTableEnum.values()) {
@@ -149,9 +149,7 @@ public class RdeHostLinkAction implements Runnable {
         case HOST_NOT_FOUND:
           getContext().incrementCounter("hosts not found");
           logger.severefmt(
-              "Host with name %s and repoid %s not found",
-              xjcHost.getName(),
-              xjcHost.getRoid());
+              "Host with name %s and repoid %s not found", xjcHost.getName(), xjcHost.getRoid());
           break;
         case SUPERORDINATE_DOMAIN_IN_PENDING_DELETE:
           getContext()
@@ -65,7 +65,7 @@ public class RdeHostReader extends InputReader<JaxbFragment<XjcRdeHostElement>>
       parser.skipHosts(offset + count);
       return parser;
     } catch (Exception e) {
-      logger.severefmt(e, "Error opening rde file %s/%s", importBucketName, importFileName);
+      logger.severefmt(e, "Error opening RDE file %s/%s", importBucketName, importFileName);
       throw new RuntimeException(e);
     }
   }
@@ -111,7 +111,7 @@ public final class IcannReportingStagingAction implements Runnable {
               "ICANN Monthly report staging summary [FAILURE]",
               String.format(
                   "Staging failed due to %s, check logs for more details.", thrown.toString()));
-          logger.severefmt("Staging action failed due to %s", thrown.toString());
+          logger.severe(thrown, "Staging action failed.");
           response.setStatus(SC_INTERNAL_SERVER_ERROR);
           response.setContentType(MediaType.PLAIN_TEXT_UTF_8);
           response.setPayload(String.format("Staging failed due to %s", thrown.toString()));
@@ -95,7 +95,7 @@ public final class IcannReportingUploadAction implements Runnable {
           },
           IOException.class);
     } catch (RuntimeException e) {
-      logger.warningfmt("Upload to %s failed due to %s", gcsFilename.toString(), e.toString());
+      logger.warningfmt(e, "Upload to %s failed.", gcsFilename.toString());
     }
     reportSummaryBuilder.put(reportFilename, success);
   }
@@ -42,7 +42,7 @@ public class ReportingEmailUtils {
       msg.setText(body);
       emailService.sendMessage(msg);
     } catch (Exception e) {
-      logger.warningfmt("E-mail service failed due to %s", e.toString());
+      logger.warning(e, "E-mail service failed.");
     }
   }
 }
@@ -195,6 +195,10 @@ public abstract class HttpException extends RuntimeException {
       super(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, message, null);
     }

+    public InternalServerErrorException(String message, Throwable cause) {
+      super(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, message, cause);
+    }
+
     @Override
     public String getResponseCodeString() {
       return "Internal Server Error";
@@ -136,8 +136,8 @@ public final class NordnVerifyAction implements Runnable {
         logger.warning(result.toString());
         break;
       default:
-        logger.warningfmt("LORDN verify task %s: Unexpected outcome: %s",
-            actionLogId, result.toString());
+        logger.warningfmt(
+            "LORDN verify task %s: Unexpected outcome: %s", actionLogId, result.toString());
         break;
     }
   }
@@ -56,7 +56,8 @@ public final class TmchSmdrlAction implements Runnable {
     }
     SignedMarkRevocationList smdrl = SmdrlCsvParser.parse(lines);
     smdrl.save();
-    logger.infofmt("Inserted %,d smd revocations into Datastore, created at %s",
+    logger.infofmt(
+        "Inserted %,d smd revocations into Datastore, created at %s",
         smdrl.size(), smdrl.getCreationTime());
   }
 }
@@ -32,6 +32,6 @@ class LogoutCommand implements Command {
   @Override
   public void run() throws IOException {
     StoredCredential.getDefaultDataStore(dataStoreFactory).clear();
-    logger.infofmt("Logged out - credentials have been removed.");
+    logger.info("Logged out - credentials have been removed.");
   }
 }
@@ -110,7 +110,7 @@ public class CreateGroupsAction implements Runnable {
     } else {
       response.setStatus(SC_OK);
       response.setPayload("Success!");
-      logger.info("Successfully created groups for registrar: " + registrar.getRegistrarName());
+      logger.infofmt("Successfully created groups for registrar: %s", registrar.getRegistrarName());
     }
   }

@@ -38,9 +38,9 @@ public abstract class CreateOrUpdatePremiumListAction implements Runnable {
   public void run() {
     try {
       savePremiumList();
-    } catch (IllegalArgumentException iae) {
-      logger.info(iae, "Usage error in attempting to save premium list from nomulus tool command");
-      response.setPayload(ImmutableMap.of("error", iae.toString(), "status", "error"));
+    } catch (IllegalArgumentException e) {
+      logger.info(e, "Usage error in attempting to save premium list from nomulus tool command");
+      response.setPayload(ImmutableMap.of("error", e.toString(), "status", "error"));
     } catch (Exception e) {
       logger.severe(e, "Unexpected error saving premium list from nomulus tool command");
       response.setPayload(ImmutableMap.of("error", e.toString(), "status", "error"));
@@ -112,8 +112,8 @@ public class DeleteEntityAction implements Runnable {
     try {
       return Optional.ofNullable(getDatastoreService().get(rawKey));
     } catch (EntityNotFoundException e) {
-      logger.warningfmt(e, "Could not load entity from Datastore service with key %s",
-          rawKey.toString());
+      logger.warningfmt(
+          e, "Could not load entity from Datastore service with key %s", rawKey.toString());
       return Optional.empty();
     }
   }
@@ -121,12 +121,12 @@ public abstract class ListObjectsAction<T extends ImmutableObject> implements Ru
           "lines", lines,
           "status", "success"));
     } catch (IllegalArgumentException e) {
-      String message = firstNonNull(e.getMessage(), e.getClass().getName());
-      logger.warning(e, message);
+      logger.warning(e, "Error while listing objects.");
       // Don't return a non-200 response, since that will cause RegistryTool to barf instead of
       // letting ListObjectsCommand parse the JSON response and return a clean error.
-      response.setPayload(ImmutableMap.of(
-          "error", message,
+      response.setPayload(
+          ImmutableMap.of(
+              "error", firstNonNull(e.getMessage(), e.getClass().getName()),
              "status", "error"));
     }
   }
@@ -177,7 +177,7 @@ public final class RegistrarPaymentAction implements Runnable, JsonAction {
         throw new FormFieldException(CURRENCY_FIELD.name(), "Unsupported currency.");
       }
     } catch (FormFieldException e) {
-      logger.warning(e.toString());
+      logger.warning(e, "Form field error in RegistrarPaymentAction.");
       return JsonResponseHelper.createFormFieldError(e.getMessage(), e.getFieldName());
     }
     Result<Transaction> result =
@@ -23,7 +23,6 @@ import java.net.InetAddress;
 import java.net.UnknownHostException;
 import java.util.Arrays;
 import java.util.Iterator;
-import java.util.logging.Logger;
 import javax.annotation.Nullable;

 /**
@@ -41,8 +40,7 @@ import javax.annotation.Nullable;
 // TODO(b/21870796): Migrate to Guava version when this is open-sourced.
 public class CidrAddressBlock implements Iterable<InetAddress>, Serializable {

-  private static final Logger logger =
-      Logger.getLogger(CidrAddressBlock.class.getName());
+  private static final FormattingLogger logger = FormattingLogger.getLoggerForCallerClass();

   private final InetAddress ip;

@@ -338,13 +336,13 @@ public class CidrAddressBlock implements Iterable<InetAddress>, Serializable {

     try {
       return ip.equals(applyNetmask(ipAddr, netmask));
-    } catch (IllegalArgumentException iae) {
+    } catch (IllegalArgumentException e) {

       // Something has gone very wrong. This CidrAddressBlock should
       // not have been created with an invalid netmask and a valid
       // netmask should have been successfully applied to "ipAddr" as long
       // as it represents an address of the same family as "this.ip".
-      logger.warning(iae.getMessage());
+      logger.warning(e, "Error while applying netmask.");
       return false;
     }
   }
@@ -119,6 +119,10 @@ public class FormattingLogger {
     log(Level.SEVERE, cause, String.format(fmt, args));
   }

+  public boolean isLoggable(Level level) {
+    return logger.isLoggable(level);
+  }
+
   public void addHandler(Handler handler) {
     logger.addHandler(handler);
   }
@@ -56,9 +56,9 @@ final class RegistrarLookupCommand implements WhoisCommand {
       }
       String normalized = normalizeRegistrarName(registrar.getRegistrarName());
       if (map.put(normalized, registrar) != null) {
-        logger.warning(
-            normalized
-                + " appeared as a normalized registrar name for more than one registrar");
+        logger.warningfmt(
+            "%s appeared as a normalized registrar name for more than one registrar.",
+            normalized);
       }
     }
     // Use the normalized registrar name without its last word as a key, assuming there are
@@ -203,7 +203,7 @@ public class GcsDiffFileListerTest {

     assertThrows(IllegalStateException.class, () -> listDiffFiles(now.minusMinutes(9), null));
     assertLogContains(String.format(
-        "Gap discovered in sequence terminating at %s, missing file commit_diff_until_%s",
+        "Gap discovered in sequence terminating at %s, missing file: commit_diff_until_%s",
         now, now.minusMinutes(5)));
     assertLogContains(String.format(
         "Found sequence from %s to %s", now.minusMinutes(9), now.minusMinutes(6)));
@@ -136,7 +136,7 @@ public class BouncyCastleTest {
       }
       data = output.toByteArray();
     }
-    logger.info("Compressed data: " + dumpHex(data));
+    logger.infofmt("Compressed data: %s", dumpHex(data));

     // Decompress the data.
     try (ByteArrayInputStream input = new ByteArrayInputStream(data)) {
@@ -167,7 +167,7 @@ public class BouncyCastleTest {
     ByteArrayOutputStream output = new ByteArrayOutputStream();
     signer.generate().encode(output);
     byte[] signatureFileData = output.toByteArray();
-    logger.info(".sig file data: " + dumpHex(signatureFileData));
+    logger.infofmt(".sig file data: %s", dumpHex(signatureFileData));

     // Load algorithm information and signature data from "signatureFileData".
     PGPSignature sig;
@@ -207,7 +207,7 @@ public class BouncyCastleTest {
     signer.update(FALL_OF_HYPERION_A_DREAM.getBytes(UTF_8));
     signer.generate().encode(output);
     byte[] signatureFileData = output.toByteArray();
-    logger.info(".sig file data: " + dumpHex(signatureFileData));
+    logger.infofmt(".sig file data: %s", dumpHex(signatureFileData));

     // Load algorithm information and signature data from "signatureFileData".
     PGPSignature sig;
@@ -252,7 +252,7 @@ public class BouncyCastleTest {
       }
       encryptedData = output.toByteArray();
     }
-    logger.info("Encrypted data: " + dumpHex(encryptedData));
+    logger.infofmt("Encrypted data: %s", dumpHex(encryptedData));

     // Bob loads his "privateKey" into memory.
     PGPSecretKeyRing privateKeyRing = new BcPGPSecretKeyRing(PRIVATE_KEY);
@@ -296,7 +296,7 @@ public class BouncyCastleTest {
       }
       encryptedData = output.toByteArray();
     }
-    logger.info("Encrypted data: " + dumpHex(encryptedData));
+    logger.infofmt("Encrypted data: %s", dumpHex(encryptedData));

     // Bob loads his chain of private keys into memory.
     PGPSecretKeyRingCollection privateKeyRings = new BcPGPSecretKeyRingCollection(
@@ -344,7 +344,7 @@ public class BouncyCastleTest {
       }
       encryptedData = output.toByteArray();
     }
-    logger.info("Encrypted data: " + dumpHex(encryptedData));
+    logger.infofmt("Encrypted data: %s", dumpHex(encryptedData));

     // Bob loads his chain of private keys into memory.
     PGPSecretKeyRingCollection privateKeyRings = new BcPGPSecretKeyRingCollection(
@@ -16,6 +16,7 @@ package google.registry.testing.sftp;

 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
+import google.registry.util.FormattingLogger;
 import java.io.File;
 import java.io.IOException;
 import java.io.StringReader;
@@ -23,8 +24,6 @@ import java.security.KeyPair;
 import java.security.PublicKey;
 import java.security.Security;
 import java.util.Arrays;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 import javax.annotation.Nullable;
 import org.apache.ftpserver.FtpServer;
 import org.apache.ftpserver.ftplet.FtpException;
@@ -48,7 +47,7 @@ import org.bouncycastle.openssl.jcajce.JcaPEMKeyConverter;
 /** In-process SFTP server using Apache SSHD. */
 public class TestSftpServer implements FtpServer {

-  private static final Logger logger = Logger.getLogger(TestSftpServer.class.getName());
+  private static final FormattingLogger logger = FormattingLogger.getLoggerForCallerClass();

   private static SingletonRandomFactory secureRandomFactory;

@@ -94,10 +93,10 @@ public class TestSftpServer implements FtpServer {
     try (PEMParser pemParser = new PEMParser(new StringReader(key))) {
       PEMKeyPair pemPair = (PEMKeyPair) pemParser.readObject();
       KeyPair result = new JcaPEMKeyConverter().setProvider("BC").getKeyPair(pemPair);
-      logger.info("Read key pair " + result);
+      logger.infofmt("Read key pair %s", result);
       return result;
     } catch (IOException e) {
-      logger.log(Level.SEVERE, "Couldn't read key pair from string(!)", e);
+      logger.severe(e, "Couldn't read key pair from string.");
       return null;
     }
   }
@@ -190,7 +189,7 @@ public class TestSftpServer implements FtpServer {
       server.stop(true);
       stopped = true;
     } catch (IOException e) {
-      logger.log(Level.WARNING, "Error shutting down server", e);
+      logger.warning(e, "Error shutting down server");
     }
   }

@@ -201,7 +200,6 @@ public class TestSftpServer implements FtpServer {
       server.start();
       stopped = false;
     } catch (IOException e) {
-      logger.log(Level.WARNING, "Couldn't start server", e);
       throw new FtpException("Couldn't start server", e);
     }
   }