mirror of https://github.com/google/nomulus.git
synced 2025-05-29 17:00:11 +02:00
Rationalize logging statements across codebase
This fixes up the following problems:

1. Using string concatenation instead of the formatting variant methods.
2. Logging or swallowing exception messages without logging the exception itself (which swallows the stack trace).
3. Unnecessary logging on re-thrown exceptions.
4. Use of the formatting variant methods where no formatting is actually needed.
5. Complicated logging statements involving significant processing not being wrapped inside a logging level check.
6. Redundant logging of both an exception itself and its message (unnecessary duplication).
7. Use of the base Logger class instead of our FormattingLogger class.

-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=182419837
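To make the cleanup concrete, here is a minimal, hypothetical sketch of the before/after patterns this change targets. It is not code from the commit; the class and helper names are invented, but the FormattingLogger calls it uses (getLoggerForCallerClass, info/infofmt, severe/severefmt, and the isLoggable check added in this commit) are the ones that appear throughout the diff below.

import google.registry.util.FormattingLogger;
import java.util.logging.Level;

class LoggingPatternsExample {
  private static final FormattingLogger logger = FormattingLogger.getLoggerForCallerClass();

  void beforeAndAfter(String filename, Exception e) {
    // (1) String concatenation -> formatting variant.
    logger.info("Discovered file " + filename);            // before
    logger.infofmt("Discovered file: %s", filename);       // after

    // (2)/(6) Logging only the message loses the stack trace; log the exception itself once.
    logger.severefmt("Copy failed due to %s", e.getMessage());  // before
    logger.severe(e, "Copy failed");                            // after

    // (4) No formatting arguments -> plain variant.
    logger.info("Successfully loaded configuration.");

    // (5) Expensive message construction is guarded by a level check.
    if (logger.isLoggable(Level.INFO)) {
      logger.infofmt("Summary: %s", buildExpensiveSummary());
    }
  }

  private String buildExpensiveSummary() {
    return "placeholder for significant processing";  // hypothetical helper
  }
}

Item (7) is the same idea at the field level: several files below replace a java.util.logging.Logger field with FormattingLogger.getLoggerForCallerClass().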
This commit is contained in:
parent f22a42cd42
commit 81dc2bbbc3
47 changed files with 172 additions and 154 deletions
@@ -62,13 +62,13 @@ public final class CommitLogCheckpointAction implements Runnable {
 @Override
 public void run() {
 final CommitLogCheckpoint checkpoint = strategy.computeCheckpoint();
-logger.info("Generated candidate checkpoint for time " + checkpoint.getCheckpointTime());
+logger.infofmt("Generated candidate checkpoint for time: %s", checkpoint.getCheckpointTime());
 ofy()
 .transact(
 () -> {
 DateTime lastWrittenTime = CommitLogCheckpointRoot.loadRoot().getLastWrittenTime();
 if (isBeforeOrAt(checkpoint.getCheckpointTime(), lastWrittenTime)) {
-logger.info("Newer checkpoint already written at time: " + lastWrittenTime);
+logger.infofmt("Newer checkpoint already written at time: %s", lastWrittenTime);
 return;
 }
 ofy()
@@ -68,13 +68,13 @@ class GcsDiffFileLister {
 metadata = Futures.getUnchecked(upperBoundTimesToMetadata.get(checkpointTime));
 } else {
 String filename = DIFF_FILE_PREFIX + checkpointTime;
-logger.info("Patching GCS list; discovered file " + filename);
+logger.infofmt("Patching GCS list; discovered file: %s", filename);
 metadata = getMetadata(filename);

 // If we hit a gap, quit.
 if (metadata == null) {
 logger.infofmt(
-"Gap discovered in sequence terminating at %s, missing file %s",
+"Gap discovered in sequence terminating at %s, missing file: %s",
 sequence.lastKey(),
 filename);
 logger.infofmt("Found sequence from %s to %s", checkpointTime, lastTime);
@@ -89,9 +89,9 @@ class GcsDiffFileLister {
 }

 ImmutableList<GcsFileMetadata> listDiffFiles(DateTime fromTime, @Nullable DateTime toTime) {
-logger.info("Requested restore from time: " + fromTime);
+logger.infofmt("Requested restore from time: %s", fromTime);
 if (toTime != null) {
-logger.info(" Until time: " + toTime);
+logger.infofmt(" Until time: %s", toTime);
 }
 // List all of the diff files on GCS and build a map from each file's upper checkpoint time
 // (extracted from the filename) to its asynchronously-loaded metadata, keeping only files with
@@ -130,7 +130,7 @@ class GcsDiffFileLister {
 // last file and work backwards we can verify that we have no holes in our chain (although we
 // may be missing files at the end).
 TreeMap<DateTime, GcsFileMetadata> sequence = new TreeMap<>();
-logger.info("Restoring until: " + lastUpperBoundTime);
+logger.infofmt("Restoring until: %s", lastUpperBoundTime);
 boolean inconsistentFileSet = !constructDiffSequence(
 upperBoundTimesToMetadata, fromTime, lastUpperBoundTime, sequence);

@@ -157,7 +157,8 @@ class GcsDiffFileLister {
 "Unable to compute commit diff history, there are either gaps or forks in the history "
 + "file set. Check log for details.");

-logger.info("Actual restore from time: " + getLowerBoundTime(sequence.firstEntry().getValue()));
+logger.infofmt(
+"Actual restore from time: %s", getLowerBoundTime(sequence.firstEntry().getValue()));
 logger.infofmt("Found %d files to restore", sequence.size());
 return ImmutableList.copyOf(sequence.values());
 }
@@ -645,7 +645,7 @@ public class BigqueryConnection implements AutoCloseable {
 logger.info(summarizeCompletedJob(job));
 if (jobStatus.getErrors() != null) {
 for (ErrorProto error : jobStatus.getErrors()) {
-logger.warning(String.format("%s: %s", error.getReason(), error.getMessage()));
+logger.warningfmt("%s: %s", error.getReason(), error.getMessage());
 }
 }
 return job;

@@ -82,7 +82,7 @@ public final class CopyDetailReportsAction implements Runnable {
 .filter(objectName -> objectName.startsWith(BillingModule.DETAIL_REPORT_PREFIX))
 .collect(ImmutableList.toImmutableList());
 } catch (IOException e) {
-logger.severefmt("Copy failed due to %s", e.getMessage());
+logger.severe(e, "Copying registrar detail report failed");
 response.setStatus(SC_INTERNAL_SERVER_ERROR);
 response.setContentType(MediaType.PLAIN_TEXT_UTF_8);
 response.setPayload(String.format("Failure, encountered %s", e.getMessage()));

@@ -101,7 +101,7 @@ public class GenerateInvoicesAction implements Runnable {
 String jobId = launchResponse.getJob().getId();
 enqueuePublishTask(jobId);
 } catch (IOException e) {
-logger.warningfmt("Template Launch failed due to: %s", e.getMessage());
+logger.warning(e, "Template Launch failed");
 emailUtils.sendAlertEmail(String.format("Template Launch failed due to %s", e.getMessage()));
 response.setStatus(SC_INTERNAL_SERVER_ERROR);
 response.setContentType(MediaType.PLAIN_TEXT_UTF_8);
@@ -92,9 +92,9 @@ public final class YamlUtils {
 Optional<Map<String, Object>> customMap = loadAsMap(yaml, customYaml);
 if (customMap.isPresent()) {
 yamlMap = mergeMaps(yamlMap, customMap.get());
-logger.infofmt("Successfully loaded environment configuration YAML file.");
+logger.info("Successfully loaded environment configuration YAML file.");
 } else {
-logger.infofmt("Ignoring empty environment configuration YAML file.");
+logger.info("Ignoring empty environment configuration YAML file.");
 }
 return yaml.dump(yamlMap);
 }

@@ -167,15 +167,16 @@ public final class PublishDnsUpdatesAction implements Runnable, Callable<Void> {
 duration,
 domainsPublished,
 hostsPublished);
-logger.info(
-"writer.commit() statistics"
-+ "\nTLD: " + tld
-+ "\ncommitStatus: " + commitStatus
-+ "\nduration: " + duration
-+ "\ndomainsPublished: " + domainsPublished
-+ "\ndomainsRejected: " + domainsRejected
-+ "\nhostsPublished: " + hostsPublished
-+ "\nhostsRejected: " + hostsRejected);
+logger.infofmt(
+"writer.commit() statistics:: TLD: %s, commitStatus: %s, duration: %s, "
++ "domainsPublished: %d, domainsRejected: %d, hostsPublished: %d, hostsRejected: %d",
+tld,
+commitStatus,
+duration,
+domainsPublished,
+domainsRejected,
+hostsPublished,
+hostsRejected);
 }
 }
 }
@@ -8,6 +8,7 @@ java_library(
 name = "writer",
 srcs = glob(["*.java"]),
 deps = [
+"//java/google/registry/util",
 "@com_google_dagger",
 "@com_google_guava",
 ],

@@ -14,10 +14,9 @@

 package google.registry.dns.writer;

-import com.google.common.base.Joiner;
+import google.registry.util.FormattingLogger;
 import java.util.HashSet;
 import java.util.Set;
-import java.util.logging.Logger;
 import javax.inject.Inject;

 /**
@@ -33,7 +32,7 @@ public final class VoidDnsWriter extends BaseDnsWriter {
 */
 public static final String NAME = "VoidDnsWriter";

-private static final Logger logger = Logger.getLogger(VoidDnsWriter.class.getName());
+private static final FormattingLogger logger = FormattingLogger.getLoggerForCallerClass();

 private final Set<String> names = new HashSet<>();

@@ -52,7 +51,7 @@ public final class VoidDnsWriter extends BaseDnsWriter {

 @Override
 protected void commitUnchecked() {
-logger.warning("Ignoring DNS zone updates! No DnsWriterFactory implementation specified!\n"
-+ Joiner.on('\n').join(names));
+logger.warningfmt(
+"No DnsWriterFactory implementation specified; ignoring names to commit: %s", names);
 }
 }
@@ -82,7 +82,6 @@ public class BigqueryPollJobAction implements Runnable {
 try {
 task = (TaskOptions) new ObjectInputStream(new ByteArrayInputStream(payload)).readObject();
 } catch (ClassNotFoundException | IOException e) {
-logger.severe(e, e.toString());
 throw new BadRequestException("Cannot deserialize task from payload", e);
 }
 String taskName = enqueuer.enqueue(getQueue(chainedQueueName.get()), task).getName();
@@ -107,7 +106,7 @@ public class BigqueryPollJobAction implements Runnable {
 job = bigquery.jobs().get(projectId, jobId).execute();
 } catch (IOException e) {
 // We will throw a new exception because done==false, but first log this exception.
-logger.warning(e, e.getMessage());
+logger.warningfmt(e, "Error checking outcome of BigQuery job %s.", jobId);
 }
 // If job is not yet done, then throw an exception so that we'll return a failing HTTP status
 // code and the task will be retried.

@@ -121,10 +121,9 @@ public final class PublishDetailReportAction implements Runnable, JsonAction {
 throw new IllegalArgumentException(e.getMessage(), e);
 }
 } catch (Throwable e) {
-logger.severe(e, e.toString());
 String message = firstNonNull(e.getMessage(), e.toString());
 throw e instanceof IllegalArgumentException
-? new BadRequestException(message) : new InternalServerErrorException(message);
+? new BadRequestException(message, e) : new InternalServerErrorException(message, e);
 }
 }

@@ -66,7 +66,7 @@ public final class SyncGroupMembersAction implements Runnable {
 FAILED(SC_INTERNAL_SERVER_ERROR, "Error occurred while updating registrar contacts.") {
 @Override
 protected void log(Throwable cause) {
-logger.severefmt(cause, "%s", message);
+logger.severe(cause, message);
 }};

 final int statusCode;
@@ -79,7 +79,7 @@ public final class SyncGroupMembersAction implements Runnable {

 /** Log an error message. Results that use log levels other than info should override this. */
 void log(@Nullable Throwable cause) {
-logger.infofmt(cause, "%s", message);
+logger.info(cause, message);
 }
 }

@@ -90,9 +90,7 @@ public final class SyncGroupMembersAction implements Runnable {
 @Inject SyncGroupMembersAction() {}

 private void sendResponse(Result result, @Nullable List<Throwable> causes) {
-for (Throwable cause : nullToEmpty(causes)) {
-result.log(cause);
-}
+nullToEmpty(causes).forEach(result::log);
 response.setStatus(result.statusCode);
 response.setPayload(String.format("%s %s\n", result.name(), result.message));
 }
@@ -135,9 +135,8 @@ public class UpdateSnapshotViewAction implements Runnable {
 .build())));

 logger.infofmt(
-"Updated view %s to point at snapshot table %s.",
-String.format("[%s:%s.%s]", projectId, viewDataset, kindName),
-String.format("[%s:%s.%s]", projectId, sourceDatasetId, sourceTableId));
+"Updated view [%s:%s.%s] to point at snapshot table [%s:%s.%s].",
+projectId, viewDataset, kindName, projectId, sourceDatasetId, sourceTableId);
 }

 private static void updateTable(Bigquery bigquery, Table table) throws IOException {
@@ -151,7 +150,8 @@ public class UpdateSnapshotViewAction implements Runnable {
 if (e.getDetails().getCode() == 404) {
 bigquery.tables().insert(ref.getProjectId(), ref.getDatasetId(), table).execute();
 } else {
-logger.warningfmt("UpdateSnapshotViewAction failed, caught exception %s", e.getDetails());
+logger.warningfmt(
+e, "UpdateSnapshotViewAction failed, caught exception %s", e.getDetails());
 }
 }
 }
@@ -74,12 +74,12 @@ public class SyncRegistrarsSheetAction implements Runnable {
 MISSINGNO(SC_BAD_REQUEST, "No sheet ID specified or configured; dropping task.") {
 @Override
 protected void log(Exception cause) {
-logger.warningfmt(cause, "%s", message);
+logger.warning(cause, message);
 }},
 FAILED(SC_INTERNAL_SERVER_ERROR, "Spreadsheet synchronization failed") {
 @Override
 protected void log(Exception cause) {
-logger.severefmt(cause, "%s", message);
+logger.severe(cause, message);
 }};

 private final int statusCode;
@@ -92,7 +92,7 @@ public class SyncRegistrarsSheetAction implements Runnable {

 /** Log an error message. Results that use log levels other than info should override this. */
 protected void log(@Nullable Exception cause) {
-logger.infofmt(cause, "%s", message);
+logger.info(cause, message);
 }

 private void send(Response response, @Nullable Exception cause) {
@@ -35,6 +35,7 @@ import google.registry.monitoring.whitebox.BigQueryMetricsEnqueuer;
 import google.registry.monitoring.whitebox.EppMetric;
 import google.registry.util.FormattingLogger;
 import java.util.Optional;
+import java.util.logging.Level;
 import javax.inject.Inject;
 import org.json.simple.JSONValue;

@@ -70,19 +71,25 @@ public final class EppController {
 eppInput = unmarshal(EppInput.class, inputXmlBytes);
 } catch (EppException e) {
 // Log the unmarshalling error, with the raw bytes (in base64) to help with debugging.
+if (logger.isLoggable(Level.INFO)) {
 logger.infofmt(
 e,
 "EPP request XML unmarshalling failed - \"%s\":\n%s\n%s\n%s\n%s",
 e.getMessage(),
 JSONValue.toJSONString(
 ImmutableMap.<String, Object>of(
-"clientId", nullToEmpty(sessionMetadata.getClientId()),
-"resultCode", e.getResult().getCode().code,
-"resultMessage", e.getResult().getCode().msg,
-"xmlBytes", base64().encode(inputXmlBytes))),
+"clientId",
+nullToEmpty(sessionMetadata.getClientId()),
+"resultCode",
+e.getResult().getCode().code,
+"resultMessage",
+e.getResult().getCode().msg,
+"xmlBytes",
+base64().encode(inputXmlBytes))),
 Strings.repeat("=", 40),
 new String(inputXmlBytes, UTF_8).trim(), // Charset decoding failures are swallowed.
 Strings.repeat("=", 40));
+}
 // Return early by sending an error message, with no clTRID since we couldn't unmarshal it.
 eppMetricBuilder.setStatus(e.getResult().getCode());
 return getErrorResponse(
@@ -130,7 +130,8 @@ public class EppXmlTransformer {
 try {
 byte[] lenient = EppXmlTransformer.marshal(eppOutput, LENIENT);
 // Marshaling worked even though the results didn't validate against the schema.
-logger.severe(e, "Result marshaled but did not validate: " + new String(lenient, UTF_8));
+logger.severefmt(
+e, "Result marshaled but did not validate: %s", new String(lenient, UTF_8));
 return lenient;
 } catch (XmlException e2) {
 throw new RuntimeException(e2); // Failing to marshal at all is not recoverable.
@@ -31,6 +31,7 @@ import google.registry.model.eppcommon.Trid;
 import google.registry.model.eppinput.EppInput;
 import google.registry.util.FormattingLogger;
 import java.util.Optional;
+import java.util.logging.Level;
 import javax.inject.Inject;
 import org.json.simple.JSONValue;

@@ -64,6 +65,7 @@ public class FlowReporter {
 public void recordToLogs() {
 // WARNING: These log statements are parsed by reporting pipelines - be careful when changing.
 // It should be safe to add new keys, but be very cautious in changing existing keys.
+if (logger.isLoggable(Level.INFO)) {
 logger.infofmt(
 "%s: %s",
 EPPINPUT_LOG_SIGNATURE,
@@ -71,11 +73,13 @@ public class FlowReporter {
 ImmutableMap.<String, Object>of(
 "xml", prettyPrint(inputXmlBytes),
 "xmlBytes", base64().encode(inputXmlBytes))));
+}
 // Explicitly log flow metadata separately from the EPP XML itself so that it stays compact
 // enough to be sure to fit in a single log entry (the XML part in rare cases could be long
 // enough to overflow into multiple log entries, breaking routine parsing of the JSON format).
 String singleTargetId = eppInput.getSingleTargetId().orElse("");
 ImmutableList<String> targetIds = eppInput.getTargetIds();
+if (logger.isLoggable(Level.INFO)) {
 logger.infofmt(
 "%s: %s",
 METADATA_LOG_SIGNATURE,
@@ -89,13 +93,17 @@ public class FlowReporter {
 .put("targetId", singleTargetId)
 .put("targetIds", targetIds)
 .put(
-"tld", eppInput.isDomainResourceType() ? extractTld(singleTargetId).orElse("") : "")
+"tld",
+eppInput.isDomainResourceType() ? extractTld(singleTargetId).orElse("") : "")
 .put(
 "tlds",
-eppInput.isDomainResourceType() ? extractTlds(targetIds).asList() : EMPTY_LIST)
+eppInput.isDomainResourceType()
+? extractTlds(targetIds).asList()
+: EMPTY_LIST)
 .put("icannActivityReportField", extractActivityReportField(flowClass))
 .build()));
 }
+}

 /**
 * Returns the guessed TLD of the given domain name, assuming a second-level domain name, or
@@ -28,6 +28,7 @@ import google.registry.model.eppcommon.Trid;
 import google.registry.model.eppoutput.EppOutput;
 import google.registry.monitoring.whitebox.EppMetric;
 import google.registry.util.FormattingLogger;
+import java.util.logging.Level;
 import javax.inject.Inject;
 import javax.inject.Provider;

@@ -62,16 +63,18 @@ public class FlowRunner {
 // New data to be logged should be added only to the JSON log statement below.
 // TODO(b/20725722): remove this log statement entirely once we've transitioned to using the
 // log line below instead, or change this one to be for human consumption only.
+if (logger.isLoggable(Level.INFO)) {
 logger.infofmt(
 COMMAND_LOG_FORMAT,
 trid.getServerTransactionId(),
 clientId,
 sessionMetadata,
-prettyXml.replaceAll("\n", "\n\t"),
+prettyXml.replace("\n", "\n\t"),
 credentials,
 eppRequestSource,
 isDryRun ? "DRY_RUN" : "LIVE",
 isSuperuser ? "SUPERUSER" : "NORMAL");
+}
 // Record flow info to the GAE request logs for reporting purposes if it's not a dry run.
 if (!isDryRun) {
 flowReporter.recordToLogs();
@@ -124,7 +124,7 @@ public final class ComparatorKeyring extends ComparingInvocationHandler<Keyring>
 return Arrays.equals(a.getFingerprint(), b.getFingerprint())
 && Arrays.equals(a.getEncoded(), b.getEncoded());
 } catch (IOException e) {
-logger.severefmt("ComparatorKeyring error: PGPPublicKey.getEncoded failed: %s", e);
+logger.severe(e, "ComparatorKeyring error: PGPPublicKey.getEncoded failed.");
 return false;
 }
 }
@@ -147,7 +147,7 @@ public final class ComparatorKeyring extends ComparingInvocationHandler<Keyring>
 try {
 return Arrays.equals(a.getEncoded(), b.getEncoded());
 } catch (IOException e) {
-logger.severefmt("ComparatorKeyring error: PublicKeyPacket.getEncoded failed: %s", e);
+logger.severe(e, "ComparatorKeyring error: PublicKeyPacket.getEncoded failed.");
 return false;
 }
 }

@@ -51,12 +51,12 @@ public final class FrontendServlet extends HttpServlet {
 try {
 metricReporter.get().stopAsync().awaitTerminated(10, TimeUnit.SECONDS);
 logger.info("Shut down MetricReporter");
-} catch (TimeoutException timeoutException) {
-logger.severefmt("Failed to stop MetricReporter: %s", timeoutException);
+} catch (TimeoutException e) {
+logger.severe(e, "Failed to stop MetricReporter.");
 }
 });
 } catch (Exception e) {
-logger.severefmt(e, "Failed to initialize MetricReporter: %s");
+logger.severe(e, "Failed to initialize MetricReporter.");
 }
 }

@@ -58,7 +58,7 @@ public class BigQueryMetricsEnqueuer {
 queue.add(opts);
 } catch (TransientFailureException e) {
 // Log and swallow. We may drop some metrics here but this should be rare.
-logger.info(e, e.getMessage());
+logger.info(e, "Transient error occurred while recording metric; metric dropped.");
 }
 }
 }

@@ -97,7 +97,7 @@ public class MetricsExportAction implements Runnable {
 .collect(joining("\n")));
 }
 } catch (Throwable e) {
-logger.warningfmt("Caught Unknown Exception: %s", e);
+logger.warning(e, "Unknown error while exporting metrics to BigQuery.");
 }
 }
 }

@@ -399,7 +399,7 @@ public class RdapDomainSearchAction extends RdapSearchActionBase {
 if (hostKey != null) {
 builder.add(hostKey);
 } else {
-logger.warningfmt("Host key unexpectedly null");
+logger.warning("Host key unexpectedly null");
 }
 }
 }
@@ -87,14 +87,14 @@ class EscrowTaskRunner {
 final Duration interval) {
 Callable<Void> lockRunner =
 () -> {
-logger.info("tld=" + registry.getTld());
+logger.infofmt("TLD: %s", registry.getTld());
 DateTime startOfToday = clock.nowUtc().withTimeAtStartOfDay();
 Cursor cursor = ofy().load().key(Cursor.createKey(cursorType, registry)).now();
 final DateTime nextRequiredRun = (cursor == null ? startOfToday : cursor.getCursorTime());
 if (nextRequiredRun.isAfter(startOfToday)) {
 throw new NoContentException("Already completed");
 }
-logger.info("cursor=" + nextRequiredRun);
+logger.infofmt("Cursor: %s", nextRequiredRun);
 task.runWithLock(nextRequiredRun);
 ofy()
 .transact(

@@ -60,7 +60,7 @@ final class JSchSshSession implements Closeable {
 */
 JSchSshSession create(JSch jsch, URI uri) throws JSchException {
 RdeUploadUrl url = RdeUploadUrl.create(uri);
-logger.info("Connecting to SSH endpoint: " + url);
+logger.infofmt("Connecting to SSH endpoint: %s", url);
 Session session = jsch.getSession(
 url.getUser().orElse("domain-registry"),
 url.getHost(),
@@ -99,7 +99,7 @@ final class JSchSshSession implements Closeable {
 try {
 chan.cd(dir);
 } catch (SftpException e) {
-logger.warning(e.toString());
+logger.warning(e, "Could not open SFTP channel.");
 mkdirs(chan, dir);
 chan.cd(dir);
 }
@@ -223,9 +223,7 @@ public final class RdeStagingAction implements Runnable {
 response.setPayload(message);
 return;
 }
-for (PendingDeposit pending : pendings.values()) {
-logger.infofmt("%s", pending);
-}
+pendings.values().stream().map(Object::toString).forEach(logger::info);
 RdeStagingMapper mapper = new RdeStagingMapper(lenient ? LENIENT : STRICT, pendings);

 response.sendJavaScriptRedirect(createJobPath(mrRunner

@@ -167,7 +167,7 @@ public final class RdeStagingReducer extends Reducer<PendingDeposit, DepositFrag
 }
 if (!fragment.error().isEmpty()) {
 failed = true;
-logger.severe(fragment.error());
+logger.severefmt("Fragment error: %s", fragment.error());
 }
 }
 for (IdnTableEnum idn : IdnTableEnum.values()) {

@@ -149,9 +149,7 @@ public class RdeHostLinkAction implements Runnable {
 case HOST_NOT_FOUND:
 getContext().incrementCounter("hosts not found");
 logger.severefmt(
-"Host with name %s and repoid %s not found",
-xjcHost.getName(),
-xjcHost.getRoid());
+"Host with name %s and repoid %s not found", xjcHost.getName(), xjcHost.getRoid());
 break;
 case SUPERORDINATE_DOMAIN_IN_PENDING_DELETE:
 getContext()
@@ -65,7 +65,7 @@ public class RdeHostReader extends InputReader<JaxbFragment<XjcRdeHostElement>>
 parser.skipHosts(offset + count);
 return parser;
 } catch (Exception e) {
-logger.severefmt(e, "Error opening rde file %s/%s", importBucketName, importFileName);
+logger.severefmt(e, "Error opening RDE file %s/%s", importBucketName, importFileName);
 throw new RuntimeException(e);
 }
 }

@@ -111,7 +111,7 @@ public final class IcannReportingStagingAction implements Runnable {
 "ICANN Monthly report staging summary [FAILURE]",
 String.format(
 "Staging failed due to %s, check logs for more details.", thrown.toString()));
-logger.severefmt("Staging action failed due to %s", thrown.toString());
+logger.severe(thrown, "Staging action failed.");
 response.setStatus(SC_INTERNAL_SERVER_ERROR);
 response.setContentType(MediaType.PLAIN_TEXT_UTF_8);
 response.setPayload(String.format("Staging failed due to %s", thrown.toString()));

@@ -95,7 +95,7 @@ public final class IcannReportingUploadAction implements Runnable {
 },
 IOException.class);
 } catch (RuntimeException e) {
-logger.warningfmt("Upload to %s failed due to %s", gcsFilename.toString(), e.toString());
+logger.warningfmt(e, "Upload to %s failed.", gcsFilename.toString());
 }
 reportSummaryBuilder.put(reportFilename, success);
 }

@@ -42,7 +42,7 @@ public class ReportingEmailUtils {
 msg.setText(body);
 emailService.sendMessage(msg);
 } catch (Exception e) {
-logger.warningfmt("E-mail service failed due to %s", e.toString());
+logger.warning(e, "E-mail service failed.");
 }
 }
 }
@@ -195,6 +195,10 @@ public abstract class HttpException extends RuntimeException {
 super(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, message, null);
 }

+public InternalServerErrorException(String message, Throwable cause) {
+super(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, message, cause);
+}
+
 @Override
 public String getResponseCodeString() {
 return "Internal Server Error";

@@ -136,8 +136,8 @@ public final class NordnVerifyAction implements Runnable {
 logger.warning(result.toString());
 break;
 default:
-logger.warningfmt("LORDN verify task %s: Unexpected outcome: %s",
-actionLogId, result.toString());
+logger.warningfmt(
+"LORDN verify task %s: Unexpected outcome: %s", actionLogId, result.toString());
 break;
 }
 }

@@ -56,7 +56,8 @@ public final class TmchSmdrlAction implements Runnable {
 }
 SignedMarkRevocationList smdrl = SmdrlCsvParser.parse(lines);
 smdrl.save();
-logger.infofmt("Inserted %,d smd revocations into Datastore, created at %s",
+logger.infofmt(
+"Inserted %,d smd revocations into Datastore, created at %s",
 smdrl.size(), smdrl.getCreationTime());
 }
 }
@@ -32,6 +32,6 @@ class LogoutCommand implements Command {
 @Override
 public void run() throws IOException {
 StoredCredential.getDefaultDataStore(dataStoreFactory).clear();
-logger.infofmt("Logged out - credentials have been removed.");
+logger.info("Logged out - credentials have been removed.");
 }
 }

@@ -110,7 +110,7 @@ public class CreateGroupsAction implements Runnable {
 } else {
 response.setStatus(SC_OK);
 response.setPayload("Success!");
-logger.info("Successfully created groups for registrar: " + registrar.getRegistrarName());
+logger.infofmt("Successfully created groups for registrar: %s", registrar.getRegistrarName());
 }
 }

@@ -38,9 +38,9 @@ public abstract class CreateOrUpdatePremiumListAction implements Runnable {
 public void run() {
 try {
 savePremiumList();
-} catch (IllegalArgumentException iae) {
-logger.info(iae, "Usage error in attempting to save premium list from nomulus tool command");
-response.setPayload(ImmutableMap.of("error", iae.toString(), "status", "error"));
+} catch (IllegalArgumentException e) {
+logger.info(e, "Usage error in attempting to save premium list from nomulus tool command");
+response.setPayload(ImmutableMap.of("error", e.toString(), "status", "error"));
 } catch (Exception e) {
 logger.severe(e, "Unexpected error saving premium list from nomulus tool command");
 response.setPayload(ImmutableMap.of("error", e.toString(), "status", "error"));
@@ -112,8 +112,8 @@ public class DeleteEntityAction implements Runnable {
 try {
 return Optional.ofNullable(getDatastoreService().get(rawKey));
 } catch (EntityNotFoundException e) {
-logger.warningfmt(e, "Could not load entity from Datastore service with key %s",
-rawKey.toString());
+logger.warningfmt(
+e, "Could not load entity from Datastore service with key %s", rawKey.toString());
 return Optional.empty();
 }
 }

@@ -121,12 +121,12 @@ public abstract class ListObjectsAction<T extends ImmutableObject> implements Ru
 "lines", lines,
 "status", "success"));
 } catch (IllegalArgumentException e) {
-String message = firstNonNull(e.getMessage(), e.getClass().getName());
-logger.warning(e, message);
+logger.warning(e, "Error while listing objects.");
 // Don't return a non-200 response, since that will cause RegistryTool to barf instead of
 // letting ListObjectsCommand parse the JSON response and return a clean error.
-response.setPayload(ImmutableMap.of(
-"error", message,
+response.setPayload(
+ImmutableMap.of(
+"error", firstNonNull(e.getMessage(), e.getClass().getName()),
 "status", "error"));
 }
 }
@@ -177,7 +177,7 @@ public final class RegistrarPaymentAction implements Runnable, JsonAction {
 throw new FormFieldException(CURRENCY_FIELD.name(), "Unsupported currency.");
 }
 } catch (FormFieldException e) {
-logger.warning(e.toString());
+logger.warning(e, "Form field error in RegistrarPaymentAction.");
 return JsonResponseHelper.createFormFieldError(e.getMessage(), e.getFieldName());
 }
 Result<Transaction> result =

@@ -23,7 +23,6 @@ import java.net.InetAddress;
 import java.net.UnknownHostException;
 import java.util.Arrays;
 import java.util.Iterator;
-import java.util.logging.Logger;
 import javax.annotation.Nullable;

 /**
@@ -41,8 +40,7 @@ import javax.annotation.Nullable;
 // TODO(b/21870796): Migrate to Guava version when this is open-sourced.
 public class CidrAddressBlock implements Iterable<InetAddress>, Serializable {

-private static final Logger logger =
-Logger.getLogger(CidrAddressBlock.class.getName());
+private static final FormattingLogger logger = FormattingLogger.getLoggerForCallerClass();

 private final InetAddress ip;

@@ -338,13 +336,13 @@ public class CidrAddressBlock implements Iterable<InetAddress>, Serializable {

 try {
 return ip.equals(applyNetmask(ipAddr, netmask));
-} catch (IllegalArgumentException iae) {
+} catch (IllegalArgumentException e) {

 // Something has gone very wrong. This CidrAddressBlock should
 // not have been created with an invalid netmask and a valid
 // netmask should have been successfully applied to "ipAddr" as long
 // as it represents an address of the same family as "this.ip".
-logger.warning(iae.getMessage());
+logger.warning(e, "Error while applying netmask.");
 return false;
 }
 }
@@ -119,6 +119,10 @@ public class FormattingLogger {
 log(Level.SEVERE, cause, String.format(fmt, args));
 }

+public boolean isLoggable(Level level) {
+return logger.isLoggable(level);
+}
+
 public void addHandler(Handler handler) {
 logger.addHandler(handler);
 }

@@ -56,9 +56,9 @@ final class RegistrarLookupCommand implements WhoisCommand {
 }
 String normalized = normalizeRegistrarName(registrar.getRegistrarName());
 if (map.put(normalized, registrar) != null) {
-logger.warning(
-normalized
-+ " appeared as a normalized registrar name for more than one registrar");
+logger.warningfmt(
+"%s appeared as a normalized registrar name for more than one registrar.",
+normalized);
 }
 }
 // Use the normalized registrar name without its last word as a key, assuming there are
@@ -203,7 +203,7 @@ public class GcsDiffFileListerTest {

 assertThrows(IllegalStateException.class, () -> listDiffFiles(now.minusMinutes(9), null));
 assertLogContains(String.format(
-"Gap discovered in sequence terminating at %s, missing file commit_diff_until_%s",
+"Gap discovered in sequence terminating at %s, missing file: commit_diff_until_%s",
 now, now.minusMinutes(5)));
 assertLogContains(String.format(
 "Found sequence from %s to %s", now.minusMinutes(9), now.minusMinutes(6)));

@@ -136,7 +136,7 @@ public class BouncyCastleTest {
 }
 data = output.toByteArray();
 }
-logger.info("Compressed data: " + dumpHex(data));
+logger.infofmt("Compressed data: %s", dumpHex(data));

 // Decompress the data.
 try (ByteArrayInputStream input = new ByteArrayInputStream(data)) {
@@ -167,7 +167,7 @@ public class BouncyCastleTest {
 ByteArrayOutputStream output = new ByteArrayOutputStream();
 signer.generate().encode(output);
 byte[] signatureFileData = output.toByteArray();
-logger.info(".sig file data: " + dumpHex(signatureFileData));
+logger.infofmt(".sig file data: %s", dumpHex(signatureFileData));

 // Load algorithm information and signature data from "signatureFileData".
 PGPSignature sig;
@@ -207,7 +207,7 @@ public class BouncyCastleTest {
 signer.update(FALL_OF_HYPERION_A_DREAM.getBytes(UTF_8));
 signer.generate().encode(output);
 byte[] signatureFileData = output.toByteArray();
-logger.info(".sig file data: " + dumpHex(signatureFileData));
+logger.infofmt(".sig file data: %s", dumpHex(signatureFileData));

 // Load algorithm information and signature data from "signatureFileData".
 PGPSignature sig;
@@ -252,7 +252,7 @@ public class BouncyCastleTest {
 }
 encryptedData = output.toByteArray();
 }
-logger.info("Encrypted data: " + dumpHex(encryptedData));
+logger.infofmt("Encrypted data: %s", dumpHex(encryptedData));

 // Bob loads his "privateKey" into memory.
 PGPSecretKeyRing privateKeyRing = new BcPGPSecretKeyRing(PRIVATE_KEY);
@@ -296,7 +296,7 @@ public class BouncyCastleTest {
 }
 encryptedData = output.toByteArray();
 }
-logger.info("Encrypted data: " + dumpHex(encryptedData));
+logger.infofmt("Encrypted data: %s", dumpHex(encryptedData));

 // Bob loads his chain of private keys into memory.
 PGPSecretKeyRingCollection privateKeyRings = new BcPGPSecretKeyRingCollection(
@@ -344,7 +344,7 @@ public class BouncyCastleTest {
 }
 encryptedData = output.toByteArray();
 }
-logger.info("Encrypted data: " + dumpHex(encryptedData));
+logger.infofmt("Encrypted data: %s", dumpHex(encryptedData));

 // Bob loads his chain of private keys into memory.
 PGPSecretKeyRingCollection privateKeyRings = new BcPGPSecretKeyRingCollection(
@@ -16,6 +16,7 @@ package google.registry.testing.sftp;

 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
+import google.registry.util.FormattingLogger;
 import java.io.File;
 import java.io.IOException;
 import java.io.StringReader;
@@ -23,8 +24,6 @@ import java.security.KeyPair;
 import java.security.PublicKey;
 import java.security.Security;
 import java.util.Arrays;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 import javax.annotation.Nullable;
 import org.apache.ftpserver.FtpServer;
 import org.apache.ftpserver.ftplet.FtpException;
@@ -48,7 +47,7 @@ import org.bouncycastle.openssl.jcajce.JcaPEMKeyConverter;
 /** In-process SFTP server using Apache SSHD. */
 public class TestSftpServer implements FtpServer {

-private static final Logger logger = Logger.getLogger(TestSftpServer.class.getName());
+private static final FormattingLogger logger = FormattingLogger.getLoggerForCallerClass();

 private static SingletonRandomFactory secureRandomFactory;

@@ -94,10 +93,10 @@ public class TestSftpServer implements FtpServer {
 try (PEMParser pemParser = new PEMParser(new StringReader(key))) {
 PEMKeyPair pemPair = (PEMKeyPair) pemParser.readObject();
 KeyPair result = new JcaPEMKeyConverter().setProvider("BC").getKeyPair(pemPair);
-logger.info("Read key pair " + result);
+logger.infofmt("Read key pair %s", result);
 return result;
 } catch (IOException e) {
-logger.log(Level.SEVERE, "Couldn't read key pair from string(!)", e);
+logger.severe(e, "Couldn't read key pair from string.");
 return null;
 }
 }
@@ -190,7 +189,7 @@ public class TestSftpServer implements FtpServer {
 server.stop(true);
 stopped = true;
 } catch (IOException e) {
-logger.log(Level.WARNING, "Error shutting down server", e);
+logger.warning(e, "Error shutting down server");
 }
 }

@@ -201,7 +200,6 @@ public class TestSftpServer implements FtpServer {
 server.start();
 stopped = false;
 } catch (IOException e) {
-logger.log(Level.WARNING, "Couldn't start server", e);
 throw new FtpException("Couldn't start server", e);
 }
 }